Merge branch 'ohmyzsh:master' into features/rke-plugin
commit 4e3fed007a
503 changed files with 51593 additions and 13280 deletions
plugins/1password/1password.plugin.zsh (Normal file)
@@ -0,0 +1,9 @@
# Do nothing if op is not installed
(( ${+commands[op]} )) || return

# Load op completion
eval "$(op completion zsh)"
compdef _op op

# Load opswd function
autoload -Uz opswd
plugins/1password/README.md (Normal file)
@@ -0,0 +1,38 @@
# 1Password

This plugin adds 1Password functionality to oh-my-zsh.

To use, add `1password` to the list of plugins in your `.zshrc` file:

```zsh
plugins=(... 1password)
```

Then, you can use the command `opswd` to copy passwords for services into your
clipboard.

## `opswd`

The `opswd` command is a wrapper around the `op` command. It takes a service
name as an argument and copies the password for that service to the clipboard.

If the service also contains a TOTP, it is copied to the clipboard after 10 seconds.
Finally, after 20 seconds, the clipboard is cleared.

The function has completion support, so you can use tab completion to select
which service you want to get.

For example, `opswd github.com` will put your GitHub password into your clipboard, and if
a TOTP is available, it will be copied to the clipboard after 10 seconds.

> NOTE: you need to be signed in for `opswd` to work. If you are using biometric unlock,
> 1Password CLI will automatically prompt you to sign in. See:
>
> - [Get started with 1Password CLI 2: Sign in](https://developer.1password.com/docs/cli/get-started#sign-in)
> - [Sign in to your 1Password account manually](https://developer.1password.com/docs/cli/sign-in-manually)

## Requirements

- [1Password CLI 2](https://developer.1password.com/docs/cli/get-started#install)

> NOTE: if you're using 1Password CLI 1, [see how to upgrade to CLI 2](https://developer.1password.com/docs/cli/upgrade).
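A hypothetical session, assuming a Login item named `github.com` exists in your vault and the CLI is signed in (the ✔ messages come from `opswd` itself):

```zsh
opswd github.com
# => ✔ password for github.com copied to clipboard
# => ✔ TOTP for github.com copied to clipboard   (about 10 seconds later, if the item has one)
# The clipboard is then cleared in the background roughly 20 seconds after that.
```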
plugins/1password/_opswd (Normal file)
@@ -0,0 +1,19 @@
#compdef opswd

function _opswd() {
  local -a services
  services=("${(@f)$(op item list --categories Login --cache 2>/dev/null | awk 'NR != 1 { print $2 }')}")
  [[ -z "$services" ]] || compadd -a -- services
}

# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
  function _opswd() {
    local -a services
    services=("${(@f)$(op list items --categories Login 2>/dev/null | op get item - --fields title 2>/dev/null)}")
    [[ -z "$services" ]] || compadd -a -- services
  }
}

_opswd "$@"
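To preview the candidates this completion would offer, the same pipeline can be run by hand; this is only an illustrative check, not part of the plugin:

```zsh
# Print the service names offered as completion candidates (same command as the CLI 2 branch above)
op item list --categories Login --cache 2>/dev/null | awk 'NR != 1 { print $2 }'
```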
plugins/1password/opswd (Normal file)
@@ -0,0 +1,78 @@
#autoload

# opswd puts the password of the named service into the clipboard. If there's a
# one time password, it will be copied into the clipboard after 10 seconds. The
# clipboard is cleared after another 20 seconds.
function opswd() {
  if [[ $# -lt 1 ]]; then
    echo "Usage: opswd <service>"
    return 1
  fi

  local service=$1

  # If not logged in, print error and return
  op user list > /dev/null || return

  local password
  # Copy the password to the clipboard
  if ! password=$(op item get "$service" --fields password 2>/dev/null); then
    echo "error: could not obtain password for $service"
    return 1
  fi

  echo -n "$password" | clipcopy
  echo "✔ password for $service copied to clipboard"

  # If there's a one time password, copy it to the clipboard after 10 seconds
  local totp
  if totp=$(op item get --otp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
    sleep 10 && echo -n "$totp" | clipcopy
    echo "✔ TOTP for $service copied to clipboard"
  fi

  (sleep 20 && clipcopy </dev/null 2>/dev/null) &!
}

# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
  print -ru2 ${(%):-"%F{yellow}opswd: usage with op version $(op --version) is deprecated. Upgrade to CLI 2 and reload zsh.
For instructions, see https://developer.1password.com/docs/cli/upgrade.%f"}

  # opswd puts the password of the named service into the clipboard. If there's a
  # one time password, it will be copied into the clipboard after 10 seconds. The
  # clipboard is cleared after another 20 seconds.
  function opswd() {
    if [[ $# -lt 1 ]]; then
      echo "Usage: opswd <service>"
      return 1
    fi

    local service=$1

    # If not logged in, print error and return
    op list users > /dev/null || return

    local password
    # Copy the password to the clipboard
    if ! password=$(op get item "$service" --fields password 2>/dev/null); then
      echo "error: could not obtain password for $service"
      return 1
    fi

    echo -n "$password" | clipcopy
    echo "✔ password for $service copied to clipboard"

    # If there's a one time password, copy it to the clipboard after 10 seconds
    local totp
    if totp=$(op get totp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
      sleep 10 && echo -n "$totp" | clipcopy
      echo "✔ TOTP for $service copied to clipboard"
    fi

    (sleep 20 && clipcopy </dev/null 2>/dev/null) &!
  }
}

opswd "$@"
plugins/ag/README.md (Normal file)
@@ -0,0 +1,13 @@
# The Silver Searcher

This plugin provides completion support for [`ag`](https://github.com/ggreer/the_silver_searcher).

To use it, add `ag` to the plugins array in your zshrc file:

```zsh
plugins=(... ag)
```

## INSTALLATION NOTES

Besides enabling the plugin, `ag` itself needs to be installed by following these steps: https://github.com/ggreer/the_silver_searcher#installing.
plugins/ag/_ag (Normal file)
@@ -0,0 +1,66 @@
#compdef ag
#autoload

typeset -A opt_args

# Took the liberty of not listing every option… especially aliases and -D
_ag () {
  local -a _1st_arguments
  _1st_arguments=(
    '--ackmate:Print results in AckMate-parseable format'
    {'-A','--after'}':[LINES] Print lines after match (Default: 2)'
    {'-B','--before'}':[LINES] Print lines before match (Default: 2)'
    '--break:Print newlines between matches in different files'
    '--nobreak:Do not print newlines between matches in different files'
    {'-c','--count'}':Only print the number of matches in each file'
    '--color:Print color codes in results (Default: On)'
    '--nocolor:Do not print color codes in results'
    '--color-line-number:Color codes for line numbers (Default: 1;33)'
    '--color-match:Color codes for result match numbers (Default: 30;43)'
    '--color-path:Color codes for path names (Default: 1;32)'
    '--column:Print column numbers in results'
    {'-H','--heading'}':Print file names (On unless searching a single file)'
    '--noheading:Do not print file names (On unless searching a single file)'
    '--line-numbers:Print line numbers even for streams'
    {'-C','--context'}':[LINES] Print lines before and after matches (Default: 2)'
    '-g:[PATTERN] Print filenames matching PATTERN'
    {'-l','--files-with-matches'}':Only print filenames that contain matches'
    {'-L','--files-without-matches'}':Only print filenames that do not contain matches'
    '--no-numbers:Do not print line numbers'
    {'-o','--only-matching'}':Prints only the matching part of the lines'
    '--print-long-lines:Print matches on very long lines (Default: 2k characters)'
    '--passthrough:When searching a stream, print all lines even if they do not match'
    '--silent:Suppress all log messages, including errors'
    '--stats:Print stats (files scanned, time taken, etc.)'
    '--vimgrep:Print results like vim :vimgrep /pattern/g would'
    {'-0','--null'}':Separate filenames with null (for "xargs -0")'

    {'-a','--all-types'}':Search all files (does not include hidden files / .gitignore)'
    '--depth:[NUM] Search up to NUM directories deep (Default: 25)'
    {'-f','--follow'}':Follow symlinks'
    {'-G','--file-search-regex'}':[PATTERN] Limit search to filenames matching PATTERN'
    '--hidden:Search hidden files (obeys .*ignore files)'
    {'-i','--ignore-case'}':Match case insensitively'
    '--ignore:[PATTERN] Ignore files/directories matching PATTERN'
    {'-m','--max-count'}':[NUM] Skip the rest of a file after NUM matches (Default: 10k)'
    {'-p','--path-to-agignore'}':[PATH] Use .agignore file at PATH'
    {'-Q','--literal'}':Do not parse PATTERN as a regular expression'
    {'-s','--case-sensitive'}':Match case'
    {'-S','--smart-case'}':Insensitive match unless PATTERN has uppercase (Default: On)'
    '--search-binary:Search binary files for matches'
    {'-t','--all-text'}':Search all text files (Hidden files not included)'
    {'-u','--unrestricted'}':Search all files (ignore .agignore and _all_)'
    {'-U','--skip-vcs-ignores'}':Ignore VCS files (still obey .agignore)'
    {'-v','--invert-match'}':Invert match'
    {'-w','--word-regexp'}':Only match whole words'
    {'-z','--search-zip'}':Search contents of compressed (e.g., gzip) files'

    '--list-file-types:list of supported file types'
  )

  if [[ $words[-1] =~ "^-" ]]; then
    _describe -t commands "ag options" _1st_arguments && ret=0
  else
    _files && ret=0
  fi
}
plugins/aliases/.gitignore (Normal file, vendored)
@@ -0,0 +1 @@
__pycache__
plugins/aliases/README.md (Normal file)
@@ -0,0 +1,28 @@
# Aliases cheatsheet

**Maintainer:** [@hqingyi](https://github.com/hqingyi)

With lots of third-party aliases installed by other plugins, this plugin helps list the
shortcuts that are currently available based on the plugins you have enabled.

To use it, add `aliases` to the plugins array in your zshrc file:

```zsh
plugins=(aliases)
```

Requirements: Python 3 needs to be installed.

## Usage

- `acs`: show all aliases by group.

- `acs -h/--help`: print help message.

- `acs <keyword>`: filter aliases by `<keyword>` and highlight matches.

- `acs -g <group>/--group <group>`: show only aliases for group `<group>`. The flag can be
  passed multiple times to show several groups.

- `acs --groups-only`: show only group names.
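A couple of illustrative invocations (the groups shown depend entirely on which plugins and aliases you have loaded):

```zsh
# Filter aliases whose name or expansion mentions "log"
acs log

# Restrict the listing to one or more groups (the flag can be repeated)
acs -g git -g docker
```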
plugins/aliases/aliases.plugin.zsh (Normal file)
@@ -0,0 +1,14 @@
# Handle $0 according to the standard:
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
0="${${(M)0:#/*}:-$PWD/$0}"

eval '
function acs(){
  (( $+commands[python3] )) || {
    echo "[error] No python executable detected"
    return
  }
  alias | python3 "'"${0:h}"'/cheatsheet.py" "$@"
}
'
plugins/aliases/cheatsheet.py (Normal file)
@@ -0,0 +1,68 @@
#!/usr/bin/env python3
import sys
import itertools
import termcolor
import argparse

def parse(line):
    left = line[0:line.find('=')].strip()
    right = line[line.find('=')+1:].strip('\'"\n ')
    try:
        cmd = next(part for part in right.split() if len([char for char in '=<>' if char in part])==0)
    except StopIteration:
        cmd = right
    return (left, right, cmd)

def cheatsheet(lines):
    exps = [ parse(line) for line in lines ]
    cheatsheet = {'_default': []}
    for key, group in itertools.groupby(exps, lambda exp:exp[2]):
        group_list = [ item for item in group ]
        if len(group_list)==1:
            target_aliases = cheatsheet['_default']
        else:
            if key not in cheatsheet:
                cheatsheet[key] = []
            target_aliases = cheatsheet[key]
        target_aliases.extend(group_list)
    return cheatsheet

def pretty_print_group(key, aliases, highlight=None, only_groupname=False):
    if len(aliases) == 0:
        return
    group_hl_formatter = lambda g, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'red') for part in ('[%s]' % g).split(hl)])
    alias_hl_formatter = lambda alias, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'green') for part in ('\t%s = %s' % alias[0:2]).split(hl)])
    group_formatter = lambda g: termcolor.colored('[%s]' % g, 'red')
    alias_formatter = lambda alias: termcolor.colored('\t%s = %s' % alias[0:2], 'green')
    if highlight and len(highlight)>0:
        print (group_hl_formatter(key, highlight))
        if not only_groupname:
            print ('\n'.join([alias_hl_formatter(alias, highlight) for alias in aliases]))
    else:
        print (group_formatter(key))
        if not only_groupname:
            print ('\n'.join([alias_formatter(alias) for alias in aliases]))
    print ('')

def pretty_print(cheatsheet, wfilter, group_list=None, groups_only=False):
    sorted_key = sorted(cheatsheet.keys())
    for key in sorted_key:
        if group_list and key not in group_list:
            continue
        aliases = cheatsheet.get(key)
        if not wfilter:
            pretty_print_group(key, aliases, wfilter, groups_only)
        else:
            pretty_print_group(key, [ alias for alias in aliases if alias[0].find(wfilter)>-1 or alias[1].find(wfilter)>-1], wfilter)

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Pretty print aliases.")
    parser.add_argument('filter', nargs="*", help="search aliases matching string")
    parser.add_argument('-g', '--group', dest="group_list", action='append', help="only print aliases in given groups")
    parser.add_argument('--groups', dest='groups_only', action='store_true', help="only print alias groups")
    args = parser.parse_args()

    lines = sys.stdin.readlines()
    group_list = args.group_list or None
    wfilter = " ".join(args.filter) or None
    pretty_print(cheatsheet(lines), wfilter, group_list, args.groups_only)
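For reference, `acs` is only a thin wrapper: it pipes the output of the `alias` builtin into this script, so the same listing can be produced by hand. The path below assumes a default `~/.oh-my-zsh` checkout and is only illustrative:

```zsh
# Equivalent to `acs -g git`, run without the wrapper function
alias | python3 ~/.oh-my-zsh/plugins/aliases/cheatsheet.py -g git
```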
168
plugins/aliases/termcolor.py
Normal file
168
plugins/aliases/termcolor.py
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
# coding: utf-8
|
||||
# Copyright (c) 2008-2011 Volvox Development Team
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# Author: Konstantin Lepa <konstantin.lepa@gmail.com>
|
||||
|
||||
"""ANSI Color formatting for output in terminal."""
|
||||
|
||||
from __future__ import print_function
|
||||
import os
|
||||
|
||||
|
||||
__ALL__ = [ 'colored', 'cprint' ]
|
||||
|
||||
VERSION = (1, 1, 0)
|
||||
|
||||
ATTRIBUTES = dict(
|
||||
list(zip([
|
||||
'bold',
|
||||
'dark',
|
||||
'',
|
||||
'underline',
|
||||
'blink',
|
||||
'',
|
||||
'reverse',
|
||||
'concealed'
|
||||
],
|
||||
list(range(1, 9))
|
||||
))
|
||||
)
|
||||
del ATTRIBUTES['']
|
||||
|
||||
|
||||
HIGHLIGHTS = dict(
|
||||
list(zip([
|
||||
'on_grey',
|
||||
'on_red',
|
||||
'on_green',
|
||||
'on_yellow',
|
||||
'on_blue',
|
||||
'on_magenta',
|
||||
'on_cyan',
|
||||
'on_white'
|
||||
],
|
||||
list(range(40, 48))
|
||||
))
|
||||
)
|
||||
|
||||
|
||||
COLORS = dict(
|
||||
list(zip([
|
||||
'grey',
|
||||
'red',
|
||||
'green',
|
||||
'yellow',
|
||||
'blue',
|
||||
'magenta',
|
||||
'cyan',
|
||||
'white',
|
||||
],
|
||||
list(range(30, 38))
|
||||
))
|
||||
)
|
||||
|
||||
|
||||
RESET = '\033[0m'
|
||||
|
||||
|
||||
def colored(text, color=None, on_color=None, attrs=None):
|
||||
"""Colorize text.
|
||||
|
||||
Available text colors:
|
||||
red, green, yellow, blue, magenta, cyan, white.
|
||||
|
||||
Available text highlights:
|
||||
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
|
||||
|
||||
Available attributes:
|
||||
bold, dark, underline, blink, reverse, concealed.
|
||||
|
||||
Example:
|
||||
colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink'])
|
||||
colored('Hello, World!', 'green')
|
||||
"""
|
||||
if os.getenv('ANSI_COLORS_DISABLED') is None:
|
||||
fmt_str = '\033[%dm%s'
|
||||
if color is not None:
|
||||
text = fmt_str % (COLORS[color], text)
|
||||
|
||||
if on_color is not None:
|
||||
text = fmt_str % (HIGHLIGHTS[on_color], text)
|
||||
|
||||
if attrs is not None:
|
||||
for attr in attrs:
|
||||
text = fmt_str % (ATTRIBUTES[attr], text)
|
||||
|
||||
text += RESET
|
||||
return text
|
||||
|
||||
|
||||
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
|
||||
"""Print colorize text.
|
||||
|
||||
It accepts arguments of print function.
|
||||
"""
|
||||
|
||||
print((colored(text, color, on_color, attrs)), **kwargs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('Current terminal type: %s' % os.getenv('TERM'))
|
||||
print('Test basic colors:')
|
||||
cprint('Grey color', 'grey')
|
||||
cprint('Red color', 'red')
|
||||
cprint('Green color', 'green')
|
||||
cprint('Yellow color', 'yellow')
|
||||
cprint('Blue color', 'blue')
|
||||
cprint('Magenta color', 'magenta')
|
||||
cprint('Cyan color', 'cyan')
|
||||
cprint('White color', 'white')
|
||||
print(('-' * 78))
|
||||
|
||||
print('Test highlights:')
|
||||
cprint('On grey color', on_color='on_grey')
|
||||
cprint('On red color', on_color='on_red')
|
||||
cprint('On green color', on_color='on_green')
|
||||
cprint('On yellow color', on_color='on_yellow')
|
||||
cprint('On blue color', on_color='on_blue')
|
||||
cprint('On magenta color', on_color='on_magenta')
|
||||
cprint('On cyan color', on_color='on_cyan')
|
||||
cprint('On white color', color='grey', on_color='on_white')
|
||||
print('-' * 78)
|
||||
|
||||
print('Test attributes:')
|
||||
cprint('Bold grey color', 'grey', attrs=['bold'])
|
||||
cprint('Dark red color', 'red', attrs=['dark'])
|
||||
cprint('Underline green color', 'green', attrs=['underline'])
|
||||
cprint('Blink yellow color', 'yellow', attrs=['blink'])
|
||||
cprint('Reversed blue color', 'blue', attrs=['reverse'])
|
||||
cprint('Concealed Magenta color', 'magenta', attrs=['concealed'])
|
||||
cprint('Bold underline reverse cyan color', 'cyan',
|
||||
attrs=['bold', 'underline', 'reverse'])
|
||||
cprint('Dark blink concealed white color', 'white',
|
||||
attrs=['dark', 'blink', 'concealed'])
|
||||
print(('-' * 78))
|
||||
|
||||
print('Test mixing:')
|
||||
cprint('Underline red on grey color', 'red', 'on_grey',
|
||||
['underline'])
|
||||
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
|
||||
|
||||
|
|
@@ -4,7 +4,7 @@ function ansible-version(){
}

function ansible-role-init(){
if ! [ -z $1] ; then
if ! [ -z $1 ] ; then
echo "Ansible Role : $1 Creating...."
ansible-galaxy init $1
tree $1

@@ -25,4 +25,4 @@ alias ainv='ansible-inventory '
alias adoc='ansible-doc '
alias agal='ansible-galaxy '
alias apull='ansible-pull '
alias aval='ansible-vault'
alias aval='ansible-vault'
@@ -2,9 +2,9 @@

This plugin provides completion for [Ant](https://ant.apache.org/).

To use it add ant to the plugins array in your zshrc file.
To use it, add `ant` to the plugins array in your zshrc file:

```bash
```zsh
plugins=(... ant)
```
plugins/ant/_ant (Normal file)
@@ -0,0 +1,22 @@
#compdef ant

_ant_does_target_list_need_generating () {
  [[ ! -f .ant_targets ]] && return 0
  [[ build.xml -nt .ant_targets ]] && return 0
  return 1
}

_ant () {
  if [[ ! -f build.xml ]]; then
    return
  fi

  if ! _ant_does_target_list_need_generating; then
    return
  fi

  ant -p | awk -F " " 'NR > 5 { print lastTarget } { lastTarget = $1 }' >| .ant_targets
  compadd -- "$(cat .ant_targets)"
}

_ant "$@"
@@ -1,16 +1,2 @@
_ant_does_target_list_need_generating () {
[ ! -f .ant_targets ] && return 0;
[ build.xml -nt .ant_targets ] && return 0;
return 1;
}

_ant () {
if [ -f build.xml ]; then
if _ant_does_target_list_need_generating; then
ant -p | awk -F " " 'NR > 5 { print lastTarget }{lastTarget = $1}' > .ant_targets
fi
compadd -- `cat .ant_targets`
fi
}

compdef _ant ant
# Default to colored output
export ANT_ARGS='-logger org.apache.tools.ant.listener.AnsiColorLogger'
@@ -14,6 +14,7 @@ plugins=(... arcanist)
| ------- | ---------------------------------- |
| ara   | `arc amend`  |
| arb   | `arc branch` |
| arbl  | `arc bland`  |
| arco  | `arc cover`  |
| arci  | `arc commit` |
| ard   | `arc diff`   |

@@ -24,6 +25,7 @@ plugins=(... arcanist)
| ardpc | `arc diff --plan-changes` |
| are   | `arc export` |
| arh   | `arc help`   |
| arho  | `arc hotfix` |
| arl   | `arc land`   |
| arli  | `arc lint`   |
| arls  | `arc list`   |

@@ -5,6 +5,7 @@

alias ara='arc amend'
alias arb='arc branch'
alias arbl='arc bland'
alias arco='arc cover'
alias arci='arc commit'

@@ -17,6 +18,7 @@ alias ardp='arc diff --preview' # creates a new diff in the phab interface

alias are='arc export'
alias arh='arc help'
alias arho='arc hotfix'
alias arl='arc land'
alias arli='arc lint'
alias arls='arc list'
@ -1,4 +1,4 @@
|
|||
# Archlinux plugin
|
||||
# Arch Linux plugin
|
||||
|
||||
This plugin adds some aliases and functions to work with Arch Linux.
|
||||
|
||||
|
|
@ -10,145 +10,174 @@ plugins=(... archlinux)
|
|||
|
||||
## Features
|
||||
|
||||
#### YAY
|
||||
### Pacman
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|------------------------------------|---------------------------------------------------------------------|
|
||||
| yaconf | yay -Pg | Print current configuration |
|
||||
| yain | yay -S | Install packages from the repositories |
|
||||
| yains | yay -U | Install a package from a local file |
|
||||
| yainsd | yay -S --asdeps | Install packages as dependencies of another package |
|
||||
| yaloc | yay -Qi | Display information about a package in the local database |
|
||||
| yalocs | yay -Qs | Search for packages in the local database |
|
||||
| yalst | yay -Qe | List installed packages including from AUR (tagged as "local") |
|
||||
| yamir | yay -Syy | Force refresh of all package lists after updating mirrorlist |
|
||||
| yaorph | yay -Qtd | Remove orphans using yay |
|
||||
| yare | yay -R | Remove packages, keeping its settings and dependencies |
|
||||
| yarem | yay -Rns | Remove packages, including its settings and unneeded dependencies |
|
||||
| yarep | yay -Si | Display information about a package in the repositories |
|
||||
| yareps | yay -Ss | Search for packages in the repositories |
|
||||
| yaupg | yay -Syu | Sync with repositories before upgrading packages |
|
||||
| yasu | yay -Syu --no-confirm | Same as `yaupg`, but without confirmation |
|
||||
| Alias | Command | Description |
|
||||
|--------------|----------------------------------------|------------------------------------------------------------------|
|
||||
| pacin | `sudo pacman -S` | Install packages from the repositories |
|
||||
| pacins | `sudo pacman -U` | Install a package from a local file |
|
||||
| pacinsd | `sudo pacman -S --asdeps` | Install packages as dependencies of another package |
|
||||
| paclean | `sudo pacman -Sc` | Clean out old and unused caches and packages |
|
||||
| pacloc | `pacman -Qi` | Display information about a package in the local database |
|
||||
| paclocs | `pacman -Qs` | Search for packages in the local database |
|
||||
| paclr | `sudo pacman -Scc` | Remove all files from the cache |
|
||||
| paclsorphans | `sudo pacman -Qdt` | List all orphaned packages |
|
||||
| pacmir | `sudo pacman -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| pacre | `sudo pacman -R` | Remove packages, keeping its settings and dependencies |
|
||||
| pacrem | `sudo pacman -Rns` | Remove packages, including its settings and dependencies |
|
||||
| pacrep | `pacman -Si` | Display information about a package in the repositories |
|
||||
| pacreps | `pacman -Ss` | Search for packages in the repositories |
|
||||
| pacrmorphans | `sudo pacman -Rs $(pacman -Qtdq)` | Delete all orphaned packages |
|
||||
| pacupd | `sudo pacman -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| pacupg | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
|
||||
| pacfileupg | `sudo pacman -Fy` | Download fresh package databases from the server |
|
||||
| pacfiles | `pacman -F` | Search package file names for matching strings |
|
||||
| pacls | `pacman -Ql` | List files in a package |
|
||||
| pacown | `pacman -Qo` | Show which package owns a file |
|
||||
| upgrade[¹](#f1) | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### TRIZEN
|
||||
| Function | Description |
|
||||
|----------------|-----------------------------------------------------------|
|
||||
| pacdisowned | List all disowned files in your system |
|
||||
| paclist | List all explicitly installed packages with a description |
|
||||
| pacmanallkeys | Get all keys for developers and trusted users |
|
||||
| pacmansignkeys | Locally trust all keys passed as parameters |
|
||||
| pacweb | Open the website of an ArchLinux package |
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|------------------------------------|---------------------------------------------------------------------|
|
||||
| trconf | trizen -C | Fix all configuration files with vimdiff |
|
||||
| trin | trizen -S | Install packages from the repositories |
|
||||
| trins | trizen -U | Install a package from a local file |
|
||||
| trinsd | trizen -S --asdeps | Install packages as dependencies of another package |
|
||||
| trloc | trizen -Qi | Display information about a package in the local database |
|
||||
| trlocs | trizen -Qs | Search for packages in the local database |
|
||||
| trlst | trizen -Qe | List installed packages including from AUR (tagged as "local") |
|
||||
| trmir | trizen -Syy | Force refresh of all package lists after updating mirrorlist |
|
||||
| trorph | trizen -Qtd | Remove orphans using trizen |
|
||||
| trre | trizen -R | Remove packages, keeping its settings and dependencies |
|
||||
| trrem | trizen -Rns | Remove packages, including its settings and unneeded dependencies |
|
||||
| trrep | trizen -Si | Display information about a package in the repositories |
|
||||
| trreps | trizen -Ss | Search for packages in the repositories |
|
||||
| trupd | trizen -Sy && sudo abs && sudo aur | Update and refresh local package, ABS and AUR databases |
|
||||
| trupd | trizen -Sy && sudo abs | Update and refresh the local package and ABS databases |
|
||||
| trupd | trizen -Sy && sudo aur | Update and refresh the local package and AUR databases |
|
||||
| trupd | trizen -Sy | Update and refresh the local package database |
|
||||
| trupg | trizen -Syua | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| trsu | trizen -Syua --no-confirm | Same as `trupg`, but without confirmation |
|
||||
| upgrade | trizen -Syu | Sync with repositories before upgrading packages |
|
||||
Note: paclist used to print packages with a description which are (1) explicitly installed
|
||||
and (2) available for upgrade. Due to flawed scripting, it also printed all packages if no
|
||||
upgrades were available. Use `pacman -Que` instead.
|
||||
|
||||
#### YAOURT
|
||||
### AUR helpers
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|------------------------------------|---------------------------------------------------------------------|
|
||||
| yaconf | yaourt -C | Fix all configuration files with vimdiff |
|
||||
| yain | yaourt -S | Install packages from the repositories |
|
||||
| yains | yaourt -U | Install a package from a local file |
|
||||
| yainsd | yaourt -S --asdeps | Install packages as dependencies of another package |
|
||||
| yaloc | yaourt -Qi | Display information about a package in the local database |
|
||||
| yalocs | yaourt -Qs | Search for packages in the local database |
|
||||
| yalst | yaourt -Qe | List installed packages including from AUR (tagged as "local") |
|
||||
| yamir | yaourt -Syy | Force refresh of all package lists after updating mirrorlist |
|
||||
| yaorph | yaourt -Qtd | Remove orphans using yaourt |
|
||||
| yare | yaourt -R | Remove packages, keeping its settings and dependencies |
|
||||
| yarem | yaourt -Rns | Remove packages, including its settings and unneeded dependencies |
|
||||
| yarep | yaourt -Si | Display information about a package in the repositories |
|
||||
| yareps | yaourt -Ss | Search for packages in the repositories |
|
||||
| yaupd | yaourt -Sy && sudo abs && sudo aur | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupd | yaourt -Sy && sudo abs | Update and refresh the local package and ABS databases |
|
||||
| yaupd | yaourt -Sy && sudo aur | Update and refresh the local package and AUR databases |
|
||||
| yaupd | yaourt -Sy | Update and refresh the local package database |
|
||||
| yaupg | yaourt -Syua | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| yasu | yaourt -Syua --no-confirm | Same as `yaupg`, but without confirmation |
|
||||
| upgrade | yaourt -Syu | Sync with repositories before upgrading packages |
|
||||
#### Aura
|
||||
|
||||
#### PACAUR
|
||||
| Alias | Command | Description |
|
||||
|---------|-------------------------------------------------|-------------------------------------------------------------------------|
|
||||
| auclean | `sudo aura -Sc` | Clean out old and unused caches and packages |
|
||||
| auclr | `sudo aura -Scc` | Remove all files from the cache |
|
||||
| auin | `sudo aura -S` | Install packages from the repositories |
|
||||
| aurin | `sudo aura -A` | Install packages from the repositories |
|
||||
| auins | `sudo aura -U` | Install a package from a local file |
|
||||
| auinsd | `sudo aura -S --asdeps` | Install packages as dependencies of another package (repositories only) |
|
||||
| aurinsd | `sudo aura -A --asdeps` | Install packages as dependencies of another package (AUR only) |
|
||||
| auloc | `aura -Qi` | Display information about a package in the local database |
|
||||
| aulocs | `aura -Qs` | Search for packages in the local database |
|
||||
| auls | `aura -Qql` | List all files owned by a given package |
|
||||
| aulst | `aura -Qe` | List installed packages including from AUR (tagged as "local") |
|
||||
| aumir | `sudo aura -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| aurph | `sudo aura -Oj` | Remove orphans using aura |
|
||||
| auown | `aura -Qqo` | Search for packages that own the specified file(s) |
|
||||
| aure | `sudo aura -R` | Remove packages, keeping its settings and dependencies |
|
||||
| aurem | `sudo aura -Rns` | Remove packages, including its settings and unneeded dependencies |
|
||||
| aurep | `aura -Si` | Display information about a package in the repositories |
|
||||
| aurrep | `aura -Ai` | Display information about a package from AUR |
|
||||
| aureps | `aura -As --both` | Search for packages in the repositories and AUR |
|
||||
| auras | `aura -As --both` | Same as above |
|
||||
| auupd | `sudo aura -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| auupg | `sudo sh -c "aura -Syu && aura -Au"` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| ausu | `sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"` | Same as `auupg`, but without confirmation |
|
||||
| upgrade[¹](#f1) | `sudo aura -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|------------------------------------|---------------------------------------------------------------------|
|
||||
| pain | pacaur -S | Install packages from the repositories |
|
||||
| pains | pacaur -U | Install a package from a local file |
|
||||
| painsd | pacaur -S --asdeps | Install packages as dependencies of another package |
|
||||
| paloc | pacaur -Qi | Display information about a package in the local database |
|
||||
| palocs | pacaur -Qs | Search for packages in the local database |
|
||||
| palst | pacaur -Qe | List installed packages including from AUR (tagged as "local") |
|
||||
| pamir | pacaur -Syy | Force refresh of all package lists after updating mirrorlist |
|
||||
| paorph | pacaur -Qtd | Remove orphans using pacaur |
|
||||
| pare | pacaur -R | Remove packages, keeping its settings and dependencies |
|
||||
| parem | pacaur -Rns | Remove packages, including its settings and unneeded dependencies |
|
||||
| parep | pacaur -Si | Display information about a package in the repositories |
|
||||
| pareps | pacaur -Ss | Search for packages in the repositories |
|
||||
| paupd | pacaur -Sy && sudo abs && sudo aur | Update and refresh local package, ABS and AUR databases |
|
||||
| paupd | pacaur -Sy && sudo abs | Update and refresh the local package and ABS databases |
|
||||
| paupd | pacaur -Sy && sudo aur | Update and refresh the local package and AUR databases |
|
||||
| paupd | pacaur -Sy | Update and refresh the local package database |
|
||||
| paupg | pacaur -Syua | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| pasu | pacaur -Syua --no-confirm | Same as `paupg`, but without confirmation |
|
||||
| upgrade | pacaur -Syu | Sync with repositories before upgrading packages |
|
||||
| Function | Description |
|
||||
|-----------------|---------------------------------------------------------------------|
|
||||
| auownloc _file_ | Display information about a package that owns the specified file(s) |
|
||||
| auownls _file_ | List all files owned by a package that owns the specified file(s) |
|
||||
|
||||
#### PACMAN
|
||||
#### Pacaur
|
||||
|
||||
| Alias | Command | Description |
|
||||
|--------------|-----------------------------------------|--------------------------------------------------------------|
|
||||
| pacin | sudo pacman -S | Install packages from the repositories |
|
||||
| pacins | sudo pacman -U | Install a package from a local file |
|
||||
| pacinsd | sudo pacman -S --asdeps | Install packages as dependencies of another package |
|
||||
| pacloc | pacman -Qi | Display information about a package in the local database |
|
||||
| paclocs | pacman -Qs | Search for packages in the local database |
|
||||
| paclsorphans | sudo pacman -Qdt | List all orphaned packages |
|
||||
| pacmir | sudo pacman -Syy | Force refresh of all package lists after updating mirrorlist |
|
||||
| pacre | sudo pacman -R | Remove packages, keeping its settings and dependencies |
|
||||
| pacrem | sudo pacman -Rns | Remove packages, including its settings and dependencies |
|
||||
| pacrep | pacman -Si | Display information about a package in the repositories |
|
||||
| pacreps | pacman -Ss | Search for packages in the repositories |
|
||||
| pacrmorphans | sudo pacman -Rs $(pacman -Qtdq) | Delete all orphaned packages |
|
||||
| pacupd | sudo pacman -Sy && sudo abs && sudo aur | Update and refresh the local package, ABS and AUR databases |
|
||||
| pacupd | sudo pacman -Sy && sudo abs | Update and refresh the local package and ABS databases |
|
||||
| pacupd | sudo pacman -Sy && sudo aur | Update and refresh the local package and AUR databases |
|
||||
| pacupd | sudo pacman -Sy | Update and refresh the local package database |
|
||||
| pacupg | sudo pacman -Syu | Sync with repositories before upgrading packages |
|
||||
| upgrade | sudo pacman -Syu | Sync with repositories before upgrading packages |
|
||||
| pacfileupg | sudo pacman -Fy | Download fresh package databases from the server |
|
||||
| pacfiles | pacman -F | Search package file names for matching strings |
|
||||
| pacls | pacman -Ql | List files in a package |
|
||||
| pacown | pacman -Qo | Show which package owns a file |
|
||||
| Alias | Command | Description |
|
||||
|---------|-----------------------------------|---------------------------------------------------------------------|
|
||||
| pacclean| `pacaur -Sc` | Clean out old and unused caches and packages |
|
||||
| pacclr | `pacaur -Scc` | Remove all files from the cache |
|
||||
| pain | `pacaur -S` | Install packages from the repositories |
|
||||
| pains | `pacaur -U` | Install a package from a local file |
|
||||
| painsd | `pacaur -S --asdeps` | Install packages as dependencies of another package |
|
||||
| paloc | `pacaur -Qi` | Display information about a package in the local database |
|
||||
| palocs | `pacaur -Qs` | Search for packages in the local database |
|
||||
| palst | `pacaur -Qe` | List installed packages including from AUR (tagged as "local") |
|
||||
| pamir | `pacaur -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| paorph | `pacaur -Qtd` | Remove orphans using pacaur |
|
||||
| pare | `pacaur -R` | Remove packages, keeping its settings and dependencies |
|
||||
| parem | `pacaur -Rns` | Remove packages, including its settings and unneeded dependencies |
|
||||
| parep | `pacaur -Si` | Display information about a package in the repositories |
|
||||
| pareps | `pacaur -Ss` | Search for packages in the repositories |
|
||||
| paupd | `pacaur -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| paupg | `pacaur -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| pasu | `pacaur -Syua --no-confirm` | Same as `paupg`, but without confirmation |
|
||||
| upgrade[¹](#f1) | `pacaur -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
| Function | Description |
|
||||
|----------------|------------------------------------------------------|
|
||||
| pacdisowned | List all disowned files in your system |
|
||||
| paclist | List all installed packages with a short description |
|
||||
| pacmanallkeys | Get all keys for developers and trusted users |
|
||||
| pacmansignkeys | Locally trust all keys passed as parameters |
|
||||
| pacweb | Open the website of an ArchLinux package |
|
||||
#### Trizen
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|-----------------------------------|---------------------------------------------------------------------|
|
||||
| trconf | `trizen -C` | Fix all configuration files with vimdiff |
|
||||
| trclean | `trizen -Sc` | Clean out old and unused caches and packages |
|
||||
| trclr | `trizen -Scc` | Remove all files from the cache |
|
||||
| trin | `trizen -S` | Install packages from the repositories |
|
||||
| trins | `trizen -U` | Install a package from a local file |
|
||||
| trinsd | `trizen -S --asdeps` | Install packages as dependencies of another package |
|
||||
| trloc | `trizen -Qi` | Display information about a package in the local database |
|
||||
| trlocs | `trizen -Qs` | Search for packages in the local database |
|
||||
| trlst | `trizen -Qe` | List installed packages including from AUR (tagged as "local") |
|
||||
| trmir | `trizen -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| trorph | `trizen -Qtd` | Remove orphans using trizen |
|
||||
| trre | `trizen -R` | Remove packages, keeping its settings and dependencies |
|
||||
| trrem | `trizen -Rns` | Remove packages, including its settings and unneeded dependencies |
|
||||
| trrep | `trizen -Si` | Display information about a package in the repositories |
|
||||
| trreps | `trizen -Ss` | Search for packages in the repositories |
|
||||
| trupd | `trizen -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| trupg | `trizen -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| trsu | `trizen -Syua --no-confirm` | Same as `trupg`, but without confirmation |
|
||||
| upgrade[¹](#f1) | `trizen -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### Yay
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|--------------------------------|-------------------------------------------------------------------|
|
||||
| yaconf | `yay -Pg` | Print current configuration |
|
||||
| yaclean | `yay -Sc` | Clean out old and unused caches and packages |
|
||||
| yaclr | `yay -Scc` | Remove all files from the cache |
|
||||
| yain | `yay -S` | Install packages from the repositories |
|
||||
| yains | `yay -U` | Install a package from a local file |
|
||||
| yainsd | `yay -S --asdeps` | Install packages as dependencies of another package |
|
||||
| yaloc | `yay -Qi` | Display information about a package in the local database |
|
||||
| yalocs | `yay -Qs` | Search for packages in the local database |
|
||||
| yalst | `yay -Qe` | List installed packages including from AUR (tagged as "local") |
|
||||
| yamir | `yay -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| yaorph | `yay -Qtd` | Remove orphans using yay |
|
||||
| yare | `yay -R` | Remove packages, keeping its settings and dependencies |
|
||||
| yarem | `yay -Rns` | Remove packages, including its settings and unneeded dependencies |
|
||||
| yarep | `yay -Si` | Display information about a package in the repositories |
|
||||
| yareps | `yay -Ss` | Search for packages in the repositories |
|
||||
| yaupd | `yay -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupg | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
| yasu | `yay -Syu --no-confirm` | Same as `yaupg`, but without confirmation |
|
||||
| upgrade[¹](#f1) | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
---
|
||||
|
||||
<span id="f1">¹</span>
|
||||
The `upgrade` alias is set for all package managers. Its value will depend on
|
||||
whether the package manager is installed, checked in the following order:
|
||||
|
||||
1. `yay`
|
||||
2. `trizen`
|
||||
3. `pacaur`
|
||||
4. `aura`
|
||||
5. `pacman`
|
||||
|
||||
## Contributors
|
||||
|
||||
- Benjamin Boudreau - dreurmail@gmail.com
|
||||
- Celso Miranda - contacto@celsomiranda.net
|
||||
- ratijas (ivan tkachenko) - me@ratijas.tk
|
||||
- Juraj Fiala - doctorjellyface@riseup.net
|
||||
- KhasMek - Boushh@gmail.com
|
||||
- Majora320 (Moses Miller) - Majora320@gmail.com
|
||||
- Martin Putniorz - mputniorz@gmail.com
|
||||
- MatthR3D - matthr3d@gmail.com
|
||||
- ornicar - thibault.duplessis@gmail.com
|
||||
- Juraj Fiala - doctorjellyface@riseup.net
|
||||
- Majora320 (Moses Miller) - Majora320@gmail.com
|
||||
- Ybalrid (Arthur Brainville) - ybalrid@ybalrid.info
|
||||
- Jeff M. Hubbard - jeffmhubbard@gmail.com
|
||||
- K. Harishankar(harishnkr) - hari2menon1234@gmail.com
|
||||
|
|
|
|||
|
|
@ -1,143 +1,13 @@
|
|||
if (( $+commands[trizen] )); then
|
||||
alias trconf='trizen -C'
|
||||
alias trupg='trizen -Syua'
|
||||
alias trsu='trizen -Syua --noconfirm'
|
||||
alias trin='trizen -S'
|
||||
alias trins='trizen -U'
|
||||
alias trre='trizen -R'
|
||||
alias trrem='trizen -Rns'
|
||||
alias trrep='trizen -Si'
|
||||
alias trreps='trizen -Ss'
|
||||
alias trloc='trizen -Qi'
|
||||
alias trlocs='trizen -Qs'
|
||||
alias trlst='trizen -Qe'
|
||||
alias trorph='trizen -Qtd'
|
||||
alias trinsd='trizen -S --asdeps'
|
||||
alias trmir='trizen -Syy'
|
||||
|
||||
|
||||
if (( $+commands[abs] && $+commands[aur] )); then
|
||||
alias trupd='trizen -Sy && sudo abs && sudo aur'
|
||||
elif (( $+commands[abs] )); then
|
||||
alias trupd='trizen -Sy && sudo abs'
|
||||
elif (( $+commands[aur] )); then
|
||||
alias trupd='trizen -Sy && sudo aur'
|
||||
else
|
||||
alias trupd='trizen -Sy'
|
||||
fi
|
||||
fi
|
||||
|
||||
if (( $+commands[yaourt] )); then
|
||||
alias yaconf='yaourt -C'
|
||||
alias yaupg='yaourt -Syua'
|
||||
alias yasu='yaourt -Syua --noconfirm'
|
||||
alias yain='yaourt -S'
|
||||
alias yains='yaourt -U'
|
||||
alias yare='yaourt -R'
|
||||
alias yarem='yaourt -Rns'
|
||||
alias yarep='yaourt -Si'
|
||||
alias yareps='yaourt -Ss'
|
||||
alias yaloc='yaourt -Qi'
|
||||
alias yalocs='yaourt -Qs'
|
||||
alias yalst='yaourt -Qe'
|
||||
alias yaorph='yaourt -Qtd'
|
||||
alias yainsd='yaourt -S --asdeps'
|
||||
alias yamir='yaourt -Syy'
|
||||
|
||||
|
||||
if (( $+commands[abs] && $+commands[aur] )); then
|
||||
alias yaupd='yaourt -Sy && sudo abs && sudo aur'
|
||||
elif (( $+commands[abs] )); then
|
||||
alias yaupd='yaourt -Sy && sudo abs'
|
||||
elif (( $+commands[aur] )); then
|
||||
alias yaupd='yaourt -Sy && sudo aur'
|
||||
else
|
||||
alias yaupd='yaourt -Sy'
|
||||
fi
|
||||
fi
|
||||
|
||||
if (( $+commands[yay] )); then
|
||||
alias yaconf='yay -Pg'
|
||||
alias yaupg='yay -Syu'
|
||||
alias yasu='yay -Syu --noconfirm'
|
||||
alias yain='yay -S'
|
||||
alias yains='yay -U'
|
||||
alias yare='yay -R'
|
||||
alias yarem='yay -Rns'
|
||||
alias yarep='yay -Si'
|
||||
alias yareps='yay -Ss'
|
||||
alias yaloc='yay -Qi'
|
||||
alias yalocs='yay -Qs'
|
||||
alias yalst='yay -Qe'
|
||||
alias yaorph='yay -Qtd'
|
||||
alias yainsd='yay -S --asdeps'
|
||||
alias yamir='yay -Syy'
|
||||
|
||||
|
||||
if (( $+commands[abs] && $+commands[aur] )); then
|
||||
alias yaupd='yay -Sy && sudo abs && sudo aur'
|
||||
elif (( $+commands[abs] )); then
|
||||
alias yaupd='yay -Sy && sudo abs'
|
||||
elif (( $+commands[aur] )); then
|
||||
alias yaupd='yay -Sy && sudo aur'
|
||||
else
|
||||
alias yaupd='yay -Sy'
|
||||
fi
|
||||
fi
|
||||
|
||||
if (( $+commands[pacaur] )); then
|
||||
alias paupg='pacaur -Syu'
|
||||
alias pasu='pacaur -Syu --noconfirm'
|
||||
alias pain='pacaur -S'
|
||||
alias pains='pacaur -U'
|
||||
alias pare='pacaur -R'
|
||||
alias parem='pacaur -Rns'
|
||||
alias parep='pacaur -Si'
|
||||
alias pareps='pacaur -Ss'
|
||||
alias paloc='pacaur -Qi'
|
||||
alias palocs='pacaur -Qs'
|
||||
alias palst='pacaur -Qe'
|
||||
alias paorph='pacaur -Qtd'
|
||||
alias painsd='pacaur -S --asdeps'
|
||||
alias pamir='pacaur -Syy'
|
||||
|
||||
if (( $+commands[abs] && $+commands[aur] )); then
|
||||
alias paupd='pacaur -Sy && sudo abs && sudo aur'
|
||||
elif (( $+commands[abs] )); then
|
||||
alias paupd='pacaur -Sy && sudo abs'
|
||||
elif (( $+commands[aur] )); then
|
||||
alias paupd='pacaur -Sy && sudo aur'
|
||||
else
|
||||
alias paupd='pacaur -Sy'
|
||||
fi
|
||||
fi
|
||||
|
||||
if (( $+commands[trizen] )); then
|
||||
function upgrade() {
|
||||
trizen -Syu
|
||||
}
|
||||
elif (( $+commands[pacaur] )); then
|
||||
function upgrade() {
|
||||
pacaur -Syu
|
||||
}
|
||||
elif (( $+commands[yaourt] )); then
|
||||
function upgrade() {
|
||||
yaourt -Syu
|
||||
}
|
||||
elif (( $+commands[yay] )); then
|
||||
function upgrade() {
|
||||
yay -Syu
|
||||
}
|
||||
else
|
||||
function upgrade() {
|
||||
sudo pacman -Syu
|
||||
}
|
||||
fi
|
||||
#######################################
|
||||
# Pacman #
|
||||
#######################################
|
||||
|
||||
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
|
||||
alias pacupg='sudo pacman -Syu'
|
||||
alias pacin='sudo pacman -S'
|
||||
alias paclean='sudo pacman -Sc'
|
||||
alias pacins='sudo pacman -U'
|
||||
alias paclr='sudo pacman -Scc'
|
||||
alias pacre='sudo pacman -R'
|
||||
alias pacrem='sudo pacman -Rns'
|
||||
alias pacrep='pacman -Si'
|
||||
|
|
@ -152,33 +22,24 @@ alias pacfileupg='sudo pacman -Fy'
|
|||
alias pacfiles='pacman -F'
|
||||
alias pacls='pacman -Ql'
|
||||
alias pacown='pacman -Qo'
|
||||
|
||||
|
||||
if (( $+commands[abs] && $+commands[aur] )); then
|
||||
alias pacupd='sudo pacman -Sy && sudo abs && sudo aur'
|
||||
elif (( $+commands[abs] )); then
|
||||
alias pacupd='sudo pacman -Sy && sudo abs'
|
||||
elif (( $+commands[aur] )); then
|
||||
alias pacupd='sudo pacman -Sy && sudo aur'
|
||||
else
|
||||
alias pacupd='sudo pacman -Sy'
|
||||
fi
|
||||
alias pacupd="sudo pacman -Sy"
|
||||
alias upgrade='sudo pacman -Syu'
|
||||
|
||||
function paclist() {
|
||||
# Source: https://bbs.archlinux.org/viewtopic.php?id=93683
|
||||
LC_ALL=C pacman -Qei $(pacman -Qu | cut -d " " -f 1) | \
|
||||
awk 'BEGIN {FS=":"} /^Name/{printf("\033[1;36m%s\033[1;37m", $2)} /^Description/{print $2}'
|
||||
# Based on https://bbs.archlinux.org/viewtopic.php?id=93683
|
||||
pacman -Qqe | \
|
||||
xargs -I '{}' \
|
||||
expac "${bold_color}% 20n ${fg_no_bold[white]}%d${reset_color}" '{}'
|
||||
}
|
||||
|
||||
function pacdisowned() {
|
||||
emulate -L zsh
|
||||
|
||||
local tmp db fs
|
||||
tmp=${TMPDIR-/tmp}/pacman-disowned-$UID-$$
|
||||
db=$tmp/db
|
||||
fs=$tmp/fs
|
||||
|
||||
mkdir "$tmp"
|
||||
trap 'rm -rf "$tmp"' EXIT
|
||||
trap 'rm -rf "$tmp"' EXIT
|
||||
|
||||
pacman -Qlq | sort -u > "$db"
|
||||
|
||||
|
|
@ -188,16 +49,11 @@ function pacdisowned() {
|
|||
comm -23 "$fs" "$db"
|
||||
}
|
||||
|
||||
function pacmanallkeys() {
|
||||
emulate -L zsh
|
||||
curl -s https://www.archlinux.org/people/{developers,trustedusers}/ | \
|
||||
awk -F\" '(/pgp.mit.edu/) { sub(/.*search=0x/,""); print $1}' | \
|
||||
xargs sudo pacman-key --recv-keys
|
||||
}
|
||||
alias pacmanallkeys='sudo pacman-key --refresh-keys'
|
||||
|
||||
function pacmansignkeys() {
|
||||
emulate -L zsh
|
||||
for key in $*; do
|
||||
local key
|
||||
for key in $@; do
|
||||
sudo pacman-key --recv-keys $key
|
||||
sudo pacman-key --lsign-key $key
|
||||
printf 'trust\n3\n' | sudo gpg --homedir /etc/pacman.d/gnupg \
|
||||
|
|
@ -207,13 +63,122 @@ function pacmansignkeys() {
|
|||
|
||||
if (( $+commands[xdg-open] )); then
|
||||
function pacweb() {
|
||||
pkg="$1"
|
||||
infos="$(LANG=C pacman -Si "$pkg")"
|
||||
if [[ $# = 0 || "$1" =~ '--help|-h' ]]; then
|
||||
local underline_color="\e[${color[underline]}m"
|
||||
echo "$0 - open the website of an ArchLinux package"
|
||||
echo
|
||||
echo "Usage:"
|
||||
echo " $bold_color$0$reset_color ${underline_color}target${reset_color}"
|
||||
return 1
|
||||
fi
|
||||
|
||||
local pkg="$1"
|
||||
local infos="$(LANG=C pacman -Si "$pkg")"
|
||||
if [[ -z "$infos" ]]; then
|
||||
return
|
||||
fi
|
||||
repo="$(grep -m 1 '^Repo' <<< "$infos" | grep -oP '[^ ]+$')"
|
||||
arch="$(grep -m 1 '^Arch' <<< "$infos" | grep -oP '[^ ]+$')"
|
||||
local repo="$(grep -m 1 '^Repo' <<< "$infos" | grep -oP '[^ ]+$')"
|
||||
local arch="$(grep -m 1 '^Arch' <<< "$infos" | grep -oP '[^ ]+$')"
|
||||
xdg-open "https://www.archlinux.org/packages/$repo/$arch/$pkg/" &>/dev/null
|
||||
}
|
||||
fi
|
||||
|
||||
#######################################
|
||||
# AUR helpers #
|
||||
#######################################
|
||||
|
||||
if (( $+commands[aura] )); then
|
||||
alias auin='sudo aura -S'
|
||||
alias aurin='sudo aura -A'
|
||||
alias auclean='sudo aura -Sc'
|
||||
alias auclr='sudo aura -Scc'
|
||||
alias auins='sudo aura -U'
|
||||
alias auinsd='sudo aura -S --asdeps'
|
||||
alias aurinsd='sudo aura -A --asdeps'
|
||||
alias auloc='aura -Qi'
|
||||
alias aulocs='aura -Qs'
|
||||
alias aulst='aura -Qe'
|
||||
alias aumir='sudo aura -Syy'
|
||||
alias aurph='sudo aura -Oj'
|
||||
alias aure='sudo aura -R'
|
||||
alias aurem='sudo aura -Rns'
|
||||
alias aurep='aura -Si'
|
||||
alias aurrep='aura -Ai'
|
||||
alias aureps='aura -As --both'
|
||||
alias auras='aura -As --both'
|
||||
alias auupd="sudo aura -Sy"
|
||||
alias auupg='sudo sh -c "aura -Syu && aura -Au"'
|
||||
alias ausu='sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"'
|
||||
alias upgrade='sudo aura -Syu'
|
||||
|
||||
# extra bonus specially for aura
|
||||
alias auown="aura -Qqo"
|
||||
alias auls="aura -Qql"
|
||||
function auownloc() { aura -Qi $(aura -Qqo $@); }
|
||||
function auownls () { aura -Qql $(aura -Qqo $@); }
|
||||
fi
|
||||
|
||||
if (( $+commands[pacaur] )); then
|
||||
alias pacclean='pacaur -Sc'
|
||||
alias pacclr='pacaur -Scc'
|
||||
alias paupg='pacaur -Syu'
|
||||
alias pasu='pacaur -Syu --noconfirm'
|
||||
alias pain='pacaur -S'
|
||||
alias pains='pacaur -U'
|
||||
alias pare='pacaur -R'
|
||||
alias parem='pacaur -Rns'
|
||||
alias parep='pacaur -Si'
|
||||
alias pareps='pacaur -Ss'
|
||||
alias paloc='pacaur -Qi'
|
||||
alias palocs='pacaur -Qs'
|
||||
alias palst='pacaur -Qe'
|
||||
alias paorph='pacaur -Qtd'
|
||||
alias painsd='pacaur -S --asdeps'
|
||||
alias pamir='pacaur -Syy'
|
||||
alias paupd="pacaur -Sy"
|
||||
alias upgrade='pacaur -Syu'
|
||||
fi
|
||||
|
||||
if (( $+commands[trizen] )); then
|
||||
alias trconf='trizen -C'
|
||||
alias trupg='trizen -Syua'
|
||||
alias trsu='trizen -Syua --noconfirm'
|
||||
alias trin='trizen -S'
|
||||
alias trclean='trizen -Sc'
|
||||
alias trclr='trizen -Scc'
|
||||
alias trins='trizen -U'
|
||||
alias trre='trizen -R'
|
||||
alias trrem='trizen -Rns'
|
||||
alias trrep='trizen -Si'
|
||||
alias trreps='trizen -Ss'
|
||||
alias trloc='trizen -Qi'
|
||||
alias trlocs='trizen -Qs'
|
||||
alias trlst='trizen -Qe'
|
||||
alias trorph='trizen -Qtd'
|
||||
alias trinsd='trizen -S --asdeps'
|
||||
alias trmir='trizen -Syy'
|
||||
alias trupd="trizen -Sy"
|
||||
alias upgrade='trizen -Syu'
|
||||
fi
|
||||
|
||||
if (( $+commands[yay] )); then
|
||||
alias yaconf='yay -Pg'
|
||||
alias yaclean='yay -Sc'
|
||||
alias yaclr='yay -Scc'
|
||||
alias yaupg='yay -Syu'
|
||||
alias yasu='yay -Syu --noconfirm'
|
||||
alias yain='yay -S'
|
||||
alias yains='yay -U'
|
||||
alias yare='yay -R'
|
||||
alias yarem='yay -Rns'
|
||||
alias yarep='yay -Si'
|
||||
alias yareps='yay -Ss'
|
||||
alias yaloc='yay -Qi'
|
||||
alias yalocs='yay -Qs'
|
||||
alias yalst='yay -Qe'
|
||||
alias yaorph='yay -Qtd'
|
||||
alias yainsd='yay -S --asdeps'
|
||||
alias yamir='yay -Syy'
|
||||
alias yaupd="yay -Sy"
|
||||
alias upgrade='yay -Syu'
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -2,18 +2,26 @@
|
|||
ASDF_DIR="${ASDF_DIR:-$HOME/.asdf}"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR/completions"
|
||||
|
||||
# If not found, check for archlinux/AUR package (/opt/asdf-vm/)
|
||||
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && [[ -f "/opt/asdf-vm/asdf.sh" ]]; then
|
||||
ASDF_DIR="/opt/asdf-vm"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR"
|
||||
fi
|
||||
|
||||
# If not found, check for Homebrew package
|
||||
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && (( $+commands[brew] )); then
|
||||
ASDF_DIR="$(brew --prefix asdf)"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR/etc/bash_completion.d"
|
||||
brew_prefix="$(brew --prefix asdf)"
|
||||
ASDF_DIR="${brew_prefix}/libexec"
|
||||
ASDF_COMPLETIONS="${brew_prefix}/etc/bash_completion.d"
|
||||
unset brew_prefix
|
||||
fi
|
||||
|
||||
# Load command
|
||||
if [[ -f "$ASDF_DIR/asdf.sh" ]]; then
|
||||
. "$ASDF_DIR/asdf.sh"
|
||||
. "$ASDF_DIR/asdf.sh"
|
||||
|
||||
# Load completions
|
||||
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
|
||||
. "$ASDF_COMPLETIONS/asdf.bash"
|
||||
fi
|
||||
# Load completions
|
||||
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
|
||||
. "$ASDF_COMPLETIONS/asdf.bash"
|
||||
fi
|
||||
fi
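Because `ASDF_DIR` is only defaulted when it is unset, an installation in a non-standard location can be picked up by exporting the variable before the plugin loads; a minimal sketch (the path is hypothetical):

```zsh
# ~/.zshrc, before oh-my-zsh.sh is sourced
export ASDF_DIR="$HOME/tools/asdf"
```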
@ -13,6 +13,7 @@ if ! type autoenv_init >/dev/null; then
|
|||
~/.autoenv
|
||||
~/.local/bin
|
||||
/usr/local/opt/autoenv
|
||||
/opt/homebrew/opt/autoenv
|
||||
/usr/local/bin
|
||||
/usr/share/autoenv-git
|
||||
~/Library/Python/bin
|
||||
|
|
|
|||
|
|
@ -8,8 +8,10 @@ autojump_paths=(
|
|||
/etc/profile.d/autojump.zsh # manual installation
|
||||
/etc/profile.d/autojump.sh # Gentoo installation
|
||||
/usr/local/share/autojump/autojump.zsh # FreeBSD installation
|
||||
/usr/pkg/share/autojump/autojump.zsh # NetBSD installation
|
||||
/opt/local/etc/profile.d/autojump.sh # macOS with MacPorts
|
||||
/usr/local/etc/profile.d/autojump.sh # macOS with Homebrew (default)
|
||||
/opt/homebrew/etc/profile.d/autojump.sh # macOS with Homebrew (default on M1 macs)
|
||||
)
|
||||
|
||||
for file in $autojump_paths; do
|
||||
|
|
|
|||
|
|
@ -14,6 +14,14 @@ plugins=(... aws)
|
|||
* `asp [<profile>]`: sets `$AWS_PROFILE` and `$AWS_DEFAULT_PROFILE` (legacy) to `<profile>`.
|
||||
It also sets `$AWS_EB_PROFILE` to `<profile>` for the Elastic Beanstalk CLI.
|
||||
Run `asp` without arguments to clear the profile.
|
||||
* `asp [<profile>] login`: If AWS SSO has been configured in your aws profile, it will run the `aws sso login` command following profile selection.
|
||||
|
||||
* `acp [<profile>] [<mfa_token>]`: in addition to `asp` functionality, it actually changes
|
||||
the profile by assuming the role specified in the `<profile>` configuration. It supports
|
||||
MFA and sets `$AWS_ACCESS_KEY_ID`, `$AWS_SECRET_ACCESS_KEY` and `$AWS_SESSION_TOKEN`, if
|
||||
obtained. It requires the roles to be configured as per the
|
||||
[official guide](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
|
||||
Run `acp` without arguments to clear the profile.
|
||||
|
||||
* `agp`: gets the current value of `$AWS_PROFILE`.
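A quick illustration (the profile name is hypothetical, and it assumes `agp` simply prints the active profile as described):

```console
$ asp work
$ agp
work
```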
@ -33,6 +41,36 @@ plugins=(... aws)
|
|||
The plugin creates an `aws_prompt_info` function that you can use in your theme, which displays
|
||||
the current `$AWS_PROFILE`. It uses two variables to control how that is shown:
|
||||
|
||||
- ZSH_THEME_AWS_PREFIX: sets the prefix of the AWS_PROFILE. Defaults to `<aws:`.
|
||||
* ZSH_THEME_AWS_PREFIX: sets the prefix of the AWS_PROFILE. Defaults to `<aws:`.
|
||||
|
||||
- ZSH_THEME_AWS_SUFFIX: sets the suffix of the AWS_PROFILE. Defaults to `>`.
|
||||
* ZSH_THEME_AWS_SUFFIX: sets the suffix of the AWS_PROFILE. Defaults to `>`.
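For example, a custom theme or `.zshrc` could override both before the plugin loads (illustrative values):

```zsh
ZSH_THEME_AWS_PREFIX="aws:("
ZSH_THEME_AWS_SUFFIX=")"
```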
## Configuration
|
||||
|
||||
[Configuration and credential file settings](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-files.html) by AWS
|
||||
|
||||
### Scenario: IAM roles with a source profile and MFA authentication
|
||||
|
||||
Source profile credentials in `~/.aws/credentials`:
|
||||
|
||||
```
|
||||
[source-profile-name]
|
||||
aws_access_key_id = ...
|
||||
aws_secret_access_key = ...
|
||||
```
|
||||
|
||||
Role configuration in `~/.aws/config`:
|
||||
|
||||
```
|
||||
[profile source-profile-name]
|
||||
mfa_serial = arn:aws:iam::111111111111:mfa/myuser
|
||||
region = us-east-1
|
||||
output = json
|
||||
|
||||
[profile profile-with-role]
|
||||
role_arn = arn:aws:iam::9999999999999:role/myrole
|
||||
mfa_serial = arn:aws:iam::111111111111:mfa/myuser
|
||||
source_profile = source-profile-name
|
||||
region = us-east-1
|
||||
output = json
|
||||
```
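With the two profiles above, an `acp` session might look like this (output abridged; the MFA token is hypothetical):

```console
$ acp profile-with-role 123456
Please enter the session duration in seconds (900-43200; default: 3600, which is the default maximum for a role):
Assuming role arn:aws:iam::9999999999999:role/myrole using profile source-profile-name
Switched to AWS Profile: profile-with-role
```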
@ -21,6 +21,112 @@ function asp() {
|
|||
export AWS_DEFAULT_PROFILE=$1
|
||||
export AWS_PROFILE=$1
|
||||
export AWS_EB_PROFILE=$1
|
||||
|
||||
if [[ "$2" == "login" ]]; then
|
||||
aws sso login
|
||||
fi
|
||||
}
|
||||
|
||||
# AWS profile switch
|
||||
function acp() {
|
||||
if [[ -z "$1" ]]; then
|
||||
unset AWS_DEFAULT_PROFILE AWS_PROFILE AWS_EB_PROFILE
|
||||
unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN
|
||||
echo AWS profile cleared.
|
||||
return
|
||||
fi
|
||||
|
||||
local -a available_profiles
|
||||
available_profiles=($(aws_profiles))
|
||||
if [[ -z "${available_profiles[(r)$1]}" ]]; then
|
||||
echo "${fg[red]}Profile '$1' not found in '${AWS_CONFIG_FILE:-$HOME/.aws/config}'" >&2
|
||||
echo "Available profiles: ${(j:, :)available_profiles:-no profiles found}${reset_color}" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
local profile="$1"
|
||||
local mfa_token="$2"
|
||||
|
||||
# Get fallback credentials in case the aws command fails or no command is run
|
||||
local aws_access_key_id="$(aws configure get aws_access_key_id --profile $profile)"
|
||||
local aws_secret_access_key="$(aws configure get aws_secret_access_key --profile $profile)"
|
||||
local aws_session_token="$(aws configure get aws_session_token --profile $profile)"
|
||||
|
||||
|
||||
# First, if the profile has MFA configured, lets get the token and session duration
|
||||
local mfa_serial="$(aws configure get mfa_serial --profile $profile)"
|
||||
local sess_duration="$(aws configure get duration_seconds --profile $profile)"
|
||||
|
||||
if [[ -n "$mfa_serial" ]]; then
|
||||
local -a mfa_opt
|
||||
if [[ -z "$mfa_token" ]]; then
|
||||
echo -n "Please enter your MFA token for $mfa_serial: "
|
||||
read -r mfa_token
|
||||
fi
|
||||
if [[ -z "$sess_duration" ]]; then
|
||||
echo -n "Please enter the session duration in seconds (900-43200; default: 3600, which is the default maximum for a role): "
|
||||
read -r sess_duration
|
||||
fi
|
||||
mfa_opt=(--serial-number "$mfa_serial" --token-code "$mfa_token" --duration-seconds "${sess_duration:-3600}")
|
||||
fi
|
||||
|
||||
# Now see whether we need to just MFA for the current role, or assume a different one
|
||||
local role_arn="$(aws configure get role_arn --profile $profile)"
|
||||
local sess_name="$(aws configure get role_session_name --profile $profile)"
|
||||
|
||||
if [[ -n "$role_arn" ]]; then
|
||||
# Means we need to assume a specified role
|
||||
aws_command=(aws sts assume-role --role-arn "$role_arn" "${mfa_opt[@]}")
|
||||
|
||||
# Check whether external_id is configured to use while assuming the role
|
||||
local external_id="$(aws configure get external_id --profile $profile)"
|
||||
if [[ -n "$external_id" ]]; then
|
||||
aws_command+=(--external-id "$external_id")
|
||||
fi
|
||||
|
||||
# Get source profile to use to assume role
|
||||
local source_profile="$(aws configure get source_profile --profile $profile)"
|
||||
if [[ -z "$sess_name" ]]; then
|
||||
sess_name="${source_profile:-profile}"
|
||||
fi
|
||||
aws_command+=(--profile="${source_profile:-profile}" --role-session-name "${sess_name}")
|
||||
|
||||
echo "Assuming role $role_arn using profile ${source_profile:-profile}"
|
||||
else
|
||||
# Means we only need to do MFA
|
||||
aws_command=(aws sts get-session-token --profile="$profile" "${mfa_opt[@]}")
|
||||
echo "Obtaining session token for profile $profile"
|
||||
fi
|
||||
|
||||
# Format output of aws command for easier processing
|
||||
aws_command+=(--query '[Credentials.AccessKeyId,Credentials.SecretAccessKey,Credentials.SessionToken]' --output text)
|
||||
|
||||
# Run the aws command to obtain credentials
|
||||
local -a credentials
|
||||
credentials=(${(ps:\t:)"$(${aws_command[@]})"})
|
||||
|
||||
if [[ -n "$credentials" ]]; then
|
||||
aws_access_key_id="${credentials[1]}"
|
||||
aws_secret_access_key="${credentials[2]}"
|
||||
aws_session_token="${credentials[3]}"
|
||||
fi
|
||||
|
||||
# Switch to AWS profile
|
||||
if [[ -n "${aws_access_key_id}" && -n "$aws_secret_access_key" ]]; then
|
||||
export AWS_DEFAULT_PROFILE="$profile"
|
||||
export AWS_PROFILE="$profile"
|
||||
export AWS_EB_PROFILE="$profile"
|
||||
export AWS_ACCESS_KEY_ID="$aws_access_key_id"
|
||||
export AWS_SECRET_ACCESS_KEY="$aws_secret_access_key"
|
||||
|
||||
if [[ -n "$aws_session_token" ]]; then
|
||||
export AWS_SESSION_TOKEN="$aws_session_token"
|
||||
else
|
||||
unset AWS_SESSION_TOKEN
|
||||
fi
|
||||
|
||||
echo "Switched to AWS Profile: $profile"
|
||||
fi
|
||||
}
|
||||
|
||||
function aws_change_access_key() {
|
||||
|
|
@ -29,33 +135,33 @@ function aws_change_access_key() {
|
|||
return 1
|
||||
fi
|
||||
|
||||
echo Insert the credentials when asked.
|
||||
echo "Insert the credentials when asked."
|
||||
asp "$1" || return 1
|
||||
AWS_PAGER="" aws iam create-access-key
|
||||
AWS_PAGER="" aws configure --profile "$1"
|
||||
|
||||
echo You can now safely delete the old access key running \`aws iam delete-access-key --access-key-id ID\`
|
||||
echo Your current keys are:
|
||||
echo "You can now safely delete the old access key running \`aws iam delete-access-key --access-key-id ID\`"
|
||||
echo "Your current keys are:"
|
||||
AWS_PAGER="" aws iam list-access-keys
|
||||
}
|
||||
|
||||
function aws_profiles() {
|
||||
[[ -r "${AWS_CONFIG_FILE:-$HOME/.aws/config}" ]] || return 1
|
||||
grep '\[profile' "${AWS_CONFIG_FILE:-$HOME/.aws/config}"|sed -e 's/.*profile \([a-zA-Z0-9@_\.-]*\).*/\1/'
|
||||
grep --color=never -Eo '\[.*\]' "${AWS_CONFIG_FILE:-$HOME/.aws/config}" | sed -E 's/^[[:space:]]*\[(profile)?[[:space:]]*([^[:space:]]+)\][[:space:]]*$/\2/g'
|
||||
}
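With the example `~/.aws/config` from the README above, `aws_profiles` would print one profile name per line (a sketch):

```console
$ aws_profiles
source-profile-name
profile-with-role
```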
|
||||
|
||||
function _aws_profiles() {
|
||||
reply=($(aws_profiles))
|
||||
}
|
||||
compctl -K _aws_profiles asp aws_change_access_key
|
||||
compctl -K _aws_profiles asp acp aws_change_access_key
|
||||
|
||||
# AWS prompt
|
||||
function aws_prompt_info() {
|
||||
[[ -z $AWS_PROFILE ]] && return
|
||||
echo "${ZSH_THEME_AWS_PREFIX:=<aws:}${AWS_PROFILE}${ZSH_THEME_AWS_SUFFIX:=>}"
|
||||
[[ -n "$AWS_PROFILE" ]] || return
|
||||
echo "${ZSH_THEME_AWS_PREFIX=<aws:}${AWS_PROFILE:gs/%/%%}${ZSH_THEME_AWS_SUFFIX=>}"
|
||||
}
|
||||
|
||||
if [ "$SHOW_AWS_PROMPT" != false ]; then
|
||||
if [[ "$SHOW_AWS_PROMPT" != false && "$RPROMPT" != *'$(aws_prompt_info)'* ]]; then
|
||||
RPROMPT='$(aws_prompt_info)'"$RPROMPT"
|
||||
fi
|
||||
|
||||
|
|
@ -64,6 +170,7 @@ fi
|
|||
|
||||
# AWS CLI v2 comes with its own autocompletion. Check if that is there, otherwise fall back
|
||||
if command -v aws_completer &> /dev/null; then
|
||||
autoload -Uz bashcompinit && bashcompinit
|
||||
complete -C aws_completer aws
|
||||
else
|
||||
function _awscli-homebrew-installed() {
|
||||
|
|
|
|||
|
|
@ -8,15 +8,22 @@ To use, add `battery` to the list of plugins in your `.zshrc` file:
|
|||
|
||||
Then, add the `battery_pct_prompt` function to your custom theme. For example:
|
||||
|
||||
```
|
||||
```zsh
|
||||
RPROMPT='$(battery_pct_prompt) ...'
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
On Linux, you must have the `acpi` tool installed on your operating system.
|
||||
- On Linux, you must have the `acpi` or `acpitool` commands installed on your operating system.
|
||||
On Debian/Ubuntu, you can do that with `sudo apt install acpi` or `sudo apt install acpitool`.
|
||||
|
||||
Here's an example of how to install with apt:
|
||||
```
|
||||
sudo apt-get install acpi
|
||||
```
|
||||
- On Android (via [Termux](https://play.google.com/store/apps/details?id=com.termux)), you must have:
|
||||
|
||||
1. The `Termux:API` addon app installed:
|
||||
[Google Play](https://play.google.com/store/apps/details?id=com.termux.api) | [F-Droid](https://f-droid.org/packages/com.termux.api/)
|
||||
|
||||
2. The `termux-api` package installed within termux:
|
||||
|
||||
```sh
|
||||
pkg install termux-api
|
||||
```
|
||||
|
|
|
|||
|
|
@ -10,20 +10,17 @@
|
|||
# Author: J (927589452) #
|
||||
# Modified to add support for FreeBSD #
|
||||
###########################################
|
||||
# Author: Avneet Singh (kalsi-avneet) #
|
||||
# Modified to add support for Android #
|
||||
###########################################
|
||||
|
||||
if [[ "$OSTYPE" = darwin* ]]; then
|
||||
|
||||
function battery_is_charging() {
|
||||
ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ Yes'
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
local battery_status="$(ioreg -rc AppleSmartBattery)"
|
||||
local -i capacity=$(sed -n -e '/MaxCapacity/s/^.*"MaxCapacity"\ =\ //p' <<< $battery_status)
|
||||
local -i current=$(sed -n -e '/CurrentCapacity/s/^.*"CurrentCapacity"\ =\ //p' <<< $battery_status)
|
||||
echo $(( current * 100 / capacity ))
|
||||
pmset -g batt | grep -Eo "\d+%" | cut -d% -f1
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if battery_is_charging; then
|
||||
echo "External Power"
|
||||
|
|
@ -31,7 +28,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
battery_pct
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
local smart_battery_status="$(ioreg -rc "AppleSmartBattery")"
|
||||
if [[ $(echo $smart_battery_status | command grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]]; then
|
||||
|
|
@ -45,7 +41,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
echo "∞"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_prompt () {
|
||||
local battery_pct color
|
||||
if ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ No'; then
|
||||
|
|
@ -64,17 +59,14 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
}
|
||||
|
||||
elif [[ "$OSTYPE" = freebsd* ]]; then
|
||||
|
||||
function battery_is_charging() {
|
||||
[[ $(sysctl -n hw.acpi.battery.state) -eq 2 ]]
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
if (( $+commands[sysctl] )); then
|
||||
sysctl -n hw.acpi.battery.life
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
|
|
@ -82,7 +74,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
local remaining_time
|
||||
remaining_time=$(sysctl -n hw.acpi.battery.time)
|
||||
|
|
@ -92,7 +83,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
printf %02d:%02d $hour $minute
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
|
|
@ -109,19 +99,22 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
|
||||
elif [[ "$OSTYPE" = linux* ]]; then
|
||||
|
||||
elif [[ "$OSTYPE" = linux-android ]] && (( ${+commands[termux-battery-status]} )); then
|
||||
function battery_is_charging() {
|
||||
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
|
||||
termux-battery-status 2>/dev/null | command awk '/status/ { exit ($0 ~ /DISCHARGING/) }'
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
if (( $+commands[acpi] )); then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -E '^Battery.*(Full|(Disc|C)harging)' | cut -f2 -d ',' | tr -cd '[:digit:]'
|
||||
fi
|
||||
# Sample output:
|
||||
# {
|
||||
# "health": "GOOD",
|
||||
# "percentage": 93,
|
||||
# "plugged": "UNPLUGGED",
|
||||
# "status": "DISCHARGING",
|
||||
# "temperature": 29.0,
|
||||
# "current": 361816
|
||||
# }
|
||||
termux-battery-status 2>/dev/null | command awk '/percentage/ { gsub(/[,]/,""); print $2}'
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
|
|
@ -129,13 +122,72 @@ elif [[ "$OSTYPE" = linux* ]]; then
|
|||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() { } # Not available on android
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
if battery_is_charging; then
|
||||
echo "∞"
|
||||
else
|
||||
if [[ $battery_pct -gt 50 ]]; then
|
||||
color='green'
|
||||
elif [[ $battery_pct -gt 20 ]]; then
|
||||
color='yellow'
|
||||
else
|
||||
color='red'
|
||||
fi
|
||||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
elif [[ "$OSTYPE" = linux* ]]; then
|
||||
function battery_is_charging() {
|
||||
if (( $+commands[acpitool] )); then
|
||||
! acpitool 2>/dev/null | command grep -qE '^\s+Battery.*Discharging'
|
||||
elif (( $+commands[acpi] )); then
|
||||
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
|
||||
fi
|
||||
}
|
||||
function battery_pct() {
|
||||
if (( $+commands[acpitool] )); then
|
||||
# Sample output:
|
||||
# Battery #1 : Unknown, 99.55%
|
||||
# Battery #2 : Discharging, 49.58%, 01:12:05
|
||||
# All batteries : 62.60%, 02:03:03
|
||||
local -i pct=$(acpitool 2>/dev/null | command awk -F, '
|
||||
/^\s+All batteries/ {
|
||||
gsub(/[^0-9.]/, "", $1)
|
||||
pct=$1
|
||||
exit
|
||||
}
|
||||
!pct && /^\s+Battery/ {
|
||||
gsub(/[^0-9.]/, "", $2)
|
||||
pct=$2
|
||||
}
|
||||
END { print pct }
|
||||
')
|
||||
echo $pct
|
||||
elif (( $+commands[acpi] )); then
|
||||
# Sample output:
|
||||
# Battery 0: Discharging, 0%, rate information unavailable
|
||||
# Battery 1: Full, 100%
|
||||
acpi 2>/dev/null | command awk -F, '
|
||||
/rate information unavailable/ { next }
|
||||
/^Battery.*: /{ gsub(/[^0-9]/, "", $2); print $2; exit }
|
||||
'
|
||||
fi
|
||||
}
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
else
|
||||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
function battery_time_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
|
||||
fi
|
||||
}
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
|
|
@ -152,7 +204,6 @@ elif [[ "$OSTYPE" = linux* ]]; then
|
|||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
|
||||
else
|
||||
# Empty functions so we don't cause errors in prompts
|
||||
function battery_is_charging { false }
|
||||
|
|
@ -177,7 +228,7 @@ function battery_level_gauge() {
|
|||
local charging_color=${BATTERY_CHARGING_COLOR:-$color_yellow}
|
||||
local charging_symbol=${BATTERY_CHARGING_SYMBOL:-'⚡'}
|
||||
|
||||
local battery_remaining_percentage=$(battery_pct)
|
||||
local -i battery_remaining_percentage=$(battery_pct)
|
||||
local filled empty gauge_color
|
||||
|
||||
if [[ $battery_remaining_percentage =~ [0-9]+ ]]; then
|
||||
|
|
|
|||
|
|
@ -1,5 +1,14 @@
|
|||
## Bazel autocomplete plugin
|
||||
# Bazel plugin
|
||||
|
||||
A copy of the completion script from the
|
||||
[bazelbuild/bazel](https://github.com/bazelbuild/bazel/master/scripts/zsh_completion/_bazel)
|
||||
git repo.
|
||||
This plugin adds completion for [bazel](https://bazel.build), an open-source build and
|
||||
test tool that scalably supports multi-language and multi-platform projects.
|
||||
|
||||
To use it, add `bazel` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... bazel)
|
||||
```
|
||||
|
||||
The plugin has a copy of [the completion script from the git repository][1].
|
||||
|
||||
[1]: https://github.com/bazelbuild/bazel/blob/master/scripts/zsh_completion/_bazel
|
||||
|
|
|
|||
|
|
@ -164,7 +164,7 @@ _get_build_targets() {
|
|||
;;
|
||||
esac
|
||||
completions=(${$(_bazel_b query "kind(\"${rule_re}\", ${pkg}:all)" 2>/dev/null)##*:})
|
||||
if ( (( ${#completions} > 0 )) && [[ $target_type != run ]] ); then
|
||||
if ( (( ${#completions} > 0 )) && [[ $target_type != bin ]] ); then
|
||||
completions+=(all)
|
||||
fi
|
||||
echo ${completions[*]}
|
||||
|
|
|
|||
5
plugins/bedtools/README.md
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Bedtools plugin
|
||||
|
||||
This plugin adds support for the [bedtools suite](http://bedtools.readthedocs.org/en/latest/):
|
||||
|
||||
* Adds autocomplete options for all bedtools sub commands.
|
||||
64
plugins/bedtools/_bedtools
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
#compdef bedtools
|
||||
#autoload
|
||||
|
||||
local curcontext="$curcontext" state line ret=1
|
||||
local -a _files
|
||||
|
||||
_arguments -C \
|
||||
'1: :->cmds' \
|
||||
'2:: :->args' && ret=0
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
_values "bedtools command" \
|
||||
"--contact[Feature requests, bugs, mailing lists, etc.]" \
|
||||
"--help[Print this help menu.]" \
|
||||
"--version[What version of bedtools are you using?.]" \
|
||||
"annotate[Annotate coverage of features from multiple files.]" \
|
||||
"bamtobed[Convert BAM alignments to BED (& other) formats.]" \
|
||||
"bamtofastq[Convert BAM records to FASTQ records.]" \
|
||||
"bed12tobed6[Breaks BED12 intervals into discrete BED6 intervals.]" \
|
||||
"bedpetobam[Convert BEDPE intervals to BAM records.]" \
|
||||
"bedtobam[Convert intervals to BAM records.]" \
|
||||
"closest[Find the closest, potentially non-overlapping interval.]" \
|
||||
"cluster[Cluster (but don't merge) overlapping/nearby intervals.]" \
|
||||
"complement[Extract intervals _not_ represented by an interval file.]" \
|
||||
"coverage[Compute the coverage over defined intervals.]" \
|
||||
"expand[Replicate lines based on lists of values in columns.]" \
|
||||
"fisher[Calculate Fisher statistic b/w two feature files.]" \
|
||||
"flank[Create new intervals from the flanks of existing intervals.]" \
|
||||
"genomecov[Compute the coverage over an entire genome.]" \
|
||||
"getfasta[Use intervals to extract sequences from a FASTA file.]" \
|
||||
"groupby[Group by common cols. & summarize oth. cols. (~ SQL "groupBy")]" \
|
||||
"igv[Create an IGV snapshot batch script.]" \
|
||||
"intersect[Find overlapping intervals in various ways.]" \
|
||||
"jaccard[Calculate the Jaccard statistic b/w two sets of intervals.]" \
|
||||
"links[Create a HTML page of links to UCSC locations.]" \
|
||||
"makewindows[Make interval "windows" across a genome.]" \
|
||||
"map[Apply a function to a column for each overlapping interval.]" \
|
||||
"maskfasta[Use intervals to mask sequences from a FASTA file.]" \
|
||||
"merge[Combine overlapping/nearby intervals into a single interval.]" \
|
||||
"multicov[Counts coverage from multiple BAMs at specific intervals.]" \
|
||||
"multiinter[Identifies common intervals among multiple interval files.]" \
|
||||
"nuc[Profile the nucleotide content of intervals in a FASTA file.]" \
|
||||
"overlap[Computes the amount of overlap from two intervals.]" \
|
||||
"pairtobed[Find pairs that overlap intervals in various ways.]" \
|
||||
"pairtopair[Find pairs that overlap other pairs in various ways.]" \
|
||||
"random[Generate random intervals in a genome.]" \
|
||||
"reldist[Calculate the distribution of relative distances b/w two files.]" \
|
||||
"sample[Sample random records from file using reservoir sampling.]" \
|
||||
"shuffle[Randomly redistrubute intervals in a genome.]" \
|
||||
"slop[Adjust the size of intervals.]" \
|
||||
"sort[Order the intervals in a file.]" \
|
||||
"subtract[Remove intervals based on overlaps b/w two files.]" \
|
||||
"tag[Tag BAM alignments based on overlaps with interval files.]" \
|
||||
"unionbedg[Combines coverage intervals from multiple BEDGRAPH files.]" \
|
||||
"window[Find overlapping intervals within a window around an interval.]" \
|
||||
ret=0
|
||||
;;
|
||||
*)
|
||||
_files
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
|
|
@ -20,6 +20,12 @@ if ! (type bgnotify_formatted | grep -q 'function'); then ## allow custom functi
|
|||
}
|
||||
fi
|
||||
|
||||
currentAppId () {
|
||||
if (( $+commands[osascript] )); then
|
||||
osascript -e 'tell application (path to frontmost application as text) to id' 2>/dev/null
|
||||
fi
|
||||
}
|
||||
|
||||
currentWindowId () {
|
||||
if hash osascript 2>/dev/null; then #osx
|
||||
osascript -e 'tell application (path to frontmost application as text) to id of front window' 2&> /dev/null || echo "0"
|
||||
|
|
@ -32,11 +38,20 @@ currentWindowId () {
|
|||
|
||||
bgnotify () { ## args: (title, subtitle)
|
||||
if hash terminal-notifier 2>/dev/null; then #osx
|
||||
[[ "$TERM_PROGRAM" == 'iTerm.app' ]] && term_id='com.googlecode.iterm2';
|
||||
[[ "$TERM_PROGRAM" == 'Apple_Terminal' ]] && term_id='com.apple.terminal';
|
||||
local term_id="$bgnotify_appid"
|
||||
if [[ -z "$term_id" ]]; then
|
||||
case "$TERM_PROGRAM" in
|
||||
iTerm.app) term_id='com.googlecode.iterm2' ;;
|
||||
Apple_Terminal) term_id='com.apple.terminal' ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
## now call terminal-notifier, (hopefully with $term_id!)
|
||||
[ -z "$term_id" ] && terminal-notifier -message "$2" -title "$1" >/dev/null ||
|
||||
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" >/dev/null
|
||||
if [[ -z "$term_id" ]]; then
|
||||
terminal-notifier -message "$2" -title "$1" >/dev/null
|
||||
else
|
||||
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" >/dev/null
|
||||
fi
|
||||
elif hash growlnotify 2>/dev/null; then #osx growl
|
||||
growlnotify -m "$1" "$2"
|
||||
elif hash notify-send 2>/dev/null; then #ubuntu gnome!
|
||||
|
|
@ -54,6 +69,7 @@ bgnotify () { ## args: (title, subtitle)
|
|||
bgnotify_begin() {
|
||||
bgnotify_timestamp=$EPOCHSECONDS
|
||||
bgnotify_lastcmd="${1:-$2}"
|
||||
bgnotify_appid="$(currentAppId)"
|
||||
bgnotify_windowid=$(currentWindowId)
|
||||
}
|
||||
|
||||
|
|
@ -62,7 +78,7 @@ bgnotify_end() {
|
|||
elapsed=$(( EPOCHSECONDS - bgnotify_timestamp ))
|
||||
past_threshold=$(( elapsed >= bgnotify_threshold ))
|
||||
if (( bgnotify_timestamp > 0 )) && (( past_threshold )); then
|
||||
if [ $(currentWindowId) != "$bgnotify_windowid" ]; then
|
||||
if [[ $(currentAppId) != "$bgnotify_appid" || $(currentWindowId) != "$bgnotify_windowid" ]]; then
|
||||
print -n "\a"
|
||||
bgnotify_formatted "$didexit" "$bgnotify_lastcmd" "$elapsed"
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -1,6 +0,0 @@
|
|||
## Boot2docker autocomplete plugin
|
||||
|
||||
- Adds autocomplete options for all boot2docker commands.
|
||||
|
||||
|
||||
Maintainer : Manfred Touron ([@moul](https://github.com/moul))
|
||||
|
|
@ -1,73 +0,0 @@
|
|||
#compdef boot2docker
|
||||
|
||||
# Boot2docker autocompletion for oh-my-zsh
|
||||
# Requires: Boot2docker installed
|
||||
# Author: Manfred Touron (@moul)
|
||||
|
||||
local -a _1st_arguments
|
||||
_1st_arguments=(
|
||||
"init":"Create a new Boot2Docker VM."
|
||||
"up":"Start VM from any states."
|
||||
"start":"Start VM from any states."
|
||||
"boot":"Start VM from any states."
|
||||
"ssh":"[ssh-command] Login to VM via SSH."
|
||||
"save":"Suspend VM and save state to disk."
|
||||
"suspend":"Suspend VM and save state to disk."
|
||||
"down":"Gracefully shutdown the VM."
|
||||
"stop":"Gracefully shutdown the VM."
|
||||
"halt":"Gracefully shutdown the VM."
|
||||
"restart":"Gracefully reboot the VM."
|
||||
"poweroff":"Forcefully power off the VM (may corrupt disk image)."
|
||||
"reset":"Forcefully power cycle the VM (may corrupt disk image)."
|
||||
"delete":"Delete Boot2Docker VM and its disk image."
|
||||
"destroy":"Delete Boot2Docker VM and its disk image."
|
||||
"config":"Show selected profile file settings."
|
||||
"cfg":"Show selected profile file settings."
|
||||
"info":"Display detailed information of VM."
|
||||
"ip":"Display the IP address of the VM's Host-only network."
|
||||
"socket":"Display the DOCKER_HOST socket to connect to."
|
||||
"shellinit":"Display the shell command to set up the Docker client."
|
||||
"status":"Display current state of VM."
|
||||
"download":"Download Boot2Docker ISO image."
|
||||
"upgrade":"Upgrade the Boot2Docker ISO image (restart if running)."
|
||||
"version":"Display version information."
|
||||
)
|
||||
|
||||
_arguments \
|
||||
'(--basevmdk)--basevmdk[Path to VMDK to use as base for persistent partition]' \
|
||||
'(--cpus)'{-c,--cpus}'[number of CPUs for boot2docker.]' \
|
||||
'(--clobber)--clobber[overwrite Docker client binary on boot2docker upgrade]' \
|
||||
'(--dhcp)--dhcp[enable VirtualBox host-only network DHCP.]' \
|
||||
'(--dhcpip)--dhcpip[VirtualBox host-only network DHCP server address.]' \
|
||||
'(-s --disksize)'{-s,--disksize}'[boot2docker disk image size (in MB).]' \
|
||||
'(--dockerport)--dockerport[host Docker port (forward to port 2376 in VM). (deprecated - use with care)]' \
|
||||
'(--driver)--driver[hypervisor driver.]' \
|
||||
'(--force-upgrade-download)--force-upgrade-download[always download on boot2docker upgrade, never skip.]' \
|
||||
'(--hostip)--hostip[VirtualBox host-only network IP address.]' \
|
||||
'(--iso)--iso[path to boot2docker ISO image.]' \
|
||||
'(--iso-url)--iso-url[/api.github.com/repos/boot2docker/boot2docker/releases": source URL to provision the boot2docker ISO image.]' \
|
||||
'(--lowerip)--lowerip[VirtualBox host-only network DHCP lower bound.]' \
|
||||
'(--memory)'{-m,--memory}'[virtual machine memory size (in MB).]' \
|
||||
'(--netmask)--netmask[VirtualBox host-only network mask.]' \
|
||||
'(--no-dummy)--no-dummy[Example parameter for the dummy driver.]' \
|
||||
'(--retries)--retries[number of port knocking retries during 'start']' \
|
||||
'(--serial)--serial[try serial console to get IP address (experimental)]' \
|
||||
'(--serialfile)--serialfile[path to the serial socket/pipe.]' \
|
||||
'(--ssh)--ssh[path to SSH client utility.]' \
|
||||
'(--ssh-keygen)--ssh-keygen[path to ssh-keygen utility.]' \
|
||||
'(--sshkey)--sshkey[path to SSH key to use.]' \
|
||||
'(--sshport)--sshport[host SSH port (forward to port 22 in VM).]' \
|
||||
'(--upperip)--upperip[VirtualBox host-only network DHCP upper bound.]' \
|
||||
'(--vbm)--vbm[path to VirtualBox management utility.]' \
|
||||
'(--vbox-share)--vbox-share[(defaults to "/Users=Users" if no shares are specified; use "disable" to explicitly prevent any shares from being created) List of directories to share during "up|start|boot" via VirtualBox Guest Additions, with optional labels]' \
|
||||
'(--verbose)'{-v,--verbose}'[display verbose command invocations.]' \
|
||||
'(--vm)--vm[virtual machine name.]' \
|
||||
'(--waittime)--waittime[Time in milliseconds to wait between port knocking retries during 'start']' \
|
||||
'*:: :->subcmds' && return 0
|
||||
|
||||
#_arguments '*:: :->command'
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
_describe -t commands "boot2docker command" _1st_arguments
|
||||
return
|
||||
fi
|
||||
|
|
@ -1,4 +1,6 @@
|
|||
alias bi="bower install"
|
||||
alias bisd="bower install --save-dev"
|
||||
alias bis="bower install --save"
|
||||
alias bl="bower list"
|
||||
alias bs="bower search"
|
||||
|
||||
|
|
@ -7,7 +9,7 @@ _bower_installed_packages () {
|
|||
}
|
||||
_bower ()
|
||||
{
|
||||
local -a _1st_arguments _no_color _dopts _save_dev _force_lastest _production
|
||||
local -a _1st_arguments _no_color _dopts _save_dev _force_latest _production
|
||||
local expl
|
||||
typeset -A opt_args
|
||||
|
||||
|
|
@ -20,7 +22,7 @@ _bower ()
|
|||
|
||||
_save_dev=('(--save-dev)--save-dev[Save installed packages into the project"s bower.json devDependencies]')
|
||||
|
||||
_force_lastest=('(--force-latest)--force-latest[Force latest version on conflict]')
|
||||
_force_latest=('(--force-latest)--force-latest[Force latest version on conflict]')
|
||||
|
||||
_production=('(--production)--production[Do not install project devDependencies]')
|
||||
|
||||
|
|
@ -52,7 +54,7 @@ _bower ()
|
|||
_arguments \
|
||||
$_dopts \
|
||||
$_save_dev \
|
||||
$_force_lastest \
|
||||
$_force_latest \
|
||||
$_no_color \
|
||||
$_production
|
||||
;;
|
||||
|
|
@ -60,7 +62,7 @@ _bower ()
|
|||
_arguments \
|
||||
$_dopts \
|
||||
$_no_color \
|
||||
$_force_lastest
|
||||
$_force_latest
|
||||
_bower_installed_packages
|
||||
compadd "$@" $(echo $bower_package_list)
|
||||
;;
|
||||
|
|
|
|||
|
|
@ -1,31 +1,47 @@
|
|||
# Branch
|
||||
# Branch plugin
|
||||
|
||||
Displays the current Git or Mercurial branch fast.
|
||||
This plugin displays the current Git or Mercurial branch, fast. In a Mercurial repository, it
also displays the current bookmark, if present.
|
||||
|
||||
To use it, add `branch` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... branch)
|
||||
```
|
||||
|
||||
## Speed test
|
||||
|
||||
### Mercurial
|
||||
- `hg branch`:
|
||||
|
||||
```shell
|
||||
$ time hg branch
|
||||
0.11s user 0.14s system 70% cpu 0.355 total
|
||||
```
|
||||
```console
|
||||
$ time hg branch
|
||||
0.11s user 0.14s system 70% cpu 0.355 total
|
||||
```
|
||||
|
||||
### Branch plugin
|
||||
- branch plugin:
|
||||
|
||||
```shell
|
||||
$ time zsh /tmp/branch_prompt_info_test.zsh
|
||||
0.00s user 0.01s system 78% cpu 0.014 total
|
||||
```
|
||||
```console
|
||||
$ time zsh /tmp/branch_prompt_info_test.zsh
|
||||
0.00s user 0.01s system 78% cpu 0.014 total
|
||||
```
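The test script referenced above is not shipped with the plugin; a minimal version might simply source the plugin and call the function (a sketch, assuming a default Oh My Zsh checkout):

```zsh
# /tmp/branch_prompt_info_test.zsh (illustrative)
source ~/.oh-my-zsh/plugins/branch/branch.plugin.zsh
branch_prompt_info
```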
|
||||
|
||||
## Usage
|
||||
|
||||
Edit your theme file (eg.: `~/.oh-my-zsh/theme/robbyrussell.zsh-theme`)
|
||||
adding `$(branch_prompt_info)` in your prompt like this:
|
||||
Copy your theme to `$ZSH_CUSTOM/themes/` and modify it to add `$(branch_prompt_info)` in your prompt.
|
||||
This example is for the `robbyrussell` theme:
|
||||
|
||||
```diff
|
||||
- PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
|
||||
+ PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)$(branch_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
|
||||
diff --git a/themes/robbyrussell.zsh-theme b/themes/robbyrussell.zsh-theme
|
||||
index 2fd5f2cd..9d89a464 100644
|
||||
--- a/themes/robbyrussell.zsh-theme
|
||||
+++ b/themes/robbyrussell.zsh-theme
|
||||
@@ -1,5 +1,5 @@
|
||||
PROMPT="%(?:%{$fg_bold[green]%}➜ :%{$fg_bold[red]%}➜ )"
|
||||
-PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(git_prompt_info)'
|
||||
+PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(branch_prompt_info)'
|
||||
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[blue]%}git:(%{$fg[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
```
|
||||
|
||||
## Maintainer
|
||||
|
|
|
|||
|
|
@ -3,29 +3,33 @@
|
|||
# Oct 2, 2015
|
||||
|
||||
function branch_prompt_info() {
|
||||
# Defines path as current directory
|
||||
local current_dir=$PWD
|
||||
# While current path is not root path
|
||||
while [[ $current_dir != '/' ]]
|
||||
do
|
||||
# Git repository
|
||||
if [[ -d "${current_dir}/.git" ]]
|
||||
then
|
||||
echo '±' ${"$(<"$current_dir/.git/HEAD")"##*/}
|
||||
return;
|
||||
# Start checking in current working directory
|
||||
local branch="" dir="$PWD"
|
||||
while [[ "$dir" != '/' ]]; do
|
||||
# Found .git directory
|
||||
if [[ -d "${dir}/.git" ]]; then
|
||||
branch="${"$(<"${dir}/.git/HEAD")"##*/}"
|
||||
echo '±' "${branch:gs/%/%%}"
|
||||
return
|
||||
fi
|
||||
# Mercurial repository
|
||||
if [[ -d "${current_dir}/.hg" ]]
|
||||
then
|
||||
if [[ -f "$current_dir/.hg/branch" ]]
|
||||
then
|
||||
echo '☿' $(<"$current_dir/.hg/branch")
|
||||
|
||||
# Found .hg directory
|
||||
if [[ -d "${dir}/.hg" ]]; then
|
||||
if [[ -f "${dir}/.hg/branch" ]]; then
|
||||
branch="$(<"${dir}/.hg/branch")"
|
||||
else
|
||||
echo '☿ default'
|
||||
branch="default"
|
||||
fi
|
||||
return;
|
||||
|
||||
if [[ -f "${dir}/.hg/bookmarks.current" ]]; then
|
||||
branch="${branch}/$(<"${dir}/.hg/bookmarks.current")"
|
||||
fi
|
||||
|
||||
echo '☿' "${branch:gs/%/%%}"
|
||||
return
|
||||
fi
|
||||
# Defines path as parent directory and keeps looking for :)
|
||||
current_dir="${current_dir:h}"
|
||||
|
||||
# Check parent directory
|
||||
dir="${dir:h}"
|
||||
done
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,19 +8,26 @@ To use it, add `brew` to the plugins array of your zshrc file:
|
|||
plugins=(... brew)
|
||||
```
|
||||
|
||||
## Shellenv
|
||||
|
||||
If `brew` is not found in the PATH, this plugin will attempt to find it in common
|
||||
locations, and execute `brew shellenv` to set the environment appropriately.
|
||||
This plugin will also export `HOMEBREW_PREFIX="$(brew --prefix)"` if not previously
|
||||
defined for convenience.
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
|----------|------------------------------------------------------------- |---------------------------------------------------------------------|
|
||||
| `brewp` | `brew pin` | Pin a specified formula so that it's not upgraded. |
|
||||
| `brews` | `brew list -1` | List installed formulae or the installed files for a given formula. |
|
||||
| `brewsp` | `brew list --pinned` | List pinned formulae, or show the version of a given formula. |
|
||||
| `bubo` | `brew update && brew outdated` | Update Homebrew data, then list outdated formulae and casks. |
|
||||
| `bubc` | `brew upgrade && brew cleanup` | Upgrade outdated formulae and casks, then run cleanup. |
|
||||
| `bubu` | `bubo && bubc` | Do the last two operations above. |
|
||||
| `buf` | `brew upgrade --formula` | Upgrade only formulas (not casks). |
|
||||
| `bcubo` | `brew update && brew outdated --cask` | Update Homebrew data, then list outdated casks. |
|
||||
| `bcubc` | `brew cask reinstall $(brew outdated --cask) && brew cleanup` | Update outdated casks, then run cleanup. |
|
||||
| Alias | Command | Description |
|
||||
| -------- | ------------------------------------- | ------------------------------------------------------------------- |
|
||||
| `bcubc` | `brew upgrade --cask && brew cleanup` | Update outdated casks, then run cleanup. |
|
||||
| `bcubo` | `brew update && brew outdated --cask` | Update Homebrew data, then list outdated casks. |
|
||||
| `brewp` | `brew pin` | Pin a specified formula so that it's not upgraded. |
|
||||
| `brews` | `brew list -1` | List installed formulae or the installed files for a given formula. |
|
||||
| `brewsp` | `brew list --pinned` | List pinned formulae, or show the version of a given formula. |
|
||||
| `bubc` | `brew upgrade && brew cleanup` | Upgrade outdated formulae and casks, then run cleanup. |
|
||||
| `bubo` | `brew update && brew outdated` | Update Homebrew data, then list outdated formulae and casks. |
|
||||
| `bubu` | `bubo && bubc` | Do the last two operations above. |
|
||||
| `buf` | `brew upgrade --formula` | Upgrade only formulas (not casks). |
|
||||
|
||||
## Completion
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +1,48 @@
|
|||
if (( ! $+commands[brew] )); then
|
||||
if [[ -x /opt/homebrew/bin/brew ]]; then
|
||||
BREW_LOCATION="/opt/homebrew/bin/brew"
|
||||
elif [[ -x /usr/local/bin/brew ]]; then
|
||||
BREW_LOCATION="/usr/local/bin/brew"
|
||||
elif [[ -x /home/linuxbrew/.linuxbrew/bin/brew ]]; then
|
||||
BREW_LOCATION="/home/linuxbrew/.linuxbrew/bin/brew"
|
||||
elif [[ -x "$HOME/.linuxbrew/bin/brew" ]]; then
|
||||
BREW_LOCATION="$HOME/.linuxbrew/bin/brew"
|
||||
else
|
||||
return
|
||||
fi
|
||||
|
||||
# Only add Homebrew installation to PATH, MANPATH, and INFOPATH if brew is
|
||||
# not already on the path, to prevent duplicate entries. This aligns with
|
||||
# the behavior of the brew installer.sh post-install steps.
|
||||
eval "$("$BREW_LOCATION" shellenv)"
|
||||
unset BREW_LOCATION
|
||||
fi
|
||||
|
||||
if [[ -z "$HOMEBREW_PREFIX" ]]; then
|
||||
# Maintain compatibility with potential custom user profiles, where we had
|
||||
# previously relied on always sourcing shellenv. OMZ plugins should not rely
|
||||
# on this to be defined due to out of order processing.
|
||||
export HOMEBREW_PREFIX="$(brew --prefix)"
|
||||
fi
|
||||
|
||||
alias bcubc='brew upgrade --cask && brew cleanup'
|
||||
alias bcubo='brew update && brew outdated --cask'
|
||||
alias brewp='brew pin'
|
||||
alias brews='brew list -1'
|
||||
alias brewsp='brew list --pinned'
|
||||
alias bubo='brew update && brew outdated'
|
||||
alias bubc='brew upgrade && brew cleanup'
|
||||
alias bubo='brew update && brew outdated'
|
||||
alias bubu='bubo && bubc'
|
||||
alias buf='brew upgrade --formula'
|
||||
alias bcubo='brew update && brew outdated --cask'
|
||||
alias bcubc='brew cask reinstall $(brew outdated --cask) && brew cleanup'
|
||||
|
||||
function brews() {
|
||||
local formulae="$(brew leaves | xargs brew deps --installed --for-each)"
|
||||
local casks="$(brew list --cask)"
|
||||
|
||||
local blue="$(tput setaf 4)"
|
||||
local bold="$(tput bold)"
|
||||
local off="$(tput sgr0)"
|
||||
|
||||
echo "${blue}==>${off} ${bold}Formulae${off}"
|
||||
echo "${formulae}" | sed "s/^\(.*\):\(.*\)$/\1${blue}\2${off}/"
|
||||
echo "\n${blue}==>${off} ${bold}Casks${off}\n${casks}"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ case $state in
|
|||
"check[Determine whether the requirements for your application are installed]" \
|
||||
"list[Show all of the gems in the current bundle]" \
|
||||
"show[Show the source location of a particular gem in the bundle]" \
|
||||
"info[Show details of a particular gem in the bundle]" \
|
||||
"outdated[Show all of the outdated gems in the current bundle]" \
|
||||
"console[Start an IRB session in the context of the current bundle]" \
|
||||
"open[Open an installed gem in the editor]" \
|
||||
|
|
@ -84,7 +85,7 @@ case $state in
|
|||
'(--verbose)--verbose[Enable verbose output mode]'
|
||||
ret=0
|
||||
;;
|
||||
(open|show)
|
||||
(open|show|info)
|
||||
_gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') )
|
||||
if [[ $_gems != "" ]]; then
|
||||
_values 'gems' $_gems && ret=0
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ bundle_install() {
|
|||
else
|
||||
local cores_num="$(nproc)"
|
||||
fi
|
||||
bundle install --jobs="$cores_num" "$@"
|
||||
BUNDLE_JOBS="$cores_num" bundle install "$@"
|
||||
}
|
||||
|
||||
## Gem wrapper
|
||||
|
|
@ -81,14 +81,12 @@ bundled_commands=(
|
|||
)
|
||||
|
||||
# Remove $UNBUNDLED_COMMANDS from the bundled_commands list
|
||||
for cmd in $UNBUNDLED_COMMANDS; do
|
||||
bundled_commands=(${bundled_commands#$cmd});
|
||||
done
|
||||
bundled_commands=(${bundled_commands:|UNBUNDLED_COMMANDS})
|
||||
unset UNBUNDLED_COMMANDS
|
||||
|
||||
# Add $BUNDLED_COMMANDS to the bundled_commands list
|
||||
for cmd in $BUNDLED_COMMANDS; do
|
||||
bundled_commands+=($cmd);
|
||||
done
|
||||
bundled_commands+=($BUNDLED_COMMANDS)
|
||||
unset BUNDLED_COMMANDS
|
||||
|
||||
# Check if in the root or a subdirectory of a bundled project
|
||||
_within-bundled-project() {
|
||||
|
|
@ -126,5 +124,4 @@ for cmd in $bundled_commands; do
|
|||
compdef "_$cmd" "bundled_$cmd"="$cmd"
|
||||
fi
|
||||
done
|
||||
|
||||
unset cmd bundled_commands
|
||||
|
|
|
|||
|
|
@ -1,11 +0,0 @@
|
|||
# cargo
|
||||
|
||||
This plugin adds completion for the Rust build tool [`Cargo`](https://github.com/rust-lang/cargo).
|
||||
|
||||
To use it, add `cargo` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... cargo)
|
||||
```
|
||||
|
||||
Updated on March 3rd, 2019, from [Cargo 0.34.0](https://github.com/rust-lang/cargo/releases/tag/0.34.0).
|
||||
|
|
@ -1,407 +0,0 @@
|
|||
#compdef cargo
|
||||
|
||||
autoload -U regexp-replace
|
||||
|
||||
_cargo() {
|
||||
local curcontext="$curcontext" ret=1
|
||||
local -a command_scope_spec common parallel features msgfmt triple target registry
|
||||
local -a state line state_descr # These are set by _arguments
|
||||
typeset -A opt_args
|
||||
|
||||
common=(
|
||||
'(-q --quiet)*'{-v,--verbose}'[use verbose output]'
|
||||
'(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]'
|
||||
'-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags'
|
||||
'--frozen[require that Cargo.lock and cache are up-to-date]'
|
||||
'--locked[require that Cargo.lock is up-to-date]'
|
||||
'--color=[specify colorization option]:coloring:(auto always never)'
|
||||
'(- 1 *)'{-h,--help}'[show help message]'
|
||||
)
|
||||
|
||||
# leading items in parentheses are an exclusion list for the arguments following that arg
|
||||
# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
|
||||
# - => exclude all other options
|
||||
# 1 => exclude positional arg 1
|
||||
# * => exclude all other args
|
||||
# +blah => exclude +blah
|
||||
_arguments -s -S -C $common \
|
||||
'(- 1 *)--list[list installed commands]' \
|
||||
'(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \
|
||||
'(- 1 *)'{-V,--version}'[show version information]' \
|
||||
'(+beta +nightly)+stable[use the stable toolchain]' \
|
||||
'(+stable +nightly)+beta[use the beta toolchain]' \
|
||||
'(+stable +beta)+nightly[use the nightly toolchain]' \
|
||||
'1: :_cargo_cmds' \
|
||||
'*:: :->args'
|
||||
|
||||
# These flags are mutually exclusive specifiers for the scope of a command; as
|
||||
# they are used in multiple places without change, they are expanded into the
|
||||
# appropriate command's `_arguments` where appropriate.
|
||||
command_scope_spec=(
|
||||
'(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names'
|
||||
'(--bench --bin --test --lib)--example=[specify example name]:example name'
|
||||
'(--bench --example --test --lib)--bin=[specify binary name]:binary name'
|
||||
'(--bench --bin --example --test)--lib=[specify library name]:library name'
|
||||
'(--bench --bin --example --lib)--test=[specify test name]:test name'
|
||||
)
|
||||
|
||||
parallel=(
|
||||
'(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
|
||||
)
|
||||
|
||||
features=(
|
||||
'(--all-features)--features=[specify features to activate]:feature'
|
||||
'(--features)--all-features[activate all available features]'
|
||||
"--no-default-features[don't build the default features]"
|
||||
)
|
||||
|
||||
msgfmt='--message-format=[specify error format]:error format [human]:(human json short)'
|
||||
triple='--target=[specify target triple]:target triple'
|
||||
target='--target-dir=[specify directory for all generated artifacts]:directory:_directories'
|
||||
manifest='--manifest-path=[specify path to manifest]:path:_directories'
|
||||
registry='--registry=[specify registry to use]:registry'
|
||||
|
||||
case $state in
|
||||
args)
|
||||
curcontext="${curcontext%:*}-${words[1]}:"
|
||||
case ${words[1]} in
|
||||
bench)
|
||||
_arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--all-targets[benchmark all targets]' \
|
||||
"--no-run[compile but don't run]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \
|
||||
'--exclude=[exclude packages from the benchmark]:spec' \
|
||||
'--no-fail-fast[run all benchmarks regardless of failure]' \
|
||||
'1: :_guard "^-*" "bench name"' \
|
||||
'*:args:_default'
|
||||
;;
|
||||
|
||||
build)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'--build-plan[output the build plan in JSON]' \
|
||||
;;
|
||||
|
||||
check)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \
|
||||
'--release[check in release mode]' \
|
||||
;;
|
||||
|
||||
clean)
|
||||
_arguments -s -S $common $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \
|
||||
'--release[clean release artifacts]' \
|
||||
'--doc[clean just the documentation directory]'
|
||||
;;
|
||||
|
||||
doc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--no-deps[do not build docs for dependencies]' \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open docs in browser after the build]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
;;
|
||||
|
||||
fetch)
|
||||
_arguments -s -S $common $triple $manifest
|
||||
;;
|
||||
|
||||
fix)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--broken-code[fix code even if it already has compiler errors]' \
|
||||
'--edition[fix in preparation for the next edition]' \
|
||||
'--edition-idioms[fix warnings to migrate to the idioms of an edition]' \
|
||||
'--allow-no-vcs[fix code even if a VCS was not detected]' \
|
||||
'--allow-dirty[fix code even if the working directory is dirty]' \
|
||||
'--allow-staged[fix code even if the working directory has staged changes]'
|
||||
;;
|
||||
|
||||
generate-lockfile)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
git-checkout)
|
||||
_arguments -s -S $common \
|
||||
'--reference=:reference' \
|
||||
'--url=:url:_urls'
|
||||
;;
|
||||
|
||||
help)
|
||||
_cargo_cmds
|
||||
;;
|
||||
|
||||
init)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--edition=[specify edition to set for the crate generated]:edition:(2015 2018)' \
|
||||
'--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \
|
||||
'--name=[set the resulting package name]:name' \
|
||||
'1:path:_directories'
|
||||
;;
|
||||
|
||||
install)
|
||||
_arguments -s -S $common $parallel $features $triple $registry \
|
||||
'(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \
|
||||
'--bin=[only install the specified binary]:binary' \
|
||||
'--branch=[branch to use when installing from git]:branch' \
|
||||
'--debug[build in debug mode instead of release mode]' \
|
||||
'--example=[install the specified example instead of binaries]:example' \
|
||||
'--git=[specify URL from which to install the crate]:url:_urls' \
|
||||
'--path=[local filesystem path to crate to install]: :_directories' \
|
||||
'--rev=[specific commit to use when installing from git]:commit' \
|
||||
'--root=[directory to install packages into]: :_directories' \
|
||||
'--tag=[tag to use when installing from git]:tag' \
|
||||
'--vers=[version to install from crates.io]:version' \
|
||||
'--list[list all installed packages and their versions]' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
locate-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
login)
|
||||
_arguments -s -S $common $registry \
|
||||
'*: :_guard "^-*" "token"'
|
||||
;;
|
||||
|
||||
metadata)
|
||||
_arguments -s -S $common $features $manifest \
|
||||
"--no-deps[output information only about the root package and don't fetch dependencies]" \
|
||||
'--format-version=[specify format version]:version [1]:(1)'
|
||||
;;
|
||||
|
||||
new)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--vcs:initialize a new repo with a given VCS:(git hg none)' \
|
||||
'--name=[set the resulting package name]'
|
||||
;;
|
||||
|
||||
owner)
|
||||
_arguments -s -S $common $registry \
|
||||
'(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \
|
||||
'--index=[specify registry index]:index' \
|
||||
'(-l --list)'{-l,--list}'[list owners of a crate]' \
|
||||
'(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
package)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest \
|
||||
'(-l --list)'{-l,--list}'[print files included in a package without making one]' \
|
||||
'--no-metadata[ignore warnings about a lack of human-usable metadata]' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't build to verify contents]"
|
||||
;;
|
||||
|
||||
pkgid)
|
||||
_arguments -s -S $common $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \
|
||||
'*: :_guard "^-*" "spec"'
|
||||
;;
|
||||
|
||||
publish)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't verify the contents by building them]" \
|
||||
'--token=[specify token to use when uploading]:token' \
|
||||
'--dry-run[perform all checks without uploading]'
|
||||
;;
|
||||
|
||||
read-manifest)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
run)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--example=[name of the bin target]:name' \
|
||||
'--bin=[name of the bin target]:name' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
rustc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--profile=[specify profile to build the selected target for]:profile' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustc rustc -default-'
|
||||
;;
|
||||
|
||||
rustdoc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open the docs in a browser after the operation]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustdoc rustdoc -default-'
|
||||
;;
|
||||
|
||||
search)
|
||||
_arguments -s -S $common $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--limit=[limit the number of results]:results [10]' \
|
||||
'*: :_guard "^-*" "query"'
|
||||
;;
|
||||
|
||||
test)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--test=[test name]: :_cargo_test_names' \
|
||||
'--no-fail-fast[run all tests regardless of failure]' \
|
||||
'--no-run[compile but do not run]' \
|
||||
'(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \
|
||||
'--all[test all packages in the workspace]' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
'1: :_cargo_test_names' \
|
||||
'(--doc --bin --example --test --bench)--lib[only test library]' \
|
||||
'(--lib --bin --example --test --bench)--doc[only test documentation]' \
|
||||
'(--lib --doc --example --test --bench)--bin=[binary name]' \
|
||||
'(--lib --doc --bin --test --bench)--example=[example name]' \
|
||||
'(--lib --doc --bin --example --bench)--test=[test name]' \
|
||||
'(--lib --doc --bin --example --test)--bench=[benchmark name]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
uninstall)
|
||||
_arguments -s -S $common \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \
|
||||
'--bin=[only uninstall the specified binary]:name' \
|
||||
'--root=[directory to uninstall packages from]: :_files -/' \
|
||||
'*:crate:_cargo_installed_crates -F line'
|
||||
;;
|
||||
|
||||
update)
|
||||
_arguments -s -S $common $manifest \
|
||||
'--aggressive=[force dependency update]' \
|
||||
"--dry-run[don't actually write the lockfile]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \
|
||||
'--precise=[update single dependency to precise release]:release'
|
||||
;;
|
||||
|
||||
verify-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
version)
|
||||
_arguments -s -S $common
|
||||
;;
|
||||
|
||||
yank)
|
||||
_arguments -s -S $common $registry \
|
||||
'--vers=[specify yank version]:version' \
|
||||
'--undo[undo a yank, putting a version back into the index]' \
|
||||
'--index=[specify registry index to yank from]:registry index' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
*)
|
||||
# allow plugins to define their own functions
|
||||
if ! _call_function ret _cargo-${words[1]}; then
|
||||
# fallback on default completion for unknown commands
|
||||
_default && ret=0
|
||||
fi
|
||||
(( ! ret ))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_cargo_unstable_flags() {
|
||||
local flags
|
||||
flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } )
|
||||
_describe -t flags 'unstable flag' flags
|
||||
}
|
||||
|
||||
_cargo_installed_crates() {
|
||||
local expl
|
||||
_description crates expl 'crate'
|
||||
compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *}
|
||||
}
|
||||
|
||||
_cargo_cmds() {
|
||||
local -a commands
|
||||
# This uses Parameter Expansion Flags, which are a built-in Zsh feature.
|
||||
# See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
||||
# and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion
|
||||
#
|
||||
# # How does this work?
|
||||
#
|
||||
# First it splits the result of `cargo --list` at newline, then it removes the first line.
|
||||
# Then it removes the indentation (4 spaces) before each item. (Note the x## pattern [1]).
|
||||
# Then it replaces those spaces between item and description with a `:`
|
||||
#
|
||||
# [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns
|
||||
commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} )
|
||||
_describe -t commands 'command' commands
|
||||
}
|
||||
|
||||
|
||||
#FIXME: Disabled until fixed
|
||||
#gets package names from the manifest file
|
||||
_cargo_package_names() {
|
||||
_message -e packages package
|
||||
}
|
||||
|
||||
# Extracts the values of "name" from the array given in $1 and shows them as
|
||||
# command line options for completion
|
||||
_cargo_names_from_array() {
|
||||
# strip json from the path
|
||||
local manifest=${${${"$(cargo locate-project)"}%\"\}}##*\"}
|
||||
if [[ -z $manifest ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local last_line
|
||||
local -a names;
|
||||
local in_block=false
|
||||
local block_name=$1
|
||||
names=()
|
||||
while read -r line; do
|
||||
if [[ $last_line == "[[$block_name]]" ]]; then
|
||||
in_block=true
|
||||
else
|
||||
if [[ $last_line =~ '\s*\[\[.*' ]]; then
|
||||
in_block=false
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $in_block == true ]]; then
|
||||
if [[ $line =~ '\s*name\s*=' ]]; then
|
||||
regexp-replace line '^\s*name\s*=\s*|"' ''
|
||||
names+=( "$line" )
|
||||
fi
|
||||
fi
|
||||
|
||||
last_line=$line
|
||||
done < "$manifest"
|
||||
_describe "$block_name" names
|
||||
|
||||
}
|
||||
|
||||
#Gets the test names from the manifest file
|
||||
_cargo_test_names() {
|
||||
_cargo_names_from_array "test"
|
||||
}
|
||||
|
||||
#Gets the bench names from the manifest file
|
||||
_cargo_benchmark_names() {
|
||||
_cargo_names_from_array "bench"
|
||||
}
|
||||
|
||||
_cargo
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
# catimg script by Eduardo San Martin Morote aka Posva #
|
||||
# https://posva.net #
|
||||
# #
|
||||
# Ouput the content of an image to the stdout using the 256 colors of the #
|
||||
# Output the content of an image to the stdout using the 256 colors of the #
|
||||
# terminal. #
|
||||
# GitHub: https://github.com/posva/catimg #
|
||||
################################################################################
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
# catimg script by Eduardo San Martin Morote aka Posva #
|
||||
# https://posva.net #
|
||||
# #
|
||||
# Ouput the content of an image to the stdout using the 256 colors of the #
|
||||
# Output the content of an image to the stdout using the 256 colors of the #
|
||||
# terminal. #
|
||||
# GitHub: https://github.com/posva/catimg #
|
||||
################################################################################
|
||||
|
|
|
|||
9
plugins/charm/README.md
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
# Charm plugin
|
||||
|
||||
This plugin adds completion for the [charm](https://github.com/charmbracelet/charm) CLI.
|
||||
|
||||
To use it, add `charm` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... charm)
|
||||
```
|
||||
14
plugins/charm/charm.plugin.zsh
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# Autocompletion for the Charm CLI (charm).
|
||||
if (( ! $+commands[charm] )); then
|
||||
return
|
||||
fi
|
||||
|
||||
# If the completion file doesn't exist yet, we need to autoload it and
|
||||
# bind it to `charm`. Otherwise, compinit will have already done that.
|
||||
if [[ ! -f "$ZSH_CACHE_DIR/completions/_charm" ]]; then
|
||||
typeset -g -A _comps
|
||||
autoload -Uz _charm
|
||||
_comps[charm]=_charm
|
||||
fi
|
||||
|
||||
charm completion zsh >| "$ZSH_CACHE_DIR/completions/_charm" &|
|
||||
|
|
@ -5,6 +5,7 @@ current Ruby version, and completion and a prompt function to display the Ruby v
|
|||
Supports brew and manual installation of chruby.
|
||||
|
||||
To use it, add `chruby` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... chruby)
|
||||
```
|
||||
|
|
@ -14,7 +15,7 @@ plugins=(... chruby)
|
|||
If you'd prefer to specify an explicit path to load chruby from
|
||||
you can set variables like so:
|
||||
|
||||
```
|
||||
```zsh
|
||||
zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
|
||||
zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
|
||||
```
|
||||
|
|
|
|||
|
|
@ -1,121 +1,94 @@
|
|||
#
|
||||
# INSTRUCTIONS
|
||||
#
|
||||
# With either a manual or brew installed chruby things should just work.
|
||||
#
|
||||
# If you'd prefer to specify an explicit path to load chruby from
|
||||
# you can set variables like so:
|
||||
#
|
||||
# zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
|
||||
# zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
|
||||
#
|
||||
# TODO
|
||||
# - autodetermine correct source path on non OS X systems
|
||||
# - completion if ruby-install exists
|
||||
## load chruby from different locations
|
||||
|
||||
_source-from-omz-settings() {
|
||||
local _chruby_path _chruby_auto
|
||||
|
||||
zstyle -s :omz:plugins:chruby path _chruby_path || return 1
|
||||
zstyle -s :omz:plugins:chruby auto _chruby_auto || return 1
|
||||
|
||||
if [[ -r ${_chruby_path} ]]; then
|
||||
source ${_chruby_path}
|
||||
fi
|
||||
|
||||
if [[ -r ${_chruby_auto} ]]; then
|
||||
source ${_chruby_auto}
|
||||
fi
|
||||
}
|
||||
|
||||
_source-from-homebrew() {
|
||||
(( $+commands[brew] )) || return 1
|
||||
|
||||
local _brew_prefix
|
||||
# check default brew prefix
|
||||
if [[ -h /usr/local/opt/chruby ]];then
|
||||
_brew_prefix="/usr/local/opt/chruby"
|
||||
else
|
||||
# OK, it is not the default prefix.
# This call to brew is expensive (about 400 ms), so at least let's only make it once.
|
||||
_brew_prefix=$(brew --prefix chruby)
|
||||
fi
|
||||
|
||||
[[ -r "$_brew_prefix" ]] || return 1
|
||||
|
||||
source $_brew_prefix/share/chruby/chruby.sh
|
||||
source $_brew_prefix/share/chruby/auto.sh
|
||||
}
|
||||
|
||||
_load-chruby-dirs() {
|
||||
local dir
|
||||
for dir in "$HOME/.rubies" "$PREFIX/opt/rubies"; do
|
||||
if [[ -d "$dir" ]]; then
|
||||
RUBIES+=("$dir")
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Load chruby
|
||||
if _source-from-omz-settings; then
|
||||
_load-chruby-dirs
|
||||
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
|
||||
source /usr/local/share/chruby/chruby.sh
|
||||
source /usr/local/share/chruby/auto.sh
|
||||
_load-chruby-dirs
|
||||
elif _source-from-homebrew; then
|
||||
_load-chruby-dirs
|
||||
fi
|
||||
|
||||
unfunction _source-from-homebrew _source-from-omz-settings _load-chruby-dirs
|
||||
|
||||
|
||||
## chruby utility functions and aliases
|
||||
|
||||
# rvm and rbenv plugins also provide this alias
|
||||
alias rubies='chruby'
|
||||
|
||||
|
||||
_homebrew-installed() {
|
||||
whence brew &> /dev/null
|
||||
_xit=$?
|
||||
if [ $_xit -eq 0 ];then
|
||||
# ok , we have brew installed
|
||||
# speculatively we check default brew prefix
|
||||
if [ -h /usr/local/opt/chruby ];then
|
||||
_brew_prefix="/usr/local/opt/chruby"
|
||||
else
|
||||
# ok , it is not default prefix
|
||||
# this call to brew is expensive ( about 400 ms ), so at least let's make it only once
|
||||
_brew_prefix=$(brew --prefix chruby)
|
||||
fi
|
||||
return 0
|
||||
else
|
||||
return $_xit
|
||||
fi
|
||||
}
|
||||
|
||||
_chruby-from-homebrew-installed() {
|
||||
[ -r _brew_prefix ] &> /dev/null
|
||||
}
|
||||
|
||||
_ruby-build_installed() {
|
||||
whence ruby-build &> /dev/null
|
||||
}
|
||||
|
||||
_ruby-install-installed() {
|
||||
whence ruby-install &> /dev/null
|
||||
}
|
||||
|
||||
# Simple definition completer for ruby-build
|
||||
if _ruby-build_installed; then
|
||||
_ruby-build() { compadd $(ruby-build --definitions) }
|
||||
compdef _ruby-build ruby-build
|
||||
fi
|
||||
|
||||
_source_from_omz_settings() {
|
||||
local _chruby_path
|
||||
local _chruby_auto
|
||||
|
||||
zstyle -s :omz:plugins:chruby path _chruby_path
|
||||
zstyle -s :omz:plugins:chruby auto _chruby_auto
|
||||
|
||||
if [[ -r ${_chruby_path} ]]; then
|
||||
source ${_chruby_path}
|
||||
fi
|
||||
|
||||
if [[ -r ${_chruby_auto} ]]; then
|
||||
source ${_chruby_auto}
|
||||
fi
|
||||
}
|
||||
|
||||
_chruby_dirs() {
|
||||
chrubydirs=($HOME/.rubies/ $PREFIX/opt/rubies)
|
||||
for dir in chrubydirs; do
|
||||
if [[ -d $dir ]]; then
|
||||
RUBIES+=$dir
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
if _homebrew-installed && _chruby-from-homebrew-installed ; then
|
||||
source $_brew_prefix/share/chruby/chruby.sh
|
||||
source $_brew_prefix/share/chruby/auto.sh
|
||||
_chruby_dirs
|
||||
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
|
||||
source /usr/local/share/chruby/chruby.sh
|
||||
source /usr/local/share/chruby/auto.sh
|
||||
_chruby_dirs
|
||||
else
|
||||
_source_from_omz_settings
|
||||
_chruby_dirs
|
||||
fi
|
||||
|
||||
function ensure_chruby() {
|
||||
$(whence chruby)
|
||||
}
|
||||
|
||||
function current_ruby() {
|
||||
local _ruby
|
||||
_ruby="$(chruby |grep \* |tr -d '* ')"
|
||||
if [[ $(chruby |grep -c \*) -eq 1 ]]; then
|
||||
echo ${_ruby}
|
||||
else
|
||||
echo "system"
|
||||
fi
|
||||
local ruby
|
||||
ruby="$(chruby | grep \* | tr -d '* ')"
|
||||
if [[ $(chruby | grep -c \*) -eq 1 ]]; then
|
||||
echo ${ruby}
|
||||
else
|
||||
echo "system"
|
||||
fi
|
||||
}
|
||||
|
||||
function chruby_prompt_info() {
|
||||
echo "$(current_ruby)"
|
||||
echo "${$(current_ruby):gs/%/%%}"
|
||||
}
|
||||
|
||||
# complete on installed rubies
|
||||
# Complete chruby command with installed rubies
|
||||
_chruby() {
|
||||
compadd $(chruby | tr -d '* ')
|
||||
local default_path='/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin'
|
||||
if PATH=${default_path} type ruby &> /dev/null; then
|
||||
compadd system
|
||||
fi
|
||||
compadd $(chruby | tr -d '* ')
|
||||
if PATH="/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin" command ruby &>/dev/null; then
|
||||
compadd system
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _chruby chruby
|
||||
|
||||
|
||||
# Simple definition completer for ruby-build
|
||||
if command ruby-build &> /dev/null; then
|
||||
_ruby-build() { compadd $(ruby-build --definitions) }
|
||||
compdef _ruby-build ruby-build
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
# chucknorris
|
||||
|
||||
Chuck Norris fortunes plugin for oh-my-zsh. Perfectly suitable as MOTD.
|
||||
|
||||
**Maintainers**: [apjanke](https://github.com/apjanke) [maff](https://github.com/maff)
|
||||
Chuck Norris fortunes plugin for Oh My Zsh. Perfectly suitable as MOTD.
|
||||
|
||||
To use it, add `chucknorris` to the plugins array in your zshrc file.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,28 +1,24 @@
|
|||
# chucknorris: Chuck Norris fortunes
|
||||
|
||||
# Automatically generate or update Chuck's compiled fortune data file
|
||||
# $0 must be used outside a local function. This variable name is unlikely to collide.
|
||||
CHUCKNORRIS_PLUGIN_DIR=${0:h}
|
||||
|
||||
() {
|
||||
local DIR=$CHUCKNORRIS_PLUGIN_DIR/fortunes
|
||||
if [[ ! -f $DIR/chucknorris.dat ]] || [[ $DIR/chucknorris.dat -ot $DIR/chucknorris ]]; then
|
||||
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
|
||||
local strfile=strfile
|
||||
if ! which strfile &>/dev/null && [[ -f /usr/sbin/strfile ]]; then
|
||||
strfile=/usr/sbin/strfile
|
||||
# %x: name of file containing code being executed
|
||||
local fortunes_dir="${${(%):-%x}:h}/fortunes"
|
||||
|
||||
# Aliases
|
||||
alias chuck="fortune -a $fortunes_dir"
|
||||
alias chuck_cow="chuck | cowthink"
|
||||
|
||||
# Automatically generate or update Chuck's compiled fortune data file
|
||||
if [[ "$fortunes_dir/chucknorris" -ot "$fortunes_dir/chucknorris.dat" ]]; then
|
||||
return
|
||||
fi
|
||||
if which $strfile &> /dev/null; then
|
||||
$strfile $DIR/chucknorris $DIR/chucknorris.dat >/dev/null
|
||||
else
|
||||
|
||||
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
|
||||
local strfile="${commands[strfile]:-/usr/sbin/strfile}"
|
||||
if [[ ! -x "$strfile" ]]; then
|
||||
echo "[oh-my-zsh] chucknorris depends on strfile, which is not installed" >&2
|
||||
echo "[oh-my-zsh] strfile is often provided as part of the 'fortune' package" >&2
|
||||
return
|
||||
fi
|
||||
fi
|
||||
|
||||
# Aliases
|
||||
alias chuck="fortune -a $DIR"
|
||||
alias chuck_cow="chuck | cowthink"
|
||||
# Generate the compiled fortune data file
|
||||
$strfile "$fortunes_dir/chucknorris" "$fortunes_dir/chucknorris.dat" >/dev/null
|
||||
}
|
||||
|
||||
unset CHUCKNORRIS_PLUGIN_DIR
|
||||
|
|
|
|||
|
|
@ -228,7 +228,7 @@ Chuck Norris once punched the ground to stop an earthquake. The resulting afters
|
|||
%
|
||||
Chuck Norris once round-house kicked a salesman. Over the phone.
|
||||
%
|
||||
Chuck Norris once rounhouse kicked a football. The astronomical society now considers it a planet.
|
||||
Chuck Norris once roundhouse kicked a football. The astronomical society now considers it a planet.
|
||||
%
|
||||
Chuck Norris once thought he was wrong. He was, however, mistaken.
|
||||
%
|
||||
|
|
@ -312,7 +312,7 @@ Chuck Norris. Enough said.
|
|||
%
|
||||
Chuck Norris: even Naruto can't believe it.
|
||||
%
|
||||
Chunk Norris can make sour milk turn fresh.
|
||||
Chuck Norris can make sour milk turn fresh.
|
||||
%
|
||||
Contrary to popular belief, Rome WAS built in a day, by Chuck Norris.
|
||||
%
|
||||
|
|
@ -342,7 +342,7 @@ Every time there's an earthquake, you know Chuck Norris is hungry. The earthquak
|
|||
%
|
||||
Evolution's driving mechanism is nature's desperate attempt to escape Chuck Norris.
|
||||
%
|
||||
Fear of spiders is arachnaphobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
|
||||
Fear of spiders is arachnophobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
|
||||
%
|
||||
Fool me once, shame on you. Fool Chuck Norris once and he will roundhouse you in the face.
|
||||
%
|
||||
|
|
@ -426,7 +426,7 @@ Some people ask for a Kleenex when they sneeze, Chuck Norris asks for a body bag
|
|||
%
|
||||
Someone once videotaped Chuck Norris getting pissed off. It was called Walker: Texas Chain Saw Massacre.
|
||||
%
|
||||
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindess, and possibly foot sized brusies on the face.
|
||||
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindness, and possibly foot sized bruises on the face.
|
||||
%
|
||||
Taking Karate Lessons = $100, Buying MMA DVD's = $150, Subscribing to a UFC event = $50, Getting a Roundhouse Kick from Chuck Norris = PRICELESS.
|
||||
%
|
||||
|
|
@ -452,7 +452,7 @@ The best part of waking up is not Folgers in your cup. it's knowing that Chuck N
|
|||
%
|
||||
The chief export of Chuck Norris is pain.
|
||||
%
|
||||
The dictionary references Chuck Norris several times, he is metioned under Fear, Law, Order and Chucktatorship.
|
||||
The dictionary references Chuck Norris several times, he is mentioned under Fear, Law, Order and Chucktatorship.
|
||||
%
|
||||
The leading causes of death in the United States are: 1. Heart Disease 2. Chuck Norris 3. Cancer.
|
||||
%
|
||||
|
|
@ -468,7 +468,7 @@ The only way sharks will come near CN underwater is when CN is inside of a cage.
|
|||
%
|
||||
The only word that rhymes with orange is Chuck Norris.
|
||||
%
|
||||
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequal "The Last Skull Bender".
|
||||
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequel "The Last Skull Bender".
|
||||
%
|
||||
The quickest way to a man's heart is with Chuck Norris' fist.
|
||||
%
|
||||
|
|
@ -558,3 +558,11 @@ You know Chuck Norris' pet lizard, right? Last I heard, he was in the movie "God
|
|||
%
|
||||
http://chucknorrisfacts.com/ is built in Drupal because Chuck Norris knows a good CMS when he sees one.
|
||||
%
|
||||
Chuck Norris made the first Giraffe by uppercutting a horse.
|
||||
%
|
||||
Chuck Norris can hear sign language.
|
||||
%
|
||||
Chuck Norris make onions cry.
|
||||
%
|
||||
Chuck Norris doesn't shake hands, he makes them tremble.
|
||||
%
|
||||
|
|
|
|||
|
|
@ -1,26 +0,0 @@
|
|||
# CloudApp plugin
|
||||
|
||||
## The CloudApp API is deprecated, so the plugin will be removed shortly
|
||||
|
||||
[CloudApp](https://www.getcloudapp.com) brings screen recording, screenshots, and GIF creation to the cloud, in an easy-to-use enterprise-level app. The CloudApp plugin allows you to upload a file to your CloudApp account from the command line.
|
||||
|
||||
To use it, add `cloudapp` to the plugins array of your `~/.zshrc` file:
|
||||
|
||||
```zsh
|
||||
plugins=(... cloudapp)
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
1. [Aaron Russell's `cloudapp_api` gem](https://github.com/aaronrussell/cloudapp_api#installation)
|
||||
|
||||
2. That you set your CloudApp credentials in `~/.cloudapp` as a simple text file like below:
|
||||
```
|
||||
email
|
||||
password
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- `cloudapp <filename>`: uploads `<filename>` to your CloudApp account, and if you're using
|
||||
macOS, copies the URL to your clipboard.
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
print -Pn "%F{yellow}"
|
||||
print "[oh-my-zsh] The CloudApp API no longer works, so the cloudapp plugin will"
|
||||
print "[oh-my-zsh] be removed shortly. Please remove it from your plugins list."
|
||||
print -Pn "%f"
|
||||
|
|
@ -39,14 +39,14 @@
|
|||
#
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
local curcontext="$curcontext" state line ret=1 version opts first second third
|
||||
local curcontext="$curcontext" state line ret=1 version
|
||||
local -a opts
|
||||
typeset -A opt_args
|
||||
version=(${(f)"$(_call_program version $words[1] --version)"})
|
||||
version=(${(f)"$(_call_program version $words[1] --version)"}) || return ret
|
||||
version=${${(z)${version[1]}}[3]}
|
||||
first=$(echo $version|cut -d '.' -f 1)
|
||||
second=$(echo $version|cut -d '.' -f 2)
|
||||
third=$(echo $version|cut -d '.' -f 3)
|
||||
if (( $first < 2 )) && (( $second < 7 )) && (( $third < 3 ));then
|
||||
|
||||
autoload -Uz is-at-least
|
||||
if ! is-at-least 1.6.3 "$version"; then
|
||||
opts+=('(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]'
|
||||
'(-r --require)'{-r,--require}'[require a library before executing your script]:library')
|
||||
fi
|
||||
|
|
|
|||
1
plugins/colemak/.gitignore
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
.less
|
||||
|
|
@ -19,4 +19,20 @@ bindkey -a 'N' vi-join
|
|||
bindkey -a 'j' vi-forward-word-end
|
||||
bindkey -a 'J' vi-forward-blank-word-end
|
||||
|
||||
lesskey $ZSH/plugins/colemak/colemak-less
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# New less versions will read this file directly
|
||||
export LESSKEYIN="${0:h:A}/colemak-less"
|
||||
|
||||
# Only run lesskey if less version is older than v582
|
||||
less_ver=$(less --version | awk '{print $2;exit}')
|
||||
autoload -Uz is-at-least
|
||||
if ! is-at-least 582 $less_ver; then
|
||||
# Old less versions will read this transformed file
|
||||
export LESSKEY="${0:h:A}/.less"
|
||||
lesskey -o "$LESSKEY" "$LESSKEYIN" 2>/dev/null
|
||||
fi
|
||||
unset less_ver
|
||||
|
|
|
|||
|
|
@ -16,3 +16,17 @@ You can also try to color other pages by prefixing the respective command with `
|
|||
```zsh
|
||||
colored git help clone
|
||||
```
|
||||
|
||||
## Customization
|
||||
|
||||
The plugin declares a global associative array `less_termcap`, which maps termcap capabilities to escape
sequences for the `less` pager. This mapping can be further customized by the user after the plugin is
loaded. Check the plugin source for more details.
|
||||
|
||||
For example: `less_termcap[md]` maps to `LESS_TERMCAP_md` which is the escape sequence that tells `less`
|
||||
how to print something in bold. It's currently shown in bold red, but if you want to change it, you
|
||||
can redefine `less_termcap[md]` in your zshrc file, after OMZ is sourced:
|
||||
|
||||
```zsh
|
||||
less_termcap[md]="${fg_bold[blue]}" # this tells less to print bold text in bold blue
|
||||
```
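
Any other capability the plugin sets can be overridden the same way; for instance, a purely illustrative tweak (not a plugin default) that renders underlined text in bold cyan:

```zsh
less_termcap[us]="${fg_bold[cyan]}"  # illustrative: underline sequences in bold cyan
```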
|
||||
|
|
|
|||
|
|
@ -1,39 +1,53 @@
|
|||
if [[ "$OSTYPE" = solaris* ]]
|
||||
then
|
||||
if [[ ! -x "$HOME/bin/nroff" ]]
|
||||
then
|
||||
mkdir -p "$HOME/bin"
|
||||
cat > "$HOME/bin/nroff" <<EOF
|
||||
#!/bin/sh
|
||||
if [ -n "\$_NROFF_U" -a "\$1,\$2,\$3" = "-u0,-Tlp,-man" ]; then
|
||||
shift
|
||||
exec /usr/bin/nroff -u\$_NROFF_U "\$@"
|
||||
fi
|
||||
#-- Some other invocation of nroff
|
||||
exec /usr/bin/nroff "\$@"
|
||||
EOF
|
||||
chmod +x "$HOME/bin/nroff"
|
||||
fi
|
||||
fi
|
||||
# Requires colors autoload.
|
||||
# See termcap(5).
|
||||
|
||||
# Set up once, and then reuse. This way it supports user overrides after the
|
||||
# plugin is loaded.
|
||||
typeset -AHg less_termcap
|
||||
|
||||
# bold & blinking mode
|
||||
less_termcap[mb]="${fg_bold[red]}"
|
||||
less_termcap[md]="${fg_bold[red]}"
|
||||
less_termcap[me]="${reset_color}"
|
||||
# standout mode
|
||||
less_termcap[so]="${fg_bold[yellow]}${bg[blue]}"
|
||||
less_termcap[se]="${reset_color}"
|
||||
# underlining
|
||||
less_termcap[us]="${fg_bold[green]}"
|
||||
less_termcap[ue]="${reset_color}"
|
||||
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# Absolute path to this file's directory.
|
||||
typeset -g __colored_man_pages_dir="${0:A:h}"
|
||||
|
||||
function colored() {
|
||||
command env \
|
||||
LESS_TERMCAP_mb=$(printf "\e[1;31m") \
|
||||
LESS_TERMCAP_md=$(printf "\e[1;31m") \
|
||||
LESS_TERMCAP_me=$(printf "\e[0m") \
|
||||
LESS_TERMCAP_se=$(printf "\e[0m") \
|
||||
LESS_TERMCAP_so=$(printf "\e[1;44;33m") \
|
||||
LESS_TERMCAP_ue=$(printf "\e[0m") \
|
||||
LESS_TERMCAP_us=$(printf "\e[1;32m") \
|
||||
PAGER="${commands[less]:-$PAGER}" \
|
||||
_NROFF_U=1 \
|
||||
PATH="$HOME/bin:$PATH" \
|
||||
"$@"
|
||||
local -a environment
|
||||
|
||||
# Convert associative array to plain array of NAME=VALUE items.
|
||||
local k v
|
||||
for k v in "${(@kv)less_termcap}"; do
|
||||
environment+=( "LESS_TERMCAP_${k}=${v}" )
|
||||
done
|
||||
|
||||
# Prefer `less` whenever available, since we specifically configured
|
||||
# environment for it.
|
||||
environment+=( PAGER="${commands[less]:-$PAGER}" )
|
||||
|
||||
# See ./nroff script.
|
||||
if [[ "$OSTYPE" = solaris* ]]; then
|
||||
environment+=( PATH="${__colored_man_pages_dir}:$PATH" )
|
||||
fi
|
||||
|
||||
command env $environment "$@"
|
||||
}
|
||||
|
||||
# Colorize man and dman/debman (from debian-goodies)
|
||||
function man \
|
||||
dman \
|
||||
debman {
|
||||
colored $0 "$@"
|
||||
dman \
|
||||
debman {
|
||||
colored $0 "$@"
|
||||
}
|
||||
|
|
|
|||
12
plugins/colored-man-pages/nroff
Executable file
|
|
@ -0,0 +1,12 @@
|
|||
#!/bin/sh
|
||||
|
||||
# The whole point of this wrapper is to replace emboldening factor -u0 with
|
||||
# -u1 under certain circumstances on Solaris.
|
||||
|
||||
if [ "$1,$2,$3" = "-u0,-Tlp,-man" ]; then
|
||||
shift
|
||||
exec /usr/bin/nroff -u1 "$@"
|
||||
else
|
||||
# Some other invocation of nroff
|
||||
exec /usr/bin/nroff "$@"
|
||||
fi
|
||||
|
|
@ -6,7 +6,8 @@ alias cless="colorize_less"
|
|||
ZSH_COLORIZE_PLUGIN_PATH=$0:A
|
||||
|
||||
colorize_check_requirements() {
|
||||
local available_tools=("chroma" "pygmentize")
|
||||
local -a available_tools
|
||||
available_tools=("chroma" "pygmentize")
|
||||
|
||||
if [ -z "$ZSH_COLORIZE_TOOL" ]; then
|
||||
if (( $+commands[pygmentize] )); then
|
||||
|
|
@ -81,7 +82,7 @@ colorize_less() {
|
|||
# This variable tells less to pipe every file through the specified command
|
||||
# (see the man page of less INPUT PREPROCESSOR).
|
||||
# 'zsh -ic "colorize_cat %s 2> /dev/null"' would not work for huge files like
|
||||
# the ~/.zsh_history. For such files the tty of the preprocessor will be supended.
|
||||
# the ~/.zsh_history. For such files the tty of the preprocessor will be suspended.
|
||||
# Therefore we must source this file to make colorize_cat available in the
|
||||
# preprocessor without the interactive mode.
|
||||
# `2>/dev/null` will suppress the error for large files 'broken pipe' of the python
|
||||
|
|
@ -95,7 +96,7 @@ colorize_less() {
|
|||
# which assumes that his LESSOPEN has been executed.
|
||||
local LESSCLOSE=""
|
||||
|
||||
LESS="$LESS" LESSOPEN="$LESSOPEN" LESSCLOSE="$LESSCLOSE" less "$@"
|
||||
LESS="$LESS" LESSOPEN="$LESSOPEN" LESSCLOSE="$LESSCLOSE" command less "$@"
|
||||
}
|
||||
|
||||
if [ -t 0 ]; then
|
||||
|
|
|
|||
|
|
@ -28,5 +28,7 @@ It works out of the box with the command-not-found packages for:
|
|||
- [macOS (Homebrew)](https://github.com/Homebrew/homebrew-command-not-found)
|
||||
- [Fedora](https://fedoraproject.org/wiki/Features/PackageKitCommandNotFound)
|
||||
- [NixOS](https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found)
|
||||
- [Termux](https://github.com/termux/command-not-found)
|
||||
- [SUSE](https://www.unix.com/man-page/suse/1/command-not-found/)
|
||||
|
||||
You can add support for other platforms by submitting a Pull Request.
|
||||
|
|
|
|||
|
|
@ -1,38 +1,69 @@
|
|||
# Uses the command-not-found package zsh support
|
||||
# as seen in https://www.porcheron.info/command-not-found-for-zsh/
|
||||
# this is installed in Ubuntu
|
||||
## Platforms with a built-in command-not-found handler init file
|
||||
|
||||
[[ -e /etc/zsh_command_not_found ]] && source /etc/zsh_command_not_found
|
||||
for file (
|
||||
# Arch Linux. Must have pkgfile installed: https://wiki.archlinux.org/index.php/Pkgfile#Command_not_found
|
||||
/usr/share/doc/pkgfile/command-not-found.zsh
|
||||
# macOS (M1 and classic Homebrew): https://github.com/Homebrew/homebrew-command-not-found
|
||||
/opt/homebrew/Library/Taps/homebrew/homebrew-command-not-found/handler.sh
|
||||
/usr/local/Homebrew/Library/Taps/homebrew/homebrew-command-not-found/handler.sh
|
||||
); do
|
||||
if [[ -r "$file" ]]; then
|
||||
source "$file"
|
||||
unset file
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
unset file
|
||||
|
||||
# Arch Linux command-not-found support, you must have package pkgfile installed
|
||||
# https://wiki.archlinux.org/index.php/Pkgfile#.22Command_not_found.22_hook
|
||||
[[ -e /usr/share/doc/pkgfile/command-not-found.zsh ]] && source /usr/share/doc/pkgfile/command-not-found.zsh
|
||||
|
||||
# Fedora command-not-found support
|
||||
if [ -f /usr/libexec/pk-command-not-found ]; then
|
||||
command_not_found_handler () {
|
||||
runcnf=1
|
||||
retval=127
|
||||
[ ! -S /var/run/dbus/system_bus_socket ] && runcnf=0
|
||||
[ ! -x /usr/libexec/packagekitd ] && runcnf=0
|
||||
if [ $runcnf -eq 1 ]
|
||||
then
|
||||
/usr/libexec/pk-command-not-found $@
|
||||
retval=$?
|
||||
fi
|
||||
return $retval
|
||||
}
|
||||
## Platforms with manual command_not_found_handler() setup
|
||||
|
||||
# Debian and derivatives: https://launchpad.net/ubuntu/+source/command-not-found
|
||||
if [[ -x /usr/lib/command-not-found || -x /usr/share/command-not-found/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
if [[ -x /usr/lib/command-not-found ]]; then
|
||||
/usr/lib/command-not-found -- "$1"
|
||||
return $?
|
||||
elif [[ -x /usr/share/command-not-found/command-not-found ]]; then
|
||||
/usr/share/command-not-found/command-not-found -- "$1"
|
||||
return $?
|
||||
else
|
||||
printf "zsh: command not found: %s\n" "$1" >&2
|
||||
return 127
|
||||
fi
|
||||
}
|
||||
fi
|
||||
|
||||
# OSX command-not-found support
|
||||
# https://github.com/Homebrew/homebrew-command-not-found
|
||||
if [[ -s '/usr/local/Homebrew/Library/Taps/homebrew/homebrew-command-not-found/handler.sh' ]]; then
|
||||
source '/usr/local/Homebrew/Library/Taps/homebrew/homebrew-command-not-found/handler.sh'
|
||||
# Fedora: https://fedoraproject.org/wiki/Features/PackageKitCommandNotFound
|
||||
if [[ -x /usr/libexec/pk-command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
if [[ -S /var/run/dbus/system_bus_socket && -x /usr/libexec/packagekitd ]]; then
|
||||
/usr/libexec/pk-command-not-found "$@"
|
||||
return $?
|
||||
fi
|
||||
|
||||
printf "zsh: command not found: %s\n" "$1" >&2
|
||||
return 127
|
||||
}
|
||||
fi
|
||||
|
||||
# NixOS command-not-found support
|
||||
if [ -x /run/current-system/sw/bin/command-not-found ]; then
|
||||
command_not_found_handler () {
|
||||
/run/current-system/sw/bin/command-not-found $@
|
||||
}
|
||||
# NixOS: https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found
|
||||
if [[ -x /run/current-system/sw/bin/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/run/current-system/sw/bin/command-not-found "$@"
|
||||
}
|
||||
fi
|
||||
|
||||
# Termux: https://github.com/termux/command-not-found
|
||||
if [[ -x /data/data/com.termux/files/usr/libexec/termux/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/data/data/com.termux/files/usr/libexec/termux/command-not-found "$1"
|
||||
}
|
||||
fi
|
||||
|
||||
# SUSE and derivates: https://www.unix.com/man-page/suse/1/command-not-found/
|
||||
if [[ -x /usr/bin/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/usr/bin/command-not-found "$1"
|
||||
}
|
||||
fi
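
# The README invites pull requests for other platforms. A hypothetical new
# entry would follow the same shape as the handlers above (the path below is
# purely illustrative, not a real tool):
#
#   if [[ -x /path/to/platform/command-not-found ]]; then
#     command_not_found_handler() {
#       /path/to/platform/command-not-found "$1"
#     }
#   fi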
|
||||
|
|
|
|||
|
|
@ -12,49 +12,55 @@ plugins=(... common-aliases)
|
|||
|
||||
### ls command
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|---------------|--------------------------------------------------------------------------------|
|
||||
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
|
||||
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
|
||||
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
|
||||
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
|
||||
| ll | `ls -l` | List files as a long list |
|
||||
| ldot | `ls -ld .*` | List dot files as a long list |
|
||||
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
|
||||
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
|
||||
| lrt | `ls -1Fcrt` | List files sorted in reverse of create/modification time(oldest first) |
|
||||
| Alias | Command | Description |
|
||||
| ----- | ------------ | --------------------------------------------------------------------------- |
|
||||
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
|
||||
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
|
||||
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
|
||||
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
|
||||
| ll | `ls -l` | List files as a long list |
|
||||
| ldot | `ls -ld .*` | List dot files as a long list |
|
||||
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
|
||||
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
|
||||
| lrt | `ls -1Fcrt` | List files sorted in reverse of create/modification time(oldest first) |
|
||||
| lsr | `ls -lARFh` | List all files and directories recursively |
|
||||
| lsn | `ls -1` | List files and directories in a single column |
|
||||
|
||||
### File handling
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------|------------------------------------------------------------------------------------|
|
||||
| rm | `rm -i` | Remove a file |
|
||||
| cp | `cp -i` | Copy a file |
|
||||
| mv | `mv -i` | Move a file |
|
||||
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
|
||||
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
|
||||
| duf | `du -sh` | Display the size of files in current location in human-readable form |
|
||||
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
|
||||
| Alias | Command | Description |
|
||||
| ----- | --------------------- | ------------------------------------------------------------------------------- |
|
||||
| rm | `rm -i` | Remove a file |
|
||||
| cp | `cp -i` | Copy a file |
|
||||
| mv | `mv -i` | Move a file |
|
||||
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
|
||||
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
|
||||
| duf\* | `du -sh` | Display the size of files in current location in human-readable form |
|
||||
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
|
||||
|
||||
\* Only if the [`duf`](https://github.com/muesli/duf) command isn't installed.
|
||||
|
||||
### find and grep
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------------------------------------|-----------------------------------------|
|
||||
| fd | `find . -type d -name` | Find a directory with the given name |
|
||||
| ff | `find . -type f -name` | Find a file with the given name |
|
||||
| grep | `grep --color` | Searches for a query string |
|
||||
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
|
||||
| Alias | Command | Description |
|
||||
| ----- | -------------------------------------------------- | ------------------------------------ |
|
||||
| fd\* | `find . -type d -name` | Find a directory with the given name |
|
||||
| ff | `find . -type f -name` | Find a file with the given name |
|
||||
| grep | `grep --color` | Searches for a query string |
|
||||
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
|
||||
|
||||
\* Only if the [`fd`](https://github.com/sharkdp/fd) command isn't installed.
|
||||
|
||||
### Other Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-----------|---------------------|-------------------------------------------------------------|
|
||||
| h | `history` | Lists all recently used commands |
|
||||
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
|
||||
| help | `man` | Opens up the man page for a command |
|
||||
| p | `ps -f` | Displays currently executing processes |
|
||||
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
|
||||
| unexport | `unset` | Used to unset an environment variable |
|
||||
| Alias | Command | Description |
|
||||
| -------- | ------------------ | ----------------------------------------------------------- |
|
||||
| h | `history` | Lists all recently used commands |
|
||||
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
|
||||
| help | `man` | Opens up the man page for a command |
|
||||
| p | `ps -f` | Displays currently executing processes |
|
||||
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
|
||||
| unexport | `unset` | Used to unset an environment variable |
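
A couple of these in practice (the arguments are only illustrative):

```zsh
hgrep rsync       # runs: fc -El 0 | grep rsync   (search history for rsync)
sortnr sizes.txt  # runs: sort -n -r sizes.txt    (numeric sort, largest first)
```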
|
||||
|
||||
## Global aliases
|
||||
|
||||
|
|
@ -62,12 +68,15 @@ These aliases are expanded in any position in the command line, meaning you can
|
|||
end of the command you've typed. Examples:
|
||||
|
||||
Quickly pipe to less:
|
||||
|
||||
```zsh
|
||||
$ ls -l /var/log L
|
||||
# will run
|
||||
$ ls -l /var/log | less
|
||||
```
|
||||
|
||||
Silences stderr output:
|
||||
|
||||
```zsh
|
||||
$ find . -type f NE
|
||||
# will run
|
||||
|
|
@ -75,7 +84,7 @@ $ find . -type f 2>/dev/null
|
|||
```
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------------|-------------------------------------------------------------|
|
||||
| ----- | --------------------------- | ----------------------------------------------------------- |
|
||||
| H | `\| head` | Pipes output to head which outputs the first part of a file |
|
||||
| T | `\| tail` | Pipes output to tail which outputs the last part of a file |
|
||||
| G | `\| grep` | Pipes output to grep to search for some word |
|
||||
|
|
@ -95,23 +104,23 @@ that file will be open with `acroread`.
|
|||
|
||||
### Reading Docs
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-------------|-------------------------------------|
|
||||
| pdf | `acroread` | Opens up a document using acroread |
|
||||
| ps | `gv` | Opens up a .ps file using gv |
|
||||
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
|
||||
| chm | `xchm` | Opens up a .chm file using xchm |
|
||||
| djvu | `djview` | Opens up a .djvu file using djview |
|
||||
| Alias | Command | Description |
|
||||
| ----- | ---------- | ---------------------------------- |
|
||||
| pdf | `acroread` | Opens up a document using acroread |
|
||||
| ps | `gv` | Opens up a .ps file using gv |
|
||||
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
|
||||
| chm | `xchm` | Opens up a .chm file using xchm |
|
||||
| djvu | `djview` | Opens up a .djvu file using djview |
|
||||
|
||||
### Listing files inside a packed file
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|-------------|-------------------------------------|
|
||||
| zip | `unzip -l` | Lists files inside a .zip file |
|
||||
| rar | `unrar l` | Lists files inside a .rar file |
|
||||
| tar | `tar tf` | Lists files inside a .tar file |
|
||||
| tar.gz | `echo` | Lists files inside a .tar.gz file |
|
||||
| ace | `unace l` | Lists files inside a .ace file |
|
||||
| Alias | Command | Description |
|
||||
| ------ | ---------- | --------------------------------- |
|
||||
| zip | `unzip -l` | Lists files inside a .zip file |
|
||||
| rar | `unrar l` | Lists files inside a .rar file |
|
||||
| tar | `tar tf` | Lists files inside a .tar file |
|
||||
| tar.gz | `echo` | Lists files inside a .tar.gz file |
|
||||
| ace | `unace l` | Lists files inside a .ace file |
|
||||
|
||||
### Some other features
|
||||
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ alias ldot='ls -ld .*'
|
|||
alias lS='ls -1FSsh'
|
||||
alias lart='ls -1Fcart'
|
||||
alias lrt='ls -1Fcrt'
|
||||
alias lsr='ls -lARFh' # Recursive list of files and directories
alias lsn='ls -1' # List files and directories in a single column
|
||||
|
||||
alias zshrc='${=EDITOR} ${ZDOTDIR:-$HOME}/.zshrc' # Quick access to the .zshrc file
|
||||
|
||||
|
|
@ -33,8 +35,8 @@ alias -g NUL="> /dev/null 2>&1"
|
|||
alias -g P="2>&1| pygmentize -l pytb"
|
||||
|
||||
alias dud='du -d 1 -h'
|
||||
alias duf='du -sh *'
|
||||
alias fd='find . -type d -name'
|
||||
(( $+commands[duf] )) || alias duf='du -sh *'
|
||||
(( $+commands[fd] )) || alias fd='find . -type d -name'
|
||||
alias ff='find . -type f -name'
|
||||
|
||||
alias h='history'
|
||||
|
|
|
|||
|
|
@ -10,8 +10,11 @@ if (( ${+commands[compleat]} )); then
|
|||
local setup="${prefix}/share/compleat-1.0/compleat_setup"
|
||||
|
||||
if [[ -f "$setup" ]]; then
|
||||
if ! bashcompinit >/dev/null 2>&1; then
|
||||
autoload -U bashcompinit
|
||||
bashcompinit -i
|
||||
fi
|
||||
|
||||
source "$setup"
|
||||
fi
|
||||
|
||||
unset prefix setup
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -10,22 +10,26 @@ To use it add `composer` to the plugins array in your zshrc file.
|
|||
plugins=(... composer)
|
||||
```
|
||||
|
||||
Original author: Daniel Gomes <me@danielcsgomes.com>
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
| ------ | ------------------------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| `c` | `composer` | Starts composer |
|
||||
| `csu` | `composer self-update` | Updates composer to the latest version |
|
||||
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
|
||||
| `cr` | `composer require` | Adds new packages to `composer.json` |
|
||||
| `crm` | `composer remove` | Removes packages from `composer.json` |
|
||||
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
|
||||
| `ccp` | `composer create-project` | Create new project from an existing package |
|
||||
| `cdu` | `composer dump-autoload` | Updates the autoloader |
|
||||
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
|
||||
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
|
||||
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
|
||||
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
|
||||
| `cget` | `curl -s https://getcomposer.org/installer` | Installs composer in the current directory |
|
||||
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
|
||||
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
|
||||
| Alias | Command | Description |
|
||||
| ------ | ---------------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| `c` | `composer` | Starts composer |
|
||||
| `ccp` | `composer create-project` | Create new project from an existing package |
|
||||
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
|
||||
| `cdu` | `composer dump-autoload` | Updates the autoloader |
|
||||
| `cget` | `curl -s <installer> \| php` | Installs composer in the current directory |
|
||||
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
|
||||
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
|
||||
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
|
||||
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
|
||||
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
|
||||
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
|
||||
| `cr` | `composer require` | Adds new packages to `composer.json` |
|
||||
| `crm` | `composer remove` | Removes packages from `composer.json` |
|
||||
| `cs` | `composer show` | Lists available packages, with optional filtering |
|
||||
| `csu` | `composer self-update` | Updates composer to the latest version |
|
||||
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
|
||||
| `cuh` | `composer update -d <config-home>` | Updates globally installed packages |
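
For example, with these aliases loaded (the package names are only illustrative):

```zsh
cr monolog/monolog    # composer require monolog/monolog
cgr phpunit/phpunit   # composer global require phpunit/phpunit
cod                   # composer outdated --direct
```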
|
||||
|
|
|
|||
|
|
@ -1,70 +1,76 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
# FILE: composer.plugin.zsh
|
||||
# DESCRIPTION: oh-my-zsh composer plugin file.
|
||||
# AUTHOR: Daniel Gomes (me@danielcsgomes.com)
|
||||
# VERSION: 1.0.0
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# Composer basic command completion
|
||||
_composer_get_command_list () {
|
||||
$_comp_command1 --no-ansi 2>/dev/null | sed "1,/Available commands/d" | awk '/^[ \t]*[a-z]+/ { print $1 }'
|
||||
}
|
||||
|
||||
_composer_get_required_list () {
|
||||
$_comp_command1 show -s --no-ansi 2>/dev/null | sed '1,/requires/d' | awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }'
|
||||
}
|
||||
|
||||
_composer () {
|
||||
## Basic Composer command completion
|
||||
# Since Zsh 5.7, an improved composer command completion is provided
|
||||
if ! is-at-least 5.7; then
|
||||
_composer () {
|
||||
local curcontext="$curcontext" state line
|
||||
typeset -A opt_args
|
||||
_arguments \
|
||||
'*:: :->subcmds'
|
||||
_arguments '*:: :->subcmds'
|
||||
|
||||
if (( CURRENT == 1 )) || ( ((CURRENT == 2)) && [ "$words[1]" = "global" ] ) ; then
|
||||
compadd $(_composer_get_command_list)
|
||||
if (( CURRENT == 1 )) || ( (( CURRENT == 2 )) && [[ "$words[1]" = "global" ]] ); then
|
||||
# Command list
|
||||
local -a subcmds
|
||||
subcmds=("${(@f)"$($_comp_command1 --no-ansi 2>/dev/null | awk '
|
||||
/Available commands/{ r=1 }
|
||||
r == 1 && /^[ \t]*[a-z]+/{
|
||||
gsub(/^[ \t]+/, "")
|
||||
gsub(/ +/, ":")
|
||||
print $0
|
||||
}
|
||||
')"}")
|
||||
_describe -t commands 'composer command' subcmds
|
||||
else
|
||||
compadd $(_composer_get_required_list)
|
||||
# Required list
|
||||
compadd $($_comp_command1 show -s --no-ansi 2>/dev/null \
|
||||
| sed '1,/requires/d' \
|
||||
| awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }')
|
||||
fi
|
||||
}
|
||||
}
|
||||
|
||||
compdef _composer composer
|
||||
compdef _composer composer.phar
|
||||
compdef _composer composer
|
||||
compdef _composer composer.phar
|
||||
fi
|
||||
|
||||
# Aliases
|
||||
|
||||
## Aliases
|
||||
alias c='composer'
|
||||
alias csu='composer self-update'
|
||||
alias cu='composer update'
|
||||
alias cr='composer require'
|
||||
alias crm='composer remove'
|
||||
alias ci='composer install'
|
||||
alias ccp='composer create-project'
|
||||
alias cdu='composer dump-autoload'
|
||||
alias cdo='composer dump-autoload -o'
|
||||
alias cgu='composer global update'
|
||||
alias cdu='composer dump-autoload'
|
||||
alias cget='curl -s https://getcomposer.org/installer | php'
|
||||
alias cgr='composer global require'
|
||||
alias cgrm='composer global remove'
|
||||
alias cgu='composer global update'
|
||||
alias ci='composer install'
|
||||
alias co='composer outdated'
|
||||
alias cod='composer outdated --direct'
|
||||
alias cr='composer require'
|
||||
alias crm='composer remove'
|
||||
alias cs='composer show'
|
||||
alias csu='composer self-update'
|
||||
alias cu='composer update'
|
||||
alias cuh='composer update --working-dir=$(composer config -g home)'
|
||||
|
||||
# install composer in the current directory
|
||||
alias cget='curl -s https://getcomposer.org/installer | php'
|
||||
|
||||
# Add Composer's global binaries to PATH, using Composer if available.
|
||||
if (( $+commands[composer] )); then
|
||||
autoload -Uz _store_cache _retrieve_cache
|
||||
## If Composer not found, try to add known directories to $PATH
|
||||
if (( ! $+commands[composer] )); then
|
||||
[[ -d "$HOME/.composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.composer/vendor/bin"
|
||||
[[ -d "$HOME/.config/composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.config/composer/vendor/bin"
|
||||
|
||||
_retrieve_cache composer
|
||||
|
||||
if [[ -z $__composer_bin_dir ]]; then
|
||||
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
|
||||
_store_cache composer __composer_bin_dir
|
||||
fi
|
||||
|
||||
# Add Composer's global binaries to PATH
|
||||
export PATH="$PATH:$__composer_bin_dir"
|
||||
|
||||
unset __composer_bin_dir
|
||||
else
|
||||
[ -d $HOME/.composer/vendor/bin ] && export PATH=$PATH:$HOME/.composer/vendor/bin
|
||||
[ -d $HOME/.config/composer/vendor/bin ] && export PATH=$PATH:$HOME/.config/composer/vendor/bin
|
||||
# If still not found, don't do the rest of the script
|
||||
(( $+commands[composer] )) || return 0
|
||||
fi
|
||||
|
||||
|
||||
## Add Composer's global binaries to PATH
|
||||
autoload -Uz _store_cache _retrieve_cache _cache_invalid
|
||||
_retrieve_cache composer
|
||||
|
||||
if [[ -z $__composer_bin_dir ]]; then
|
||||
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
|
||||
_store_cache composer __composer_bin_dir
|
||||
fi
|
||||
|
||||
# Add Composer's global binaries to PATH
|
||||
export PATH="$PATH:$__composer_bin_dir"
|
||||
|
||||
unset __composer_bin_dir
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
# `copybuffer` plugin
|
||||
|
||||
This plugin binds the ctrl-o keyboard shortcut to a command that copies the text
|
||||
that is currently typed in the command line ($BUFFER) to the system clipboard.
|
||||
This plugin adds the <kbd>ctrl-o</kbd> keyboard shortcut to copy the current text
|
||||
in the command line to the system clipboard.
|
||||
|
||||
This is useful if you have typed a command and, before hitting enter to execute it, want
to copy it so you can paste it into a script, a gist, or elsewhere.
|
||||
|
|
|
|||
|
|
@ -5,10 +5,12 @@ copybuffer () {
|
|||
if which clipcopy &>/dev/null; then
|
||||
printf "%s" "$BUFFER" | clipcopy
|
||||
else
|
||||
echo "clipcopy function not found. Please make sure you have Oh My Zsh installed correctly."
|
||||
zle -M "clipcopy not found. Please make sure you have Oh My Zsh installed correctly."
|
||||
fi
|
||||
}
|
||||
|
||||
zle -N copybuffer
|
||||
|
||||
bindkey "^O" copybuffer
|
||||
bindkey -M emacs "^O" copybuffer
|
||||
bindkey -M viins "^O" copybuffer
|
||||
bindkey -M vicmd "^O" copybuffer
|
||||
|
|
|
|||
|
|
@ -1,10 +0,0 @@
|
|||
# copydir plugin
|
||||
|
||||
Copies the path of your current folder to the system clipboard.
|
||||
|
||||
To use, add `copydir` to your plugins array:
|
||||
```
|
||||
plugins=(... copydir)
|
||||
```
|
||||
|
||||
Then use the command `copydir` to copy the $PWD.
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
# Copies the pathname of the current directory to the system or X Windows clipboard
|
||||
function copydir {
|
||||
emulate -L zsh
|
||||
print -n $PWD | clipcopy
|
||||
}
|
||||
15
plugins/copypath/README.md
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# copypath plugin
|
||||
|
||||
Copies the path of given directory or file to the system clipboard.
|
||||
|
||||
To use it, add `copypath` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... copypath)
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- `copypath`: copies the absolute path of the current directory.
|
||||
|
||||
- `copypath <file_or_directory>`: copies the absolute path of the given file.
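
For example (the path shown is only illustrative):

```zsh
$ copypath README.md
/home/user/projects/example/README.md copied to clipboard.
```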
|
||||
15
plugins/copypath/copypath.plugin.zsh
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# Copies the path of given directory or file to the system or X Windows clipboard.
|
||||
# Copy current directory if no parameter.
|
||||
function copypath {
|
||||
# If no argument passed, use current directory
|
||||
local file="${1:-.}"
|
||||
|
||||
# If argument is not an absolute path, prepend $PWD
|
||||
[[ $file = /* ]] || file="$PWD/$file"
|
||||
|
||||
# Copy the absolute path without resolving symlinks
|
||||
# If clipcopy fails, exit the function with an error
|
||||
print -n "${file:a}" | clipcopy || return 1
|
||||
|
||||
echo ${(%):-"%B${file:a}%b copied to clipboard."}
|
||||
}
|
||||
|
|
@ -25,7 +25,7 @@ The enabled options for rsync are:
|
|||
|
||||
* `-hhh`: outputs numbers in human-readable format, in units of 1024 (K, M, G, T).
|
||||
|
||||
* `--backup-dir=/tmp/rsync`: move backup copies to "/tmp/rsync".
|
||||
* `--backup-dir="/tmp/rsync-$USERNAME"`: move backup copies to "/tmp/rsync-$USERNAME".
|
||||
|
||||
* `-e /dev/null`: only work on local files (disable remote shells).
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
cpv() {
|
||||
rsync -pogbr -hhh --backup-dir=/tmp/rsync -e /dev/null --progress "$@"
|
||||
rsync -pogbr -hhh --backup-dir="/tmp/rsync-${USERNAME}" -e /dev/null --progress "$@"
|
||||
}
|
||||
compdef _files cpv
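
# Example usage (illustrative paths): behaves like `cp`, but with rsync's
# progress output:
#   cpv big-dataset.tar.gz /mnt/backup/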
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ arguments=(
|
|||
'--reinstall[Reinstall the distribution even if you already have the latest version installed]'
|
||||
'--interactive[Turn on interactive configure]'
|
||||
|
||||
'--scandeps[Scan the depencencies of given modules and output the tree in a text format]'
|
||||
'--scandeps[Scan the dependencies of given modules and output the tree in a text format]'
|
||||
'--format[Specify what format to display the scanned dependency tree]:scandeps format:(tree json yaml dists)'
|
||||
|
||||
'--save-dists[Specify the optional directory path to copy downloaded tarballs]'
|
||||
|
|
|
|||
|
|
@ -1,80 +1,84 @@
|
|||
# Usage: dash [keyword:]query
|
||||
dash() { open dash://"$*" }
|
||||
dash() { open -a Dash.app dash://"$*" }
|
||||
compdef _dash dash
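
# Example queries (illustrative; available keywords depend on the docsets
# enabled in Dash):
#   dash python3:datetime
#   dash css:flexbox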
|
||||
|
||||
_dash() {
|
||||
# No sense doing this for anything except the 2nd position and if we haven't
|
||||
# specified which docset to query against
|
||||
if [[ $CURRENT -eq 2 && ! "$words[2]" =~ ":" ]]; then
|
||||
local -a _all_docsets
|
||||
_all_docsets=()
|
||||
# Use defaults to get the array of docsets from preferences
|
||||
# Have to smash it into one big line so that each docset is an element of
|
||||
# our DOCSETS array
|
||||
DOCSETS=("${(@f)$(defaults read com.kapeli.dashdoc docsets | tr -d '\n' | grep -oE '\{.*?\}')}")
|
||||
if [[ $CURRENT -ne 2 || "$words[2]" =~ ":" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
# remove all newlines since defaults prints so pretty like
|
||||
# Now get each docset and output each on their own line
|
||||
for doc in "$DOCSETS[@]"; do
|
||||
# Only output docsets that are actually enabled
|
||||
if [[ "`echo $doc | grep -Eo \"isEnabled = .*?;\" | sed 's/[^01]//g'`" == "0" ]]; then
|
||||
local -aU docsets
|
||||
docsets=()
|
||||
|
||||
# Use defaults to get the array of docsets from preferences
|
||||
# Have to smash it into one big line so that each docset is an element of our docsets array
|
||||
# Only output docsets that are actually enabled
|
||||
local -a enabled_docsets
|
||||
enabled_docsets=("${(@f)$(defaults read com.kapeli.dashdoc docsets \
|
||||
| tr -d '\n' | grep -oE '\{.*?\}' | grep -E 'isEnabled = 1;')}")
|
||||
|
||||
local docset name keyword
|
||||
# Now get each docset and output each on their own line
|
||||
for docset in "$enabled_docsets[@]"; do
|
||||
keyword=''
|
||||
# Order of preference as explained to me by @kapeli via email
|
||||
for locator in keyword suggestedKeyword platform; do
|
||||
# Echo the docset, try to find the appropriate keyword
|
||||
# Strip doublequotes and colon from any keyword so that everything has the
|
||||
# same format when output (we'll add the colon in the completion)
|
||||
if [[ "$docset" =~ "$locator = ([^;]*);" ]]; then
|
||||
keyword="${match[1]//[\":]}"
|
||||
fi
|
||||
|
||||
if [[ -z "$keyword" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
keyword=''
|
||||
|
||||
# Order of preference as explained to me by @kapeli via email
|
||||
KEYWORD_LOCATORS=(keyword suggestedKeyword platform)
|
||||
for locator in "$KEYWORD_LOCATORS[@]"; do
|
||||
# Echo the docset, try to find the appropriate keyword
|
||||
# Strip doublequotes and colon from any keyword so that everything has the
|
||||
# same format when output (we'll add the colon in the completion)
|
||||
keyword=`echo $doc | grep -Eo "$locator = .*?;" | sed -e "s/$locator = \(.*\);/\1/" -e "s/[\":]//g"`
|
||||
if [[ ! -z "$keyword" ]]; then
|
||||
# if we fall back to platform, we should do some checking per @kapeli
|
||||
if [[ "$locator" == "platform" ]]; then
|
||||
# Since these are the only special cases right now, let's not do the
|
||||
# expensive processing unless we have to
|
||||
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
|
||||
docsetName=`echo $doc | grep -Eo "docsetName = .*?;" | sed -e "s/docsetName = \(.*\);/\1/" -e "s/[\":]//g"`
|
||||
case "$keyword" in
|
||||
python)
|
||||
case "$docsetName" in
|
||||
"Python 2") keyword="python2" ;;
|
||||
"Python 3") keyword="python3" ;;
|
||||
esac ;;
|
||||
java)
|
||||
case "$docsetName" in
|
||||
"Java SE7") keyword="java7" ;;
|
||||
"Java SE6") keyword="java6" ;;
|
||||
"Java SE8") keyword="java8" ;;
|
||||
esac ;;
|
||||
qt)
|
||||
case "$docsetName" in
|
||||
"Qt 5") keyword="qt5" ;;
|
||||
"Qt 4"|Qt) keyword="qt4" ;;
|
||||
esac ;;
|
||||
cocos2d)
|
||||
case "$docsetName" in
|
||||
Cocos3D) keyword="cocos3d" ;;
|
||||
esac ;;
|
||||
esac
|
||||
fi
|
||||
# if we fall back to platform, we should do some checking per @kapeli
|
||||
if [[ "$locator" == "platform" ]]; then
|
||||
# Since these are the only special cases right now, let's not do the
|
||||
# expensive processing unless we have to
|
||||
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
|
||||
if [[ "$docset" =~ "docsetName = ([^;]*);" ]]; then
|
||||
name="${match[1]//[\":]}"
|
||||
case "$keyword" in
|
||||
python)
|
||||
case "$name" in
|
||||
"Python 2") keyword="python2" ;;
|
||||
"Python 3") keyword="python3" ;;
|
||||
esac ;;
|
||||
java)
|
||||
case "$name" in
|
||||
"Java SE7") keyword="java7" ;;
|
||||
"Java SE6") keyword="java6" ;;
|
||||
"Java SE8") keyword="java8" ;;
|
||||
esac ;;
|
||||
qt)
|
||||
case "$name" in
|
||||
"Qt 5") keyword="qt5" ;;
|
||||
"Qt 4"|Qt) keyword="qt4" ;;
|
||||
esac ;;
|
||||
cocos2d)
|
||||
case "$name" in
|
||||
Cocos3D) keyword="cocos3d" ;;
|
||||
esac ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Bail once we have a match
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# If we have a keyword, add it to the list!
|
||||
if [[ ! -z "$keyword" ]]; then
|
||||
_all_docsets+=($keyword)
|
||||
fi
|
||||
|
||||
# Bail once we have a match
|
||||
break
|
||||
done
|
||||
|
||||
# special thanks to [arx] on #zsh for getting me sorted on this piece
|
||||
compadd -qS: -- "$_all_docsets[@]"
|
||||
return
|
||||
fi
|
||||
# If we have a keyword, add it to the list!
|
||||
if [[ -n "$keyword" ]]; then
|
||||
docsets+=($keyword)
|
||||
fi
|
||||
done
|
||||
|
||||
# special thanks to [arx] on #zsh for getting me sorted on this piece
|
||||
compadd -qS: -- "$docsets[@]"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ plugins=(... debian)
|
|||
|
||||
## Settings
|
||||
|
||||
- `$apt_pref`: use apt or aptitude if installed, fallback is apt-get.
|
||||
- `$apt_pref`: use aptitude or apt if installed, fallback is apt-get.
|
||||
- `$apt_upgr`: use upgrade or safe-upgrade (for aptitude).
|
||||
|
||||
Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh My Zsh) to override this behavior.
|
||||
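A minimal sketch of overriding these variables (not part of the upstream README; the values are only examples of valid subcommands):

```zsh
# In ~/.zshrc, before Oh My Zsh is sourced:
apt_pref='apt-get'       # force apt-get even if aptitude/apt are installed
apt_upgr='dist-upgrade'  # example: use dist-upgrade instead of upgrade

plugins=(... debian)
source $ZSH/oh-my-zsh.sh
```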
|
|
@ -30,30 +30,32 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
|
|||
|
||||
## Superuser Operations Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
| -------- | -------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- |
|
||||
| `aac` | `sudo $apt_pref autoclean` | Clears out the local repository of retrieved package files |
|
||||
| `abd` | `sudo $apt_pref build-dep` | Installs all dependencies for building packages |
|
||||
| `ac` | `sudo $apt_pref clean` | Clears out the local repository of retrieved package files except lock files |
|
||||
| `ad` | `sudo $apt_pref update` | Updates the package lists for upgrades for packages |
|
||||
| `adg` | `sudo $apt_pref update && sudo $apt_pref $apt_upgr` | Update and upgrade packages |
|
||||
| `adu` | `sudo $apt_pref update && sudo $apt_pref dist-upgrade` | Smart upgrade that handles dependencies |
|
||||
| `afu` | `sudo apt-file update` | Update the files in packages |
|
||||
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
|
||||
| `ai` | `sudo $apt_pref install` | Command-line tool to install package |
|
||||
| `ail` | `sed -e 's/ */ /g' -e 's/ *//' \| cut -s -d ' ' -f 1 \| xargs sudo $apt_pref install` | Install all packages given on the command line while using only the first word of each line |
|
||||
| `ap` | `sudo $apt_pref purge` | Removes packages along with configuration files |
|
||||
| `ar` | `sudo $apt_pref remove` | Removes packages, keeps the configuration files |
|
||||
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
|
||||
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
|
||||
| `di` | `sudo dpkg -i` | Install all .deb files in the current directory |
|
||||
| `kclean` | `sudo aptitude remove -P ?and(~i~nlinux-(ima\|hea) ?not(~n$(uname -r)))` | Remove ALL kernel images and headers EXCEPT the one in use |
|
||||
| Alias | Command | Description |
|
||||
| -------- | ------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- |
|
||||
| `aac` | `sudo $apt_pref autoclean` | Clears out the local repository of retrieved package files |
|
||||
| `aar` | `sudo $apt_pref autoremove` | Removes packages installed automatically that are no longer needed |
|
||||
| `abd` | `sudo $apt_pref build-dep` | Installs all dependencies for building packages |
|
||||
| `ac` | `sudo $apt_pref clean` | Clears out the local repository of retrieved package files except lock files |
|
||||
| `ad` | `sudo $apt_pref update` | Updates the package lists for upgrades for packages |
|
||||
| `adg` | `sudo $apt_pref update && sudo $apt_pref $apt_upgr` | Update and upgrade packages |
|
||||
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
|
||||
| `adu` | `sudo $apt_pref update && sudo $apt_pref dist-upgrade` | Smart upgrade that handles dependencies |
|
||||
| `afu` | `sudo apt-file update` | Update the files in packages |
|
||||
| `ai` | `sudo $apt_pref install` | Command-line tool to install package |
|
||||
| `ail` | `sed -e 's/ */ /g' -e 's/ *//' \| cut -s -d ' ' -f 1 \| xargs sudo $apt_pref install` | Install all packages given on the command line while using only the first word of each line |
|
||||
| `alu` | `sudo apt update && apt list -u && sudo apt upgrade` | Update, list and upgrade packages |
|
||||
| `ap` | `sudo $apt_pref purge` | Removes packages along with configuration files |
|
||||
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
|
||||
| `di` | `sudo dpkg -i` | Install all .deb files in the current directory |
|
||||
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
|
||||
| `kclean` | `sudo aptitude remove -P ?and(~i~nlinux-(ima\|hea) ?not(~n$(uname -r)))` | Remove ALL kernel images and headers EXCEPT the one in use |
|
||||
|
||||
## Aliases - Commands using `su`
|
||||
|
||||
| Alias | Command |
|
||||
| ----- | --------------------------------------------------------- |
|
||||
| `aac` | `su -ls "$apt_pref autoclean" root` |
|
||||
| `aar` | `su -ls "$apt_pref autoremove" root` |
|
||||
| `ac` | `su -ls "$apt_pref clean" root` |
|
||||
| `ad` | `su -lc "$apt_pref update" root` |
|
||||
| `adg` | `su -lc "$apt_pref update && aptitude $apt_upgr" root` |
|
||||
|
|
@ -75,8 +77,8 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
|
|||
| ------------------- | --------------------------------------------------------------- |
|
||||
| `apt-copy` | Create a simple script that can be used to 'duplicate' a system |
|
||||
| `apt-history` | Displays apt history for a command |
|
||||
| `kerndeb` | Builds kernel packages |
|
||||
| `apt-list-packages` | List packages by size |
|
||||
| `kerndeb` | Builds kernel packages |
|
||||
|
||||
## Authors
|
||||
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
# Use apt or aptitude if installed, fallback is apt-get
|
||||
# Use aptitude or apt if installed, fallback is apt-get
|
||||
# You can just set apt_pref='apt-get' to override it.
|
||||
|
||||
if [[ -z $apt_pref || -z $apt_upgr ]]; then
|
||||
if [[ -e $commands[apt] ]]; then
|
||||
apt_pref='apt'
|
||||
apt_upgr='upgrade'
|
||||
elif [[ -e $commands[aptitude] ]]; then
|
||||
if [[ -e $commands[aptitude] ]]; then
|
||||
apt_pref='aptitude'
|
||||
apt_upgr='safe-upgrade'
|
||||
elif [[ -e $commands[apt] ]]; then
|
||||
apt_pref='apt'
|
||||
apt_upgr='upgrade'
|
||||
else
|
||||
apt_pref='apt-get'
|
||||
apt_upgr='upgrade'
|
||||
|
|
@ -52,13 +52,17 @@ if [[ $use_sudo -eq 1 ]]; then
|
|||
alias ai="sudo $apt_pref install"
|
||||
# Install all packages given on the command line while using only the first word of each line:
|
||||
# acs ... | ail
|
||||
|
||||
alias ail="sed -e 's/ */ /g' -e 's/ *//' | cut -s -d ' ' -f 1 | xargs sudo $apt_pref install"
|
||||
alias ap="sudo $apt_pref purge"
|
||||
alias ar="sudo $apt_pref remove"
|
||||
alias aar="sudo $apt_pref autoremove"
|
||||
|
||||
# apt-get only
|
||||
alias ads="sudo apt-get dselect-upgrade"
|
||||
|
||||
# apt only
|
||||
alias alu="sudo apt update && apt list -u && sudo apt upgrade"
|
||||
|
||||
# Install all .deb files in the current directory.
|
||||
# Warning: you will need to put the glob in single quotes if you use:
|
||||
# glob_subst
|
||||
|
|
@ -66,7 +70,7 @@ if [[ $use_sudo -eq 1 ]]; then
|
|||
alias di="sudo dpkg -i"
|
||||
|
||||
# Remove ALL kernel images and headers EXCEPT the one in use
|
||||
alias kclean='sudo aptitude remove -P ?and(~i~nlinux-(ima|hea) ?not(~n$(uname -r)))'
|
||||
alias kclean='sudo aptitude remove -P "?and(~i~nlinux-(ima|hea) ?not(~n$(uname -r)))"'
|
||||
|
||||
|
||||
# commands using su #########
|
||||
|
|
@ -84,28 +88,27 @@ else
|
|||
alias afu="su -lc '$apt-file update'"
|
||||
alias au="su -lc '$apt_pref $apt_upgr' root"
|
||||
function ai() {
|
||||
cmd="su -lc 'aptitude -P install $@' root"
|
||||
cmd="su -lc '$apt_pref install $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
function ap() {
|
||||
cmd="su -lc '$apt_pref -P purge $@' root"
|
||||
cmd="su -lc '$apt_pref purge $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
function ar() {
|
||||
cmd="su -lc '$apt_pref -P remove $@' root"
|
||||
function aar() {
|
||||
cmd="su -lc '$apt_pref autoremove $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
|
||||
# Install all .deb files in the current directory
|
||||
# Assumes glob_subst is off
|
||||
alias dia='su -lc "dpkg -i ./*.deb" root'
|
||||
alias di='su -lc "dpkg -i" root'
|
||||
|
||||
# Remove ALL kernel images and headers EXCEPT the one in use
|
||||
alias kclean='su -lc "aptitude remove -P ?and(~i~nlinux-(ima|hea) ?not(~n$(uname -r)))" root'
|
||||
alias kclean='su -lc "aptitude remove -P \"?and(~i~nlinux-(ima|hea) ?not(~n$(uname -r)))\"" root'
|
||||
fi
|
||||
|
||||
# Completion ################################################################
|
||||
|
|
@ -138,7 +141,7 @@ apt_pref_compdef au "$apt_upgr"
|
|||
apt_pref_compdef ai "install"
|
||||
apt_pref_compdef ail "install"
|
||||
apt_pref_compdef ap "purge"
|
||||
apt_pref_compdef ar "remove"
|
||||
apt_pref_compdef aar "autoremove"
|
||||
apt_pref_compdef ads "dselect-upgrade"
|
||||
|
||||
# Misc. #####################################################################
|
||||
|
|
|
|||
19
plugins/deno/README.md
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
# Deno Plugin
|
||||
|
||||
This plugin sets up completion and aliases for [Deno](https://deno.land).
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Full command |
|
||||
| ----- | ---------------- |
|
||||
| db | deno bundle |
|
||||
| dc | deno compile |
|
||||
| dca | deno cache |
|
||||
| dfmt | deno fmt |
|
||||
| dh | deno help |
|
||||
| dli | deno lint |
|
||||
| drn | deno run |
|
||||
| drA | deno run -A |
|
||||
| drw | deno run --watch |
|
||||
| dts | deno test |
|
||||
| dup | deno upgrade |
|
||||
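A brief illustration of the aliases in use (not in the plugin README; the script names are hypothetical):

```zsh
# With the plugin loaded, these are equivalent:
drA server.ts           # expands to: deno run -A server.ts
drw --allow-net app.ts  # expands to: deno run --watch --allow-net app.ts
```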
27
plugins/deno/deno.plugin.zsh
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
# ALIASES
|
||||
alias db='deno bundle'
|
||||
alias dc='deno compile'
|
||||
alias dca='deno cache'
|
||||
alias dfmt='deno fmt'
|
||||
alias dh='deno help'
|
||||
alias dli='deno lint'
|
||||
alias drn='deno run'
|
||||
alias drA='deno run -A'
|
||||
alias drw='deno run --watch'
|
||||
alias dts='deno test'
|
||||
alias dup='deno upgrade'
|
||||
|
||||
# COMPLETION FUNCTION
|
||||
if (( ! $+commands[deno] )); then
|
||||
return
|
||||
fi
|
||||
|
||||
# If the completion file doesn't exist yet, we need to autoload it and
|
||||
# bind it to `deno`. Otherwise, compinit will have already done that.
|
||||
if [[ ! -f "$ZSH_CACHE_DIR/completions/_deno" ]]; then
|
||||
typeset -g -A _comps
|
||||
autoload -Uz _deno
|
||||
_comps[deno]=_deno
|
||||
fi
|
||||
|
||||
deno completions zsh >| "$ZSH_CACHE_DIR/completions/_deno" &|
|
||||
|
|
@ -12,16 +12,27 @@ plugins=(... dirhistory)
|
|||
|
||||
| Shortcut | Description |
|
||||
|-----------------------------------|-----------------------------------------------------------|
|
||||
| <kbd>alt</kbd> + <kbd>left</kbd> | Go to previous directory |
|
||||
| <kbd>alt</kbd> + <kbd>right</kbd> | Undo <kbd>alt</kbd> + <kbd>left</kbd> |
|
||||
| <kbd>alt</kbd> + <kbd>up</kbd> | Move into the parent directory |
|
||||
| <kbd>alt</kbd> + <kbd>down</kbd> | Move into the first child directory by alphabetical order |
|
||||
| <kbd>Alt</kbd> + <kbd>Left</kbd> | Go to previous directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Right</kbd> | Go to next directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Up</kbd> | Move into the parent directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Down</kbd> | Move into the first child directory by alphabetical order |
|
||||
|
||||
**For macOS: use the Option key (<kbd>⌥</kbd>) instead of <kbd>Alt</kbd>**.
|
||||
|
||||
> NOTE: some terminals might override the <kbd>Alt</kbd> + Arrows key bindings (e.g. Windows Terminal).
|
||||
> If these don't work check your terminal settings and change them to a different keyboard shortcut.
|
||||
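If your terminal emits a different escape sequence for these shortcuts, you can rebind the plugin's widgets yourself. A hedged sketch follows; the sequences shown are the defaults the plugin already binds, so replace them with whatever your terminal actually sends (check with `cat -v`):

```zsh
# In ~/.zshrc, after Oh My Zsh is sourced. Swap in your terminal's sequences.
bindkey '\e[1;3D' dirhistory_zle_dirhistory_back    # e.g. Alt+Left
bindkey '\e[1;3C' dirhistory_zle_dirhistory_future  # e.g. Alt+Right
bindkey '\e[1;3A' dirhistory_zle_dirhistory_up      # e.g. Alt+Up
bindkey '\e[1;3B' dirhistory_zle_dirhistory_down    # e.g. Alt+Down
```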
|
||||
## Usage
|
||||
|
||||
This plugin allows you to navigate the history of previous current-working-directories using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT. MAC users may alternately use OPT-LEFT and OPT-RIGHT.
|
||||
This plugin allows you to navigate the history of previous working directories using <kbd>Alt</kbd> + <kbd>Left</kbd>
|
||||
and <kbd>Alt</kbd> + <kbd>Right</kbd>. <kbd>Alt</kbd> + <kbd>Left</kbd> moves to past directories, and
|
||||
<kbd>Alt</kbd> + <kbd>Right</kbd> goes back to recent directories.
|
||||
|
||||
Also, navigate directory **hierarchy** using ALT-UP and ALT-DOWN. (mac keybindings not yet implemented). ALT-UP moves to higher hierarchy (shortcut for 'cd ..'). ALT-DOWN moves into the first directory found in alphabetical order (useful to navigate long empty directories e.g. java packages)
|
||||
**NOTE: the maximum directory history size is 30.**
|
||||
|
||||
You can also navigate **directory hierarchies** using <kbd>Alt</kbd> + <kbd>Up</kbd> and <kbd>Alt</kbd> + <kbd>Down</kbd>.
|
||||
<kbd>Alt</kbd> + <kbd>Up</kbd> moves to the parent directory, while <kbd>Alt</kbd> + <kbd>Down</kbd> moves into the first
|
||||
child directory found in alphabetical order (useful to navigate long empty directories, e.g. Java packages).
|
||||
|
||||
For example, if the shell was started, and the following commands were entered:
|
||||
|
||||
|
|
@ -32,8 +43,20 @@ cd share
|
|||
cd doc
|
||||
```
|
||||
|
||||
Then entering ALT-LEFT at the prompt would change directory from /usr/share/doc to /usr/share, then if pressed again to /usr/, then ~. If ALT-RIGHT were pressed the directory would be changed to /usr/ again.
|
||||
the directory stack (`dirs -v`) would look like this:
|
||||
|
||||
After that, ALT-DOWN will probably go to /usr/bin (depends on your /usr structure), ALT-UP will return to /usr, then ALT-UP will get you to /
|
||||
```console
|
||||
$ dirs -v
|
||||
0 /usr/share/doc
|
||||
1 /usr/share
|
||||
2 /usr
|
||||
3 ~
|
||||
```
|
||||
|
||||
**Currently the max history size is 30**. The navigation should work for xterm, PuTTY xterm mode, GNU screen, and on MAC with alternate keys as mentioned above.
|
||||
then entering <kbd>Alt</kbd> + <kbd>Left</kbd> at the prompt would change directory from `/usr/share/doc` to `/usr/share`,
|
||||
then if pressed again to `/usr`, then `~`. If <kbd>Alt</kbd> + <kbd>Right</kbd> were pressed the directory would be changed
|
||||
to `/usr` again.
|
||||
|
||||
After that, <kbd>Alt</kbd> + <kbd>Down</kbd> will probably go to `/usr/bin` if `bin` is the first directory in alphabetical
|
||||
order (depends on your `/usr` folder structure). <kbd>Alt</kbd> + <kbd>Up</kbd> will return to `/usr`, and once more will get
|
||||
you to the root folder (`/`).
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
##
|
||||
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
|
||||
##
|
||||
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
|
||||
# that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT.
|
||||
#
|
||||
# Navigate directory hierarchy using ALT-UP and ALT-DOWN. (mac keybindings not yet implemented)
|
||||
#
|
||||
# Navigate directory hierarchy using ALT-UP and ALT-DOWN.
|
||||
# ALT-UP moves to higher hierarchy (cd ..)
|
||||
# ALT-DOWN moves into the first directory found in alphabetical order
|
||||
#
|
||||
|
|
@ -14,25 +14,25 @@ export dirhistory_future
|
|||
|
||||
export DIRHISTORY_SIZE=30
|
||||
|
||||
# Pop the last element of dirhistory_past.
|
||||
# Pass the name of the variable to return the result in.
|
||||
# Pop the last element of dirhistory_past.
|
||||
# Pass the name of the variable to return the result in.
|
||||
# Returns the element if the array was not empty,
|
||||
# otherwise returns empty string.
|
||||
function pop_past() {
|
||||
eval "$1='$dirhistory_past[$#dirhistory_past]'"
|
||||
typeset -g $1="${dirhistory_past[$#dirhistory_past]}"
|
||||
if [[ $#dirhistory_past -gt 0 ]]; then
|
||||
dirhistory_past[$#dirhistory_past]=()
|
||||
fi
|
||||
}
|
||||
|
||||
function pop_future() {
|
||||
eval "$1='$dirhistory_future[$#dirhistory_future]'"
|
||||
typeset -g $1="${dirhistory_future[$#dirhistory_future]}"
|
||||
if [[ $#dirhistory_future -gt 0 ]]; then
|
||||
dirhistory_future[$#dirhistory_future]=()
|
||||
fi
|
||||
}
|
||||
|
||||
# Push a new element onto the end of dirhistory_past. If the size of the array
|
||||
# Push a new element onto the end of dirhistory_past. If the size of the array
|
||||
# is >= DIRHISTORY_SIZE, the array is shifted
|
||||
function push_past() {
|
||||
if [[ $#dirhistory_past -ge $DIRHISTORY_SIZE ]]; then
|
||||
|
|
@ -76,7 +76,7 @@ function dirhistory_back() {
|
|||
local d=""
|
||||
# Last element in dirhistory_past is the cwd.
|
||||
|
||||
pop_past cw
|
||||
pop_past cw
|
||||
if [[ "" == "$cw" ]]; then
|
||||
# Someone overwrote our variable. Recover it.
|
||||
dirhistory_past=($PWD)
|
||||
|
|
@ -108,44 +108,56 @@ function dirhistory_forward() {
|
|||
# Bind keys to history navigation
|
||||
function dirhistory_zle_dirhistory_back() {
|
||||
# Erase current line in buffer
|
||||
zle kill-buffer
|
||||
dirhistory_back
|
||||
zle accept-line
|
||||
zle .kill-buffer
|
||||
dirhistory_back
|
||||
zle .accept-line
|
||||
}
|
||||
|
||||
function dirhistory_zle_dirhistory_future() {
|
||||
# Erase current line in buffer
|
||||
zle kill-buffer
|
||||
zle .kill-buffer
|
||||
dirhistory_forward
|
||||
zle accept-line
|
||||
zle .accept-line
|
||||
}
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_back
|
||||
# xterm in normal mode
|
||||
bindkey "\e[3D" dirhistory_zle_dirhistory_back
|
||||
bindkey "\e[1;3D" dirhistory_zle_dirhistory_back
|
||||
# Mac teminal (alt+left/right)
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
|
||||
bindkey "^[b" dirhistory_zle_dirhistory_back
|
||||
fi
|
||||
# Putty:
|
||||
bindkey "\e\e[D" dirhistory_zle_dirhistory_back
|
||||
# GNU screen:
|
||||
bindkey "\eO3D" dirhistory_zle_dirhistory_back
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_future
|
||||
bindkey "\e[3C" dirhistory_zle_dirhistory_future
|
||||
bindkey "\e[1;3C" dirhistory_zle_dirhistory_future
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
|
||||
bindkey "^[f" dirhistory_zle_dirhistory_future
|
||||
fi
|
||||
bindkey "\e\e[C" dirhistory_zle_dirhistory_future
|
||||
bindkey "\eO3C" dirhistory_zle_dirhistory_future
|
||||
|
||||
for keymap in emacs vicmd viins; do
|
||||
# dirhistory_back
|
||||
bindkey -M $keymap "\e[3D" dirhistory_zle_dirhistory_back # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3D" dirhistory_zle_dirhistory_back # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[D" dirhistory_zle_dirhistory_back # Putty
|
||||
bindkey -M $keymap "\eO3D" dirhistory_zle_dirhistory_back # GNU screen
|
||||
|
||||
#
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[b" dirhistory_zle_dirhistory_back ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[D" dirhistory_zle_dirhistory_back ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcub1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcub1]}" dirhistory_zle_dirhistory_back # urxvt
|
||||
fi
|
||||
|
||||
# dirhistory_future
|
||||
bindkey -M $keymap "\e[3C" dirhistory_zle_dirhistory_future # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3C" dirhistory_zle_dirhistory_future # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[C" dirhistory_zle_dirhistory_future # Putty
|
||||
bindkey -M $keymap "\eO3C" dirhistory_zle_dirhistory_future # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[f" dirhistory_zle_dirhistory_future ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[C" dirhistory_zle_dirhistory_future ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcuf1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcuf1]}" dirhistory_zle_dirhistory_future # urxvt
|
||||
fi
|
||||
done
|
||||
|
||||
#
|
||||
# HIERARCHY Implemented in this section, in case someone wants to split it to another plugin if it clashes bindings
|
||||
#
|
||||
#
|
||||
|
||||
# Move up in hierarchy
|
||||
function dirhistory_up() {
|
||||
|
|
@ -160,32 +172,50 @@ function dirhistory_down() {
|
|||
|
||||
# Bind keys to hierarchy navigation
|
||||
function dirhistory_zle_dirhistory_up() {
|
||||
zle kill-buffer # Erase current line in buffer
|
||||
zle .kill-buffer # Erase current line in buffer
|
||||
dirhistory_up
|
||||
zle accept-line
|
||||
zle .accept-line
|
||||
}
|
||||
|
||||
function dirhistory_zle_dirhistory_down() {
|
||||
zle kill-buffer # Erase current line in buffer
|
||||
zle .kill-buffer # Erase current line in buffer
|
||||
dirhistory_down
|
||||
zle accept-line
|
||||
zle .accept-line
|
||||
}
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_up
|
||||
# xterm in normal mode
|
||||
bindkey "\e[3A" dirhistory_zle_dirhistory_up
|
||||
bindkey "\e[1;3A" dirhistory_zle_dirhistory_up
|
||||
# Mac teminal (alt+up)
|
||||
#bindkey "^[?" dirhistory_zle_dirhistory_up #dont know it
|
||||
# Putty:
|
||||
bindkey "\e\e[A" dirhistory_zle_dirhistory_up
|
||||
# GNU screen:
|
||||
bindkey "\eO3A" dirhistory_zle_dirhistory_up
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_down
|
||||
bindkey "\e[3B" dirhistory_zle_dirhistory_down
|
||||
bindkey "\e[1;3B" dirhistory_zle_dirhistory_down
|
||||
# Mac teminal (alt+down)
|
||||
#bindkey "^[?" dirhistory_zle_dirhistory_down #dont know it
|
||||
bindkey "\e\e[B" dirhistory_zle_dirhistory_down
|
||||
bindkey "\eO3B" dirhistory_zle_dirhistory_down
|
||||
|
||||
for keymap in emacs vicmd viins; do
|
||||
# dirhistory_up
|
||||
bindkey -M $keymap "\e[3A" dirhistory_zle_dirhistory_up # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3A" dirhistory_zle_dirhistory_up # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[A" dirhistory_zle_dirhistory_up # Putty
|
||||
bindkey -M $keymap "\eO3A" dirhistory_zle_dirhistory_up # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[[A" dirhistory_zle_dirhistory_up ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[A" dirhistory_zle_dirhistory_up ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcuu1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcuu1]}" dirhistory_zle_dirhistory_up # urxvt
|
||||
fi
|
||||
|
||||
# dirhistory_down
|
||||
bindkey -M $keymap "\e[3B" dirhistory_zle_dirhistory_down # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3B" dirhistory_zle_dirhistory_down # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[B" dirhistory_zle_dirhistory_down # Putty
|
||||
bindkey -M $keymap "\eO3B" dirhistory_zle_dirhistory_down # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[[B" dirhistory_zle_dirhistory_down ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[B" dirhistory_zle_dirhistory_down ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcud1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcud1]}" dirhistory_zle_dirhistory_down # urxvt
|
||||
fi
|
||||
done
|
||||
|
||||
unset keymap
|
||||
|
|
|
|||
|
|
@ -1,36 +0,0 @@
|
|||
# Django plugin
|
||||
|
||||
This plugin adds completion and hints for the [Django Project](https://www.djangoproject.com/) `manage.py` commands
|
||||
and options.
|
||||
|
||||
To use it, add `django` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... django)
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```zsh
|
||||
$> python manage.py (press <TAB> here)
|
||||
```
|
||||
|
||||
Would result in:
|
||||
|
||||
```zsh
|
||||
cleanup -- remove old data from the database
|
||||
compilemessages -- compile .po files to .mo for use with gettext
|
||||
createcachetable -- creates table for SQL cache backend
|
||||
createsuperuser -- create a superuser
|
||||
dbshell -- run command-line client for the current database
|
||||
diffsettings -- display differences between the current settings and Django defaults
|
||||
dumpdata -- output contents of database as a fixture
|
||||
flush -- execute 'sqlflush' on the current database
|
||||
inspectdb -- output Django model module for tables in database
|
||||
loaddata -- install the named fixture(s) in the database
|
||||
makemessages -- pull out all strings marked for translation
|
||||
reset -- executes 'sqlreset' for the given app(s)
|
||||
runfcgi -- run this project as a fastcgi
|
||||
runserver -- start a lightweight web server for development
|
||||
...
|
||||
```
|
||||
|
|
@ -1,404 +0,0 @@
|
|||
#compdef manage.py
|
||||
|
||||
typeset -ga nul_args
|
||||
nul_args=(
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))'
|
||||
'--settings=-[the Python path to a settings module.]:file:_files'
|
||||
'--pythonpath=-[a directory to add to the Python path.]:directory:_directories'
|
||||
'--traceback[print traceback on exception.]'
|
||||
"--no-color[Don't colorize the command output.]"
|
||||
"--version[show program's version number and exit.]"
|
||||
{-h,--help}'[show this help message and exit.]'
|
||||
)
|
||||
|
||||
typeset -ga start_args
|
||||
start_args=(
|
||||
'--template=-[The path or URL to load the template from.]:directory:_directories'
|
||||
'--extension=-[The file extension(s) to render (default: "py").]'
|
||||
'--name=-[The file name(s) to render.]:file:_files'
|
||||
)
|
||||
|
||||
typeset -ga db_args
|
||||
db_args=(
|
||||
'--database=-[Nominates a database. Defaults to the "default" database.]'
|
||||
)
|
||||
|
||||
typeset -ga noinput_args
|
||||
noinput_args=(
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]'
|
||||
)
|
||||
|
||||
typeset -ga no_init_data_args
|
||||
no_init_data_args=(
|
||||
'--no-initial-data[Tells Django not to load any initial data after database synchronization.]'
|
||||
)
|
||||
|
||||
typeset -ga tag_args
|
||||
tag_args=(
|
||||
'--tag=-[Run only checks labeled with given tag.]'
|
||||
'--list-tags[List available tags.]'
|
||||
)
|
||||
|
||||
_managepy-check(){
|
||||
_arguments -s : \
|
||||
$tag_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-changepassword(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-createcachetable(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-createsuperuser(){
|
||||
_arguments -s : \
|
||||
'--username=-[Specifies the login for the superuser.]' \
|
||||
'--email=-[Specifies the email for the superuser.]' \
|
||||
$noinput_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-collectstatic(){
|
||||
_arguments -s : \
|
||||
'--link[Create a symbolic link to each file instead of copying.]' \
|
||||
'--no-post-process[Do NOT post process collected files.]' \
|
||||
'--ignore=-[Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.]' \
|
||||
'--dry-run[Do everything except modify the filesystem.]' \
|
||||
'--clear[Clear the existing files using the storage before trying to copy or link the original file.]' \
|
||||
'--link[Create a symbolic link to each file instead of copying.]' \
|
||||
'--no-default-ignore[Do not ignore the common private glob-style patterns "CVS", ".*" and "*~".]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dbshell(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-diffsettings(){
|
||||
_arguments -s : \
|
||||
"--all[Display all settings, regardless of their value.]"
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dumpdata(){
|
||||
_arguments -s : \
|
||||
'--format=-[Specifies the output serialization format for fixtures.]:format:(json yaml xml)' \
|
||||
'--indent=-[Specifies the indent level to use when pretty-printing output.]' \
|
||||
'--exclude=-[An app_label or app_label.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).]' \
|
||||
'--natural-foreign[Use natural foreign keys if they are available.]' \
|
||||
'--natural-primary[Use natural primary keys if they are available.]' \
|
||||
"--all[Use Django's base manager to dump all models stored in the database.]" \
|
||||
'--pks=-[Only dump objects with given primary keys.]' \
|
||||
$db_args \
|
||||
$nul_args \
|
||||
'*::appname:_applist' && ret=0
|
||||
}
|
||||
|
||||
_managepy-flush(){
|
||||
_arguments -s : \
|
||||
$no_init_data_args \
|
||||
$db_args \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-help(){
|
||||
_arguments -s : \
|
||||
'*:command:_managepy_cmds' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy_cmds(){
|
||||
local line
|
||||
local -a cmd
|
||||
_call_program help-command ./manage.py help \
|
||||
|& sed -n '/^ /s/[(), ]/ /gp' \
|
||||
| while read -A line; do cmd=($line $cmd) done
|
||||
_describe -t managepy-command 'manage.py command' cmd
|
||||
}
|
||||
|
||||
_managepy-inspectdb(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-loaddata(){
|
||||
_arguments -s : \
|
||||
'--ignorenonexistent[Ignores entries in the serialized data for fields that do not currently exist on the model.]' \
|
||||
'--app=-[Only look for fixtures in the specified app.]:appname:_applist' \
|
||||
'*::file:_files' \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-makemessages(){
|
||||
_arguments -s : \
|
||||
'--locale=-[Creates or updates the message files for the given locale(s) (e.g. pt_BR).]' \
|
||||
'--domain=-[The domain of the message files (default: "django").]' \
|
||||
'--all[Updates the message files for all existing locales.]' \
|
||||
'--extension=-[The file extension(s) to examine (default: "html,txt", or "js" if the domain is "djangojs").]' \
|
||||
'--symlinks[Follows symlinks to directories when examining source code and templates for translation strings.]' \
|
||||
'--ignore=-[Ignore files or directories matching this glob-style pattern.]' \
|
||||
"--no-default-ignore[Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.]" \
|
||||
"--no-wrap[Don't break long message lines into several lines.]" \
|
||||
"--no-location[Don't write '#: filename:line' lines.]" \
|
||||
'--no-obsolete[Remove obsolete message strings.]' \
|
||||
'--keep-pot[Keep .pot file after making messages.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-makemigrations(){
|
||||
_arguments -s : \
|
||||
'--dry-run[Just show what migrations would be made]' \
|
||||
'--merge[Enable fixing of migration conflicts.]' \
|
||||
'--empty[Create an empty migration.]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-migrate(){
|
||||
_arguments -s : \
|
||||
'--fake[Mark migrations as run without actually running them]' \
|
||||
'--list[Show a list of all known migrations and which are applied]' \
|
||||
$no_init_data_args \
|
||||
$noinput_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-runfcgi(){
|
||||
local state
|
||||
|
||||
local fcgi_opts
|
||||
fcgi_opts=(
|
||||
'protocol[fcgi, scgi, ajp, ... (default fcgi)]:protocol:(fcgi scgi ajp)'
|
||||
'host[hostname to listen on..]:'
|
||||
'port[port to listen on.]:'
|
||||
'socket[UNIX socket to listen on.]:file:_files'
|
||||
'method[prefork or threaded (default prefork)]:method:(prefork threaded)'
|
||||
'maxrequests[number of requests a child handles before it is killed and a new child is forked (0 = no limit).]:'
|
||||
'maxspare[max number of spare processes / threads.]:'
|
||||
'minspare[min number of spare processes / threads.]:'
|
||||
'maxchildren[hard limit number of processes / threads.]:'
|
||||
'daemonize[whether to detach from terminal.]:boolean:(False True)'
|
||||
'pidfile[write the spawned process-id to this file.]:file:_files'
|
||||
'workdir[change to this directory when daemonizing.]:directory:_files'
|
||||
'outlog[write stdout to this file.]:file:_files'
|
||||
'errlog[write stderr to this file.]:file:_files'
|
||||
)
|
||||
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*: :_values "FCGI Setting" $fcgi_opts' && ret=0
|
||||
}
|
||||
|
||||
_managepy-runserver(){
|
||||
_arguments -s : \
|
||||
'--ipv6[Tells Django to use an IPv6 address.]' \
|
||||
'--nothreading[Tells Django to NOT use threading.]' \
|
||||
'--noreload[Tells Django to NOT use the auto-reloader.]' \
|
||||
'--nostatic[Tells Django to NOT automatically serve static files at STATIC_URL.]' \
|
||||
'--insecure[Allows serving static files even if DEBUG is False.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-shell(){
|
||||
_arguments -s : \
|
||||
'--plain[Tells Django to use plain Python, not IPython.]' \
|
||||
'--no-startup[When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.]' \
|
||||
'--interface=-[Specify an interactive interpreter interface.]:INTERFACE:((ipython bpython))' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sql(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlall(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlclear(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlcustom(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dropindexes(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlflush(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlindexes(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlinitialdata(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlsequencereset(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-squashmigrations(){
|
||||
_arguments -s : \
|
||||
'--no-optimize[Do not try to optimize the squashed operations.]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-startapp(){
|
||||
_arguments -s : \
|
||||
$start_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-startproject(){
|
||||
_arguments -s : \
|
||||
$start_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-syncdb() {
|
||||
_arguments -s : \
|
||||
$noinput_args \
|
||||
$no_init_data_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-test() {
|
||||
_arguments -s : \
|
||||
'--failfast[Tells Django to stop running the test suite after first failed test.]' \
|
||||
'--testrunner=-[Tells Django to use specified test runner class instead of the one specified by the TEST_RUNNER setting.]' \
|
||||
'--liveserver=-[Overrides the default address where the live server (used with LiveServerTestCase) is expected to run from. The default value is localhost:8081.]' \
|
||||
'--top-level-directory=-[Top level of project for unittest discovery.]' \
|
||||
'--pattern=-[The test matching pattern. Defaults to test*.py.]:' \
|
||||
$noinput_args \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-testserver() {
|
||||
_arguments -s : \
|
||||
'--addrport=-[port number or ipaddr:port to run the server on.]' \
|
||||
'--ipv6[Tells Django to use an IPv6 address.]' \
|
||||
$noinput_args \
|
||||
'*::fixture:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-validate() {
|
||||
_arguments -s : \
|
||||
$tag_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-commands() {
|
||||
local -a commands
|
||||
|
||||
commands=(
|
||||
"changepassword:Change a user's password for django.contrib.auth."
|
||||
'check:Checks the entire Django project for potential problems.'
|
||||
'compilemessages:Compiles .po files to .mo files for use with builtin gettext support.'
|
||||
'createcachetable:Creates the table needed to use the SQL cache backend.'
|
||||
'createsuperuser:Used to create a superuser.'
|
||||
'collectstatic:Collect static files in a single location.'
|
||||
'dbshell:Runs the command-line client for the current DATABASE_ENGINE.'
|
||||
"diffsettings:Displays differences between the current settings.py and Django's default settings."
|
||||
'dumpdata:Output the contents of the database as a fixture of the given format.'
|
||||
'flush:Executes ``sqlflush`` on the current database.'
|
||||
'help:manage.py help.'
|
||||
'inspectdb:Introspects the database tables in the given database and outputs a Django model module.'
|
||||
'loaddata:Installs the named fixture(s) in the database.'
|
||||
'makemessages:Runs over the entire source tree of the current directory and pulls out all strings marked for translation.'
|
||||
'makemigrations:Creates new migration(s) for apps.'
|
||||
'migrate:Updates database schema. Manages both apps with migrations and those without.'
|
||||
'runfcgi:Run this project as a fastcgi (or some other protocol supported by flup) application,'
|
||||
'runserver:Starts a lightweight Web server for development.'
|
||||
'shell:Runs a Python interactive interpreter.'
|
||||
'showmigrations:Shows all available migrations for the current project.'
|
||||
'sql:Prints the CREATE TABLE SQL statements for the given app name(s).'
|
||||
'sqlall:Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).'
|
||||
'sqlclear:Prints the DROP TABLE SQL statements for the given app name(s).'
|
||||
'sqlcustom:Prints the custom table modifying SQL statements for the given app name(s).'
|
||||
'sqldropindexes:Prints the DROP INDEX SQL statements for the given model module name(s).'
|
||||
'sqlflush:Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed.'
|
||||
'sqlindexes:Prints the CREATE INDEX SQL statements for the given model module name(s).'
|
||||
"sqlinitialdata:RENAMED: see 'sqlcustom'"
|
||||
'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).'
|
||||
'squashmigrations:Squashes an existing set of migrations (from first until specified) into a single new one.'
|
||||
"startapp:Creates a Django app directory structure for the given app name in this project's directory."
|
||||
"startproject:Creates a Django project directory structure for the given project name in this current directory."
|
||||
"syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
|
||||
'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.'
|
||||
'testserver:Runs a development server with data from the given fixture(s).'
|
||||
'validate:Validates all installed models.'
|
||||
)
|
||||
|
||||
_describe -t commands 'manage.py command' commands && ret=0
|
||||
}
|
||||
|
||||
_applist() {
|
||||
local line
|
||||
local -a apps
|
||||
_call_program help-command "python -c \"import sys; del sys.path[0];\\
|
||||
import os.path as op, re, django.conf;\\
|
||||
bn=op.basename(op.abspath(op.curdir));[sys\\
|
||||
.stdout.write(str(re.sub(r'^%s\.(.*?)$' %
|
||||
bn, r'\1', i)) + '\n') for i in django.conf.settings.\\
|
||||
INSTALLED_APPS if re.match(r'^%s' % bn, i)]\"" \
|
||||
| while read -A line; do apps=($line $apps) done
|
||||
_values 'Application' $apps && ret=0
|
||||
}
|
||||
|
||||
_managepy() {
|
||||
local curcontext=$curcontext ret=1
|
||||
|
||||
if ((CURRENT == 2)); then
|
||||
_managepy-commands
|
||||
else
|
||||
shift words
|
||||
(( CURRENT -- ))
|
||||
curcontext="${curcontext%:*:*}:managepy-$words[1]:"
|
||||
_call_function ret _managepy-$words[1]
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _managepy manage.py
|
||||
compdef _managepy django
|
||||
compdef _managepy django-admin
|
||||
compdef _managepy django-admin.py
|
||||
compdef _managepy django-manage
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
# Dnote Plugin
|
||||
|
||||
This plugin adds auto-completion for [Dnote](https://dnote.io) project.
|
||||
This plugin adds auto-completion for [Dnote](https://www.getdnote.com/), a simple command line notebook.
|
||||
|
||||
To use it, add `dnote` to the plugins array in your zshrc file:
|
||||
|
||||
|
|
|
|||
|
|
@ -11,21 +11,22 @@ plugins=(... docker-compose)
|
|||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-----------|--------------------------|------------------------------------------------------------------|
|
||||
| dco | `docker-compose` | Docker-compose main command |
|
||||
| dcb | `docker-compose build` | Build containers |
|
||||
| dce | `docker-compose exec` | Execute command inside a container |
|
||||
| dcps | `docker-compose ps` | List containers |
|
||||
| dcrestart | `docker-compose restart` | Restart container |
|
||||
| dcrm | `docker-compose rm` | Remove container |
|
||||
| dcr | `docker-compose run` | Run a command in container |
|
||||
| dcstop | `docker-compose stop` | Stop a container |
|
||||
| dcup | `docker-compose up` | Build, (re)create, start, and attach to containers for a service |
|
||||
| dcupd | `docker-compose up -d` | Same as `dcup`, but starts as daemon |
|
||||
| dcdn | `docker-compose down` | Stop and remove containers |
|
||||
| dcl | `docker-compose logs` | Show logs of container |
|
||||
| dclf | `docker-compose logs -f` | Show logs and follow output |
|
||||
| dcpull | `docker-compose pull` | Pull image of a service |
|
||||
| dcstart | `docker-compose start` | Start a container |
|
||||
| dck | `docker-compose kill` | Kills containers |
|
||||
| Alias | Command | Description |
|
||||
|-----------|--------------------------------|------------------------------------------------------------------|
|
||||
| dco | `docker-compose` | Docker-compose main command |
|
||||
| dcb | `docker-compose build` | Build containers |
|
||||
| dce | `docker-compose exec` | Execute command inside a container |
|
||||
| dcps | `docker-compose ps` | List containers |
|
||||
| dcrestart | `docker-compose restart` | Restart container |
|
||||
| dcrm | `docker-compose rm` | Remove container |
|
||||
| dcr | `docker-compose run` | Run a command in container |
|
||||
| dcstop | `docker-compose stop` | Stop a container |
|
||||
| dcup | `docker-compose up` | Build, (re)create, start, and attach to containers for a service |
|
||||
| dcupb | `docker-compose up --build` | Same as `dcup`, but build images before starting containers |
|
||||
| dcupd | `docker-compose up -d` | Same as `dcup`, but starts as daemon |
|
||||
| dcdn | `docker-compose down` | Stop and remove containers |
|
||||
| dcl | `docker-compose logs` | Show logs of container |
|
||||
| dclf | `docker-compose logs -f` | Show logs and follow output |
|
||||
| dcpull | `docker-compose pull` | Pull image of a service |
|
||||
| dcstart | `docker-compose start` | Start a container |
|
||||
| dck | `docker-compose kill` | Kills containers |
|
||||
|
|
|
|||
|
|
@ -121,12 +121,6 @@ __docker-compose_subcommand() {
|
|||
'--parallel[Build images in parallel.]' \
|
||||
'*:services:__docker-compose_services_from_build' && ret=0
|
||||
;;
|
||||
(bundle)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
'--push-images[Automatically push images for any services which have a `build` option specified.]' \
|
||||
'(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
|
||||
;;
|
||||
(config)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
|
|
@ -290,7 +284,7 @@ __docker-compose_subcommand() {
|
|||
(up)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \
|
||||
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \
|
||||
$opts_no_color \
|
||||
$opts_no_deps \
|
||||
$opts_force_recreate \
|
||||
|
|
@ -298,6 +292,7 @@ __docker-compose_subcommand() {
|
|||
$opts_no_build \
|
||||
"(--no-build)--build[Build images before starting containers.]" \
|
||||
"(-d)--abort-on-container-exit[Stops all containers if any container was stopped. Incompatible with -d.]" \
|
||||
"(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \
|
||||
'(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \
|
||||
'--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \
|
||||
'--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \
|
||||
|
|
@ -341,11 +336,13 @@ _docker-compose() {
|
|||
'(- :)'{-h,--help}'[Get help]' \
|
||||
'*'{-f,--file}"[${file_description}]:file:_files -g '*.yml'" \
|
||||
'(-p --project-name)'{-p,--project-name}'[Specify an alternate project name (default: directory name)]:project name:' \
|
||||
'--env-file[Specify an alternate environment file (default: .env)]:env-file:_files' \
|
||||
"--compatibility[If set, Compose will attempt to convert keys in v3 files to their non-Swarm equivalent]" \
|
||||
'(- :)'{-v,--version}'[Print version and exit]' \
|
||||
'--verbose[Show more output]' \
|
||||
'--log-level=[Set log level]:level:(DEBUG INFO WARNING ERROR CRITICAL)' \
|
||||
'--no-ansi[Do not print ANSI control characters]' \
|
||||
'--ansi=[Control when to print ANSI control characters]:when:(never always auto)' \
|
||||
'(-H --host)'{-H,--host}'[Daemon socket to connect to]:host:' \
|
||||
'--tls[Use TLS; implied by --tlsverify]' \
|
||||
'--tlscacert=[Trust certs signed only by this CA]:ca path:' \
|
||||
|
|
@ -359,6 +356,7 @@ _docker-compose() {
|
|||
local -a relevant_compose_flags relevant_compose_repeatable_flags relevant_docker_flags compose_options docker_options
|
||||
|
||||
relevant_compose_flags=(
|
||||
"--env-file"
|
||||
"--file" "-f"
|
||||
"--host" "-H"
|
||||
"--project-name" "-p"
|
||||
|
|
|
|||
|
|
@ -1,27 +1,22 @@
|
|||
# Authors:
|
||||
# https://github.com/tristola
|
||||
#
|
||||
# Docker-compose related zsh aliases
|
||||
# support Compose v2 as docker CLI plugin
|
||||
(( ${+commands[docker-compose]} )) && dccmd='docker-compose' || dccmd='docker compose'
|
||||
|
||||
# Aliases ###################################################################
|
||||
alias dco="$dccmd"
|
||||
alias dcb="$dccmd build"
|
||||
alias dce="$dccmd exec"
|
||||
alias dcps="$dccmd ps"
|
||||
alias dcrestart="$dccmd restart"
|
||||
alias dcrm="$dccmd rm"
|
||||
alias dcr="$dccmd run"
|
||||
alias dcstop="$dccmd stop"
|
||||
alias dcup="$dccmd up"
|
||||
alias dcupb="$dccmd up --build"
|
||||
alias dcupd="$dccmd up -d"
|
||||
alias dcdn="$dccmd down"
|
||||
alias dcl="$dccmd logs"
|
||||
alias dclf="$dccmd logs -f"
|
||||
alias dcpull="$dccmd pull"
|
||||
alias dcstart="$dccmd start"
|
||||
alias dck="$dccmd kill"
|
||||
|
||||
# Use dco as alias for docker-compose, since dc on *nix is 'dc - an arbitrary precision calculator'
|
||||
# https://www.gnu.org/software/bc/manual/dc-1.05/html_mono/dc.html
|
||||
|
||||
alias dco='docker-compose'
|
||||
|
||||
alias dcb='docker-compose build'
|
||||
alias dce='docker-compose exec'
|
||||
alias dcps='docker-compose ps'
|
||||
alias dcrestart='docker-compose restart'
|
||||
alias dcrm='docker-compose rm'
|
||||
alias dcr='docker-compose run'
|
||||
alias dcstop='docker-compose stop'
|
||||
alias dcup='docker-compose up'
|
||||
alias dcupd='docker-compose up -d'
|
||||
alias dcdn='docker-compose down'
|
||||
alias dcl='docker-compose logs'
|
||||
alias dclf='docker-compose logs -f'
|
||||
alias dcpull='docker-compose pull'
|
||||
alias dcstart='docker-compose start'
|
||||
alias dck='docker-compose kill'
|
||||
unset dccmd
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ __docker-machine_filters() {
|
|||
}
|
||||
|
||||
__get_swarm_discovery() {
|
||||
declare -a masters serivces
|
||||
declare -a masters services
|
||||
local service
|
||||
services=()
|
||||
masters=($(docker-machine ls -f {{.Swarm}} |grep '(master)' |awk '{print $1}'))
|
||||
|
|
@ -169,7 +169,7 @@ __get_create_argument() {
|
|||
__docker-machine_subcommand() {
|
||||
local -a opts_help
|
||||
opts_help=("(- :)--help[Print usage]")
|
||||
local -a opts_only_host opts_driver opts_storage_driver opts_stragery
|
||||
local -a opts_only_host opts_driver opts_storage_driver opts_state
|
||||
opts_only_host=(
|
||||
"$opts_help"
|
||||
"*:host:__docker-machine_hosts_all"
|
||||
|
|
@ -330,7 +330,7 @@ _docker-machine() {
|
|||
_arguments -C \
|
||||
"(- :)"{-h,--help}"[Show help]" \
|
||||
"(-D --debug)"{-D,--debug}"[Enable debug mode]" \
|
||||
'(-s --stroage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
|
||||
'(-s --storage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
|
||||
'--tls-ca-cert[CA to verify remotes against]:file:_files' \
|
||||
'--tls-ca-key[Private key to generate certificates]:file:_files' \
|
||||
'--tls-client-cert[Client cert to use for TLS]:file:_files' \
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
# Docker plugin
|
||||
|
||||
This plugin adds auto-completion for [docker](https://www.docker.com/).
|
||||
This plugin adds auto-completion and aliases for [docker](https://www.docker.com/).
|
||||
|
||||
To use it add `docker` to the plugins array in your zshrc file.
|
||||
|
||||
|
|
@ -28,7 +28,46 @@ the lines below to your zshrc file**, but be aware of the side effects:
|
|||
>
|
||||
> Therefore, this behavior is disabled by default. To enable it:
|
||||
>
|
||||
> ```
|
||||
> ```sh
|
||||
> zstyle ':completion:*:*:docker:*' option-stacking yes
|
||||
> zstyle ':completion:*:*:docker-*:*' option-stacking yes
|
||||
> ```
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
| :------ | :-------------------------- | :--------------------------------------------------------------------------------------- |
|
||||
| dbl | `docker build` | Build an image from a Dockerfile |
|
||||
| dcin | `docker container inspect` | Display detailed information on one or more containers |
|
||||
| dlo | `docker container logs` | Fetch the logs of a docker container |
|
||||
| dcls | `docker container ls` | List all the running docker containers |
|
||||
| dclsa | `docker container ls -a` | List all running and stopped containers |
|
||||
| dpo | `docker container port` | List port mappings or a specific mapping for the container |
|
||||
| dpu | `docker pull` | Pull an image or a repository from a registry |
|
||||
| dr | `docker container run` | Create a new container and start it using the specified command |
|
||||
| drit | `docker container run -it` | Create a new container and start it in an interactive shell |
|
||||
| drm | `docker container rm` | Remove the specified container(s) |
|
||||
| drm! | `docker container rm -f` | Force the removal of a running container (uses SIGKILL) |
|
||||
| dst | `docker container start` | Start one or more stopped containers |
|
||||
| dstp | `docker container stop` | Stop one or more running containers |
|
||||
| dtop | `docker top` | Display the running processes of a container |
|
||||
| dxc | `docker container exec` | Run a new command in a running container |
|
||||
| dxcit | `docker container exec -it` | Run a new command in a running container in an interactive shell |
|
||||
| | | **Docker Images** |
|
||||
| dib | `docker image build` | Build an image from a Dockerfile (same as docker build) |
|
||||
| dii | `docker image inspect` | Display detailed information on one or more images |
|
||||
| dils | `docker image ls` | List docker images |
|
||||
| dipu | `docker image push` | Push an image or repository to a remote registry |
|
||||
| dirm | `docker image rm` | Remove one or more images |
|
||||
| dit | `docker image tag` | Add a name and tag to a particular image |
|
||||
| | | **Docker Network** |
|
||||
| dnc | `docker network create` | Create a new network |
|
||||
| dncn | `docker network connect` | Connect a container to a network |
|
||||
| dndcn | `docker network disconnect` | Disconnect a container from a network |
|
||||
| dni | `docker network inspect` | Return information about one or more networks |
|
||||
| dnls | `docker network ls` | List all networks the engine daemon knows about, including those spanning multiple hosts |
|
||||
| dnrm | `docker network rm` | Remove one or more networks |
|
||||
| | | **Docker Volume** |
|
||||
| dvi | `docker volume inspect` | Display detailed information about one or more volumes |
|
||||
| dvls | `docker volume ls` | List all the volumes known to docker |
|
||||
| dvprune | `docker volume prune` | Cleanup dangling volumes |
|
||||
|
|
|
|||
|
|
@ -624,7 +624,8 @@ __docker_container_subcommand() {
|
|||
"($help)--entrypoint=[Overwrite the default entrypoint of the image]:entry point: "
|
||||
"($help)*--env-file=[Read environment variables from a file]:environment file:_files"
|
||||
"($help)*--expose=[Expose a port from the container without publishing it]: "
|
||||
"($help)*--group=[Set one or more supplementary user groups for the container]:group:_groups"
|
||||
"($help)*--gpus=[GPU devices to add to the container ('all' to pass all GPUs)]:device: "
|
||||
"($help)*--group-add=[Set one or more supplementary user groups for the container]:group:_groups"
|
||||
"($help -h --hostname)"{-h=,--hostname=}"[Container host name]:hostname:_hosts"
|
||||
"($help -i --interactive)"{-i,--interactive}"[Keep stdin open even if not attached]"
|
||||
"($help)--init[Run an init inside the container that forwards signals and reaps processes]"
|
||||
|
|
@ -750,6 +751,7 @@ __docker_container_subcommand() {
|
|||
$opts_attach_exec_run_start \
|
||||
"($help -d --detach)"{-d,--detach}"[Detached mode: leave the container running in the background]" \
|
||||
"($help)*"{-e=,--env=}"[Set environment variables]:environment variable: " \
|
||||
"($help)*--env-file=[Read environment variables from a file]:environment file:_files" \
|
||||
"($help -i --interactive)"{-i,--interactive}"[Keep stdin open even if not attached]" \
|
||||
"($help)--privileged[Give extended Linux capabilities to the command]" \
|
||||
"($help -t --tty)"{-t,--tty}"[Allocate a pseudo-tty]" \
|
||||
|
|
@ -791,7 +793,7 @@ __docker_container_subcommand() {
|
|||
"($help -f --follow)"{-f,--follow}"[Follow log output]" \
|
||||
"($help -s --since)"{-s=,--since=}"[Show logs since this timestamp]:timestamp: " \
|
||||
"($help -t --timestamps)"{-t,--timestamps}"[Show timestamps]" \
|
||||
"($help)--tail=[Output the last K lines]:lines:(1 10 20 50 all)" \
|
||||
"($help -n --tail)"{-n=,--tail=}"[Number of lines to show from the end of the logs]:lines:(1 10 20 50 all)" \
|
||||
"($help -)*:containers:__docker_complete_containers" && ret=0
|
||||
;;
|
||||
(ls|list)
|
||||
|
|
@ -1018,6 +1020,7 @@ __docker_image_subcommand() {
|
|||
"($help)*--shm-size=[Size of '/dev/shm' (format is '<number><unit>')]:shm size: " \
|
||||
"($help)--squash[Squash newly built layers into a single new layer]" \
|
||||
"($help -t --tag)*"{-t=,--tag=}"[Repository, name and tag for the image]: :__docker_complete_repositories_with_tags" \
|
||||
"($help)--target=[Set the target build stage to build.]" \
|
||||
"($help)*--ulimit=[ulimit options]:ulimit: " \
|
||||
"($help)--userns=[Container user namespace]:user namespace:(host)" \
|
||||
"($help -):path or URL:_directories" && ret=0
|
||||
|
|
@ -1340,7 +1343,7 @@ __docker_node_complete_ls_filters() {
|
|||
;;
|
||||
esac
|
||||
else
|
||||
opts=('id' 'label' 'membership' 'name' 'role')
|
||||
opts=('id' 'label' 'membership' 'name' 'node.label' 'role')
|
||||
_describe -t filter-opts "filter options" opts -qS "=" && ret=0
|
||||
fi
|
||||
|
||||
|
|
@ -1958,6 +1961,8 @@ __docker_service_subcommand() {
|
|||
|
||||
opts_help=("(: -)--help[Print usage]")
|
||||
opts_create_update=(
|
||||
"($help)*--cap-add=[Add Linux capabilities]:capability: "
|
||||
"($help)*--cap-drop=[Drop Linux capabilities]:capability: "
|
||||
"($help)*--constraint=[Placement constraints]:constraint: "
|
||||
"($help)--endpoint-mode=[Placement constraints]:mode:(dnsrr vip)"
|
||||
"($help)*"{-e=,--env=}"[Set environment variables]:env: "
|
||||
|
|
@ -1970,6 +1975,7 @@ __docker_service_subcommand() {
|
|||
"($help)*--label=[Service labels]:label: "
|
||||
"($help)--limit-cpu=[Limit CPUs]:value: "
|
||||
"($help)--limit-memory=[Limit Memory]:value: "
|
||||
"($help)--limit-pids[Limit maximum number of processes (default 0 = unlimited)]"
|
||||
"($help)--log-driver=[Logging driver for service]:logging driver:__docker_complete_log_drivers"
|
||||
"($help)*--log-opt=[Logging driver options]:log driver options:__docker_complete_log_options"
|
||||
"($help)*--mount=[Attach a filesystem mount to the service]:mount: "
|
||||
|
|
@ -2012,6 +2018,7 @@ __docker_service_subcommand() {
|
|||
"($help)*--dns-option=[Set DNS options]:DNS option: " \
|
||||
"($help)*--dns-search=[Set custom DNS search domains]:DNS search: " \
|
||||
"($help)*--env-file=[Read environment variables from a file]:environment file:_files" \
|
||||
"($help)*--group=[Set one or more supplementary user groups for the container]:group: _groups " \
|
||||
"($help)--mode=[Service Mode]:mode:(global replicated)" \
|
||||
"($help)--name=[Service name]:name: " \
|
||||
"($help)*--placement-pref=[Add a placement preference]:pref:__docker_service_complete_placement_pref" \
|
||||
|
|
@ -2035,7 +2042,7 @@ __docker_service_subcommand() {
|
|||
"($help)--no-task-ids[Do not include task IDs]" \
|
||||
"($help)--no-trunc[Do not truncate output]" \
|
||||
"($help)--since=[Show logs since timestamp]:timestamp: " \
|
||||
"($help)--tail=[Number of lines to show from the end of the logs]:lines:(1 10 20 50 all)" \
|
||||
"($help -n --tail)"{-n=,--tail=}"[Number of lines to show from the end of the logs]:lines:(1 10 20 50 all)" \
|
||||
"($help -t --timestamps)"{-t,--timestamps}"[Show timestamps]" \
|
||||
"($help -)1:service:__docker_complete_services" && ret=0
|
||||
;;
|
||||
|
|
@ -2537,6 +2544,82 @@ __docker_volume_subcommand() {
|
|||
|
||||
# EO volume
|
||||
|
||||
# BO context
|
||||
|
||||
__docker_complete_contexts() {
|
||||
[[ $PREFIX = -* ]] && return 1
|
||||
integer ret=1
|
||||
declare -a contexts
|
||||
|
||||
contexts=(${(f)${:-"$(_call_program commands docker $docker_options context ls -q)"$'\n'}})
|
||||
|
||||
_describe -t context-list "context" contexts && ret=0
|
||||
return ret
|
||||
}
|
||||
|
||||
__docker_context_commands() {
|
||||
local -a _docker_context_subcommands
|
||||
_docker_context_subcommands=(
|
||||
"create:Create new context"
|
||||
"inspect:Display detailed information on one or more contexts"
|
||||
"list:List available contexts"
|
||||
"rm:Remove one or more contexts"
|
||||
"show:Print the current context"
|
||||
"update:Update a context"
|
||||
"use:Set the default context"
|
||||
)
|
||||
_describe -t docker-context-commands "docker context command" _docker_context_subcommands
|
||||
}
|
||||
|
||||
__docker_context_subcommand() {
|
||||
local -a _command_args opts_help
|
||||
local expl help="--help"
|
||||
integer ret=1
|
||||
|
||||
opts_help=("(: -)--help[Print usage]")
|
||||
|
||||
case "$words[1]" in
|
||||
(create)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help)--from=[Create context from a named context]:from:__docker_complete_contexts" \
|
||||
"($help -):name: " && ret=0
|
||||
;;
|
||||
(use)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(inspect)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(rm)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(update)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help -):name:" && ret=0
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
# EO context
|
||||
|
||||
__docker_caching_policy() {
|
||||
oldp=( "$1"(Nmh+1) ) # 1 hour
|
||||
(( $#oldp ))
|
||||
|
|
@ -2569,7 +2652,7 @@ __docker_commands() {
|
|||
then
|
||||
local -a lines
|
||||
lines=(${(f)"$(_call_program commands docker 2>&1)"})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/ ##/:})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/\*# ##/:})
|
||||
_docker_subcommands=($_docker_subcommands 'daemon:Enable daemon mode' 'help:Show help for a command')
|
||||
(( $#_docker_subcommands > 2 )) && _store_cache docker_subcommands _docker_subcommands
|
||||
fi
|
||||
|
|
@ -2624,6 +2707,23 @@ __docker_subcommand() {
|
|||
;;
|
||||
esac
|
||||
;;
|
||||
(context)
|
||||
local curcontext="$curcontext" state
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -): :->command" \
|
||||
"($help -)*:: :->option-or-argument" && ret=0
|
||||
|
||||
case $state in
|
||||
(command)
|
||||
__docker_context_commands && ret=0
|
||||
;;
|
||||
(option-or-argument)
|
||||
curcontext=${curcontext%:*:*}:docker-${words[-1]}:
|
||||
__docker_context_subcommand && ret=0
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
(daemon)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
|
|
@ -2639,6 +2739,8 @@ __docker_subcommand() {
|
|||
"($help)*--cluster-store-opt=[Cluster store options]:Cluster options:->cluster-store-options" \
|
||||
"($help)--config-file=[Path to daemon configuration file]:Config File:_files" \
|
||||
"($help)--containerd=[Path to containerd socket]:socket:_files -g \"*.sock\"" \
|
||||
"($help)--containerd-namespace=[Containerd namespace to use]:containerd namespace:" \
|
||||
"($help)--containerd-plugins-namespace=[Containerd namespace to use for plugins]:containerd namespace:" \
|
||||
"($help)--data-root=[Root directory of persisted Docker data]:path:_directories" \
|
||||
"($help -D --debug)"{-D,--debug}"[Enable debug mode]" \
|
||||
"($help)--default-gateway[Container default gateway IPv4 address]:IPv4 address: " \
|
||||
|
|
@ -2689,7 +2791,8 @@ __docker_subcommand() {
|
|||
"($help)--tlsverify[Use TLS and verify the remote]" \
|
||||
"($help)--userns-remap=[User/Group setting for user namespaces]:user\:group:->users-groups" \
|
||||
"($help)--userland-proxy[Use userland proxy for loopback traffic]" \
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" && ret=0
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" \
|
||||
"($help)--validate[Validate daemon configuration and exit]" && ret=0
|
||||
|
||||
case $state in
|
||||
(cluster-store)
|
||||
|
|
|
|||
39
plugins/docker/docker.plugin.zsh
Normal file
39
plugins/docker/docker.plugin.zsh
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
alias dbl='docker build'
|
||||
alias dpu='docker pull'
|
||||
alias dtop='docker top'
|
||||
|
||||
# docker containers
|
||||
alias dcin='docker container inspect'
|
||||
alias dlo='docker container logs'
|
||||
alias dcls='docker container ls'
|
||||
alias dclsa='docker container ls -a'
|
||||
alias dpo='docker container port'
|
||||
alias dr='docker container run'
|
||||
alias drit='docker container run -it'
|
||||
alias drm='docker container rm'
|
||||
alias 'drm!'='docker container rm -f'
|
||||
alias dst='docker container start'
|
||||
alias dstp='docker container stop'
|
||||
alias dxc='docker container exec'
|
||||
alias dxcit='docker container exec -it'
|
||||
|
||||
# docker images
|
||||
alias dib='docker image build'
|
||||
alias dii='docker image inspect'
|
||||
alias dils='docker image ls'
|
||||
alias dipu='docker image push'
|
||||
alias dirm='docker image rm'
|
||||
alias dit='docker image tag'
|
||||
|
||||
# docker network
|
||||
alias dnc='docker network create'
|
||||
alias dncn='docker network connect'
|
||||
alias dndcn='docker network disconnect'
|
||||
alias dni='docker network inspect'
|
||||
alias dnls='docker network ls'
|
||||
alias dnrm='docker network rm'
|
||||
|
||||
# docker volume
|
||||
alias dvi='docker volume inspect'
|
||||
alias dvls='docker volume ls'
|
||||
alias dvprune='docker volume prune'
|
||||
|
|
@ -11,41 +11,54 @@
|
|||
## Functions
|
||||
|
||||
source_env() {
|
||||
if [[ -f $ZSH_DOTENV_FILE ]]; then
|
||||
if [[ "$ZSH_DOTENV_PROMPT" != false ]]; then
|
||||
local confirmation dirpath="${PWD:A}"
|
||||
if [[ ! -f "$ZSH_DOTENV_FILE" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
# make sure there is an (dis-)allowed file
|
||||
touch "$ZSH_DOTENV_ALLOWED_LIST"
|
||||
touch "$ZSH_DOTENV_DISALLOWED_LIST"
|
||||
if [[ "$ZSH_DOTENV_PROMPT" != false ]]; then
|
||||
local confirmation dirpath="${PWD:A}"
|
||||
|
||||
# early return if disallowed
|
||||
if grep -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
return;
|
||||
fi
|
||||
# make sure there is an (dis-)allowed file
|
||||
touch "$ZSH_DOTENV_ALLOWED_LIST"
|
||||
touch "$ZSH_DOTENV_DISALLOWED_LIST"
|
||||
|
||||
# check if current directory's .env file is allowed or ask for confirmation
|
||||
if ! grep -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
# print same-line prompt and output newline character if necessary
|
||||
echo -n "dotenv: found '$ZSH_DOTENV_FILE' file. Source it? ([Y]es/[n]o/[a]lways/n[e]ver) "
|
||||
read -k 1 confirmation; [[ "$confirmation" != $'\n' ]] && echo
|
||||
|
||||
# check input
|
||||
case "$confirmation" in
|
||||
[nN]) return ;;
|
||||
[aA]) echo "$dirpath" >> "$ZSH_DOTENV_ALLOWED_LIST" ;;
|
||||
[eE]) echo "$dirpath" >> "$ZSH_DOTENV_DISALLOWED_LIST"; return ;;
|
||||
*) ;; # interpret anything else as a yes
|
||||
esac
|
||||
fi
|
||||
# early return if disallowed
|
||||
if command grep -Fx -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
return
|
||||
fi
|
||||
|
||||
# test .env syntax
|
||||
zsh -fn $ZSH_DOTENV_FILE || echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
# check if current directory's .env file is allowed or ask for confirmation
|
||||
if ! command grep -Fx -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
# get cursor column and print new line before prompt if not at line beginning
|
||||
local column
|
||||
echo -ne "\e[6n" > /dev/tty
|
||||
read -t 1 -s -d R column < /dev/tty
|
||||
column="${column##*\[*;}"
|
||||
[[ $column -eq 1 ]] || echo
|
||||
|
||||
setopt localoptions allexport
|
||||
source $ZSH_DOTENV_FILE
|
||||
# print same-line prompt and output newline character if necessary
|
||||
echo -n "dotenv: found '$ZSH_DOTENV_FILE' file. Source it? ([Y]es/[n]o/[a]lways/n[e]ver) "
|
||||
read -k 1 confirmation
|
||||
[[ "$confirmation" = $'\n' ]] || echo
|
||||
|
||||
# check input
|
||||
case "$confirmation" in
|
||||
[nN]) return ;;
|
||||
[aA]) echo "$dirpath" >> "$ZSH_DOTENV_ALLOWED_LIST" ;;
|
||||
[eE]) echo "$dirpath" >> "$ZSH_DOTENV_DISALLOWED_LIST"; return ;;
|
||||
*) ;; # interpret anything else as a yes
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
|
||||
# test .env syntax
|
||||
zsh -fn $ZSH_DOTENV_FILE || {
|
||||
echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
return 1
|
||||
}
|
||||
|
||||
setopt localoptions allexport
|
||||
source $ZSH_DOTENV_FILE
|
||||
}
|
||||
|
||||
autoload -U add-zsh-hook
|
||||
|
|
|
|||
|
|
@ -21,3 +21,4 @@ plugins=(... dotnet)
|
|||
| da | dotnet add | Add a package or reference to a .NET project. |
|
||||
| dp | dotnet pack | Create a NuGet package. |
|
||||
| dng | dotnet nuget | Provides additional NuGet commands. |
|
||||
| db | dotnet build | Build a .NET project |
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ _dotnet_zsh_complete()
|
|||
return
|
||||
fi
|
||||
|
||||
# This is not a variable assigment, don't remove spaces!
|
||||
# This is not a variable assignment, don't remove spaces!
|
||||
_values = "${(ps:\n:)completions}"
|
||||
}
|
||||
|
||||
|
|
@ -30,3 +30,4 @@ alias ds='dotnet sln'
|
|||
alias da='dotnet add'
|
||||
alias dp='dotnet pack'
|
||||
alias dng='dotnet nuget'
|
||||
alias db='dotnet build'
|
||||
|
|
|
|||
|
|
@ -1,83 +1,66 @@
|
|||
# Drush
|
||||
|
||||
## Description
|
||||
This plugin offers aliases and functions to make the work with drush easier and more productive.
|
||||
This plugin adds aliases and functions for [Drush](https://www.drush.org), a command-line shell
|
||||
and Unix scripting interface for Drupal. It also adds completion for the `drush` command.
|
||||
|
||||
To enable it, add the `drush` to your `plugins` array in `~/.zshrc`:
|
||||
To enable it, add `drush` to the plugins array in zshrc file:
|
||||
|
||||
```
|
||||
```zsh
|
||||
plugins=(... drush)
|
||||
```
|
||||
|
||||
## Aliases
|
||||
| Alias | Description | Command |
|
||||
|-------|-----------------------------------------------------------------------|-----------------------------|
|
||||
| dr | Display drush help | drush |
|
||||
| drca | Clear all drupal caches. | drush cc all |
|
||||
| drcb | Clear block cache. | drush cc block |
|
||||
| drcg | Clear registry cache. | drush cc registry |
|
||||
| drcj | Clear css-js cache. | drush cc css-js |
|
||||
| drcm | Clear menu cache. | drush cc menu |
|
||||
| drcml | Clear module-list cache. | drush cc module-list |
|
||||
| drcr | Run all cron hooks in all active modules for specified site. | drush core-cron |
|
||||
| drct | Clear theme-registry cache. | drush cc theme-registry |
|
||||
| drcv | Clear views cache. (Make sure that the views module is enabled) | drush cc views |
|
||||
| drdmp | Backup database in a new dump.sql file | drush drush sql-dump --ordered-dump --result-file=dump.sql|
|
||||
| drf | Display features status | drush features |
|
||||
| drfr | Revert a feature module on your site. | drush features-revert -y |
|
||||
| drfu | Update a feature module on your site. | drush features-update -y |
|
||||
| drfra | Revert all enabled feature module on your site. | drush features-revert-all |
|
||||
| drif | Flush all derived images. | drush image-flush --all |
|
||||
| drpm | Show a list of available modules. | drush pm-list --type=module |
|
||||
| drst | Provides a birds-eye view of the current Drupal installation, if any. | drush core-status |
|
||||
| drup | Apply any database updates required (as with running update.php). | drush updatedb |
|
||||
| drups | List any pending database updates. | drush updatedb-status |
|
||||
| drv | Show drush version. | drush version |
|
||||
| drvd | Delete a variable. | drush variable-del |
|
||||
| drvg | Get a list of some or all site variables and values. | drush variable-get |
|
||||
| drvs | Set a variable. | drush variable-set |
|
||||
|
||||
| Alias | Command | Description |
|
||||
| ------- | ----------------------------------------------------------- | -------------------------------------------------------------------- |
|
||||
| `dr` | `drush` | Display drush help |
|
||||
| `drca` | `drush cc all` | _(Deprecated in Drush 8)_ Clear all drupal caches |
|
||||
| `drcb` | `drush cc block` | _(Deprecated in Drush 8)_ Clear block cache |
|
||||
| `drcex` | `drush config:export -y` | Export Drupal configuration to a directory |
|
||||
| `drcg` | `drush cc registry` | _(Deprecated in Drush 8)_ Clear registry cache |
|
||||
| `drcim` | `drush config:import -y` | Import config from a config directory |
|
||||
| `drcj` | `drush cc css-js` | Clear css-js cache |
|
||||
| `drcm` | `drush cc menu` | Clear menu cache |
|
||||
| `drcml` | `drush cc module-list` | Clear module-list cache |
|
||||
| `drcr` | `drush core-cron` | Run all cron hooks in all active modules for specified site |
|
||||
| `drct` | `drush cc theme-registry` | Clear theme-registry cache |
|
||||
| `drcv` | `drush cc views` | Clear views cache _(make sure that the views module is enabled)_ |
|
||||
| `drdmp` | `drush drush sql-dump --ordered-dump --result-file=dumpsql` | Backup database in a new dump.sql file |
|
||||
| `drf` | `drush features` | Display features status |
|
||||
| `drfr` | `drush features-revert -y` | Revert a feature module on your site |
|
||||
| `drfra` | `drush features-revert-all` | Revert all enabled feature module on your site |
|
||||
| `drfu` | `drush features-update -y` | Update a feature module on your site |
|
||||
| `drif` | `drush image-flush --all` | Flush all derived images |
|
||||
| `drpm` | `drush pm-list --type=module` | Show a list of available modules |
|
||||
| `drst` | `drush core-status` | Provides a birds-eye view of the current Drupal installation, if any |
|
||||
| `druli` | `drush user:login` | Display a one time login link for user ID 1, or another user |
|
||||
| `drup` | `drush updatedb` | Apply any database updates required (as with running update.php) |
|
||||
| `drups` | `drush updatedb-status` | List any pending database updates |
|
||||
| `drv` | `drush version` | Show drush version |
|
||||
| `drvd` | `drush variable-del` | Delete a variable |
|
||||
| `drvg` | `drush variable-get` | Get a list of some or all site variables and values |
|
||||
| `drvs` | `drush variable-set` | Set a variable |
|
||||
| `drws` | `drush watchdog:show` | Show watchdog messages |
|
||||
| `drwse` | `drush watchdog:show --extended` | Show watchdog messages with extended information |
|
||||
| `drwst` | `drush watchdog:tail` | Tail watchdog messages |
|
||||
|
||||
## Functions
|
||||
|
||||
### dren
|
||||
Download and enable one or more extensions (modules or themes).
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`dren devel` or `dren devel module_filter views`
|
||||
- `dren`: download and enable one or more extensions (modules or themes). Must be
|
||||
invoked with one or more parameters, e.g.: `dren devel` or `dren devel module_filter views`.
|
||||
|
||||
### drf
|
||||
Edit drushrc, site alias, and Drupal settings.php files.
|
||||
Can be invoked with one or without parameters. e.g.:
|
||||
`drf 1`
|
||||
- `drf`: edit drushrc, site alias, and Drupal settings.php files.
|
||||
Can be invoked with one or without parameters, e.g.: `drf 1`.
|
||||
|
||||
### dris
|
||||
Disable one or more extensions (modules or themes)
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`dris devel` or `dris devel module_filter views`
|
||||
- `dris`: disable one or more extensions (modules or themes). Must be invoked with
|
||||
one or more parameters, e.g.: `dris devel` or `dris devel module_filter views`.
|
||||
|
||||
### drpu
|
||||
Uninstall one or more modules.
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`drpu devel` or `drpu devel module_filter views`
|
||||
- `drpu`: uninstall one or more modules. Must be invoked with one or more
|
||||
parameters, e.g.: `drpu devel` or `drpu devel module_filter views`.
|
||||
|
||||
### drnew
|
||||
Creates a brand new drupal website.
|
||||
Note: As soon as the installation is complete, drush will print a username and a random password into the terminal:
|
||||
```
|
||||
Installation complete. User name: admin User password: cf7t8yqNEm
|
||||
```
|
||||
- `drnew`: creates a brand new drupal website. Note: as soon as the installation
|
||||
is complete, `drush` will print a username and a random password into the terminal:
|
||||
|
||||
## Additional features
|
||||
|
||||
### Autocomplete
|
||||
The [completion script for drush](https://github.com/drush-ops/drush/blob/8.0.1/drush.complete.sh) comes enabled with this plugin.
|
||||
So, it is possible to type a command:
|
||||
```
|
||||
drush sql
|
||||
```
|
||||
|
||||
And as soon as the tab key is pressed, the script will display the available commands:
|
||||
```
|
||||
drush sql
|
||||
sqlc sql-conf sql-create sql-dump sql-query sql-sanitize
|
||||
sql-cli sql-connect sql-drop sqlq sqlsan sql-sync
|
||||
```
|
||||
```text
|
||||
Installation complete. User name: admin User password: cf7t8yqNEm
|
||||
```
|
||||
|
|
|
|||
|
|
@ -1,19 +1,18 @@
|
|||
# Drush support.
|
||||
|
||||
# Functions
|
||||
function dren() {
|
||||
drush en $@ -y
|
||||
drush en "$@" -y
|
||||
}
|
||||
|
||||
function dris() {
|
||||
drush pm-disable $@ -y
|
||||
drush pm-disable "$@" -y
|
||||
}
|
||||
|
||||
function drpu() {
|
||||
drush pm-uninstall $@ -y
|
||||
drush pm-uninstall "$@" -y
|
||||
}
|
||||
|
||||
function drf() {
|
||||
if [[ $1 == "" ]] then
|
||||
if [[ -z "$1" ]] then
|
||||
drush core-config
|
||||
else
|
||||
drush core-config --choice=$1
|
||||
|
|
@ -21,62 +20,62 @@ function drf() {
|
|||
}
|
||||
|
||||
function drfi() {
|
||||
if [[ $1 == "fields" ]]; then
|
||||
drush field-info fields
|
||||
elif [[ $1 == "types" ]]; then
|
||||
drush field-info types
|
||||
else
|
||||
drush field-info
|
||||
fi
|
||||
case "$1" in
|
||||
fields) drush field-info fields ;;
|
||||
types) drush field-info types ;;
|
||||
*) drush field-info ;;
|
||||
esac
|
||||
}
|
||||
|
||||
function drnew() {
|
||||
(
|
||||
cd
|
||||
echo "Website's name: "
|
||||
read WEBSITE_NAME
|
||||
|
||||
cd ~
|
||||
echo "Website's name: "
|
||||
read WEBSITE_NAME
|
||||
HOST=http://$(hostname -i)/
|
||||
|
||||
HOST=http://$(hostname -i)/
|
||||
if [[ $WEBSITE_NAME == "" ]] then
|
||||
MINUTES=$(date +%M:%S)
|
||||
WEBSITE_NAME="Drupal-$MINUTES"
|
||||
echo "Your website will be named: $WEBSITE_NAME"
|
||||
fi
|
||||
|
||||
if [[ $WEBSITE_NAME == "" ]] then
|
||||
MINUTES=$(date +%M:%S)
|
||||
WEBSITE_NAME="Drupal-$MINUTES"
|
||||
echo "Your website will be named: $WEBSITE_NAME"
|
||||
fi
|
||||
drush dl drupal --drupal-project-rename=$WEBSITE_NAME
|
||||
|
||||
drush dl drupal --drupal-project-rename=$WEBSITE_NAME
|
||||
echo "Type your localhost directory: (Leave empty for /var/www/html/)"
|
||||
read DIRECTORY
|
||||
|
||||
echo "Type your localhost directory: (Leave empty for /var/www/html/)"
|
||||
read DIRECTORY
|
||||
if [[ $DIRECTORY == "" ]] then
|
||||
DIRECTORY="/var/www/html/"
|
||||
fi
|
||||
|
||||
if [[ $DIRECTORY == "" ]] then
|
||||
DIRECTORY="/var/www/html/"
|
||||
fi
|
||||
echo "Moving to $DIRECTORY$WEBSITE_NAME"
|
||||
sudo mv $WEBSITE_NAME $DIRECTORY
|
||||
cd $DIRECTORY$WEBSITE_NAME
|
||||
|
||||
echo "Moving to $DIRECTORY$WEBSITE_NAME"
|
||||
sudo mv $WEBSITE_NAME $DIRECTORY
|
||||
cd $DIRECTORY$WEBSITE_NAME
|
||||
echo "Database's user: "
|
||||
read DATABASE_USR
|
||||
echo "Database's password: "
|
||||
read -s DATABASE_PWD
|
||||
echo "Database's name for your project: "
|
||||
read DATABASE
|
||||
|
||||
echo "Database's user: "
|
||||
read DATABASE_USR
|
||||
echo "Database's password: "
|
||||
read -s DATABASE_PWD
|
||||
echo "Database's name for your project: "
|
||||
read DATABASE
|
||||
|
||||
DB_URL="mysql://$DATABASE_USR:$DATABASE_PWD@localhost/$DATABASE"
|
||||
drush site-install standard --db-url=$DB_URL --site-name=$WEBSITE_NAME
|
||||
|
||||
open_command $HOST$WEBSITE_NAME
|
||||
echo "Done"
|
||||
DB_URL="mysql://$DATABASE_USR:$DATABASE_PWD@localhost/$DATABASE"
|
||||
drush site-install standard --db-url=$DB_URL --site-name=$WEBSITE_NAME
|
||||
|
||||
open_command $HOST$WEBSITE_NAME
|
||||
echo "Done"
|
||||
)
|
||||
}
|
||||
|
||||
# Aliases, sorted alphabetically.
|
||||
# Aliases
|
||||
alias dr="drush"
|
||||
alias drca="drush cc all" # Deprecated for Drush 8
|
||||
alias drcb="drush cc block" # Deprecated for Drush 8
|
||||
alias drcex="drush config:export -y"
|
||||
alias drcg="drush cc registry" # Deprecated for Drush 8
|
||||
alias drcim="drush config:import -y"
|
||||
alias drcj="drush cc css-js"
|
||||
alias drcm="drush cc menu"
|
||||
alias drcml="drush cc module-list"
|
||||
|
|
@ -86,17 +85,23 @@ alias drcv="drush cc views"
|
|||
alias drdmp="drush sql-dump --ordered-dump --result-file=dump.sql"
|
||||
alias drf="drush features"
|
||||
alias drfr="drush features-revert -y"
|
||||
alias drfu="drush features-update -y"
|
||||
alias drfra="drush features-revert-all"
|
||||
alias drfu="drush features-update -y"
|
||||
alias drif="drush image-flush --all"
|
||||
alias drpm="drush pm-list --type=module"
|
||||
alias drst="drush core-status"
|
||||
alias druli="drush user:login"
|
||||
alias drup="drush updatedb"
|
||||
alias drups="drush updatedb-status"
|
||||
alias drv="drush version"
|
||||
alias drvd="drush variable-del"
|
||||
alias drvg="drush variable-get"
|
||||
alias drvs="drush variable-set"
|
||||
alias drws="drush watchdog:show"
|
||||
alias drwse="drush watchdog:show --extended"
|
||||
alias drwst="drush watchdog:tail"
|
||||
|
||||
# Enable drush autocomplete support
|
||||
autoload bashcompinit
|
||||
bashcompinit
|
||||
source $(dirname $0)/drush.complete.sh
|
||||
|
|
|
|||
|
|
@ -9,47 +9,60 @@
|
|||
# - You can share opened buffered across opened frames.
|
||||
# - Configuration changes made at runtime are applied to all frames.
|
||||
|
||||
# Require emacs version to be minimum 24
|
||||
autoload -Uz is-at-least
|
||||
is-at-least 24 "${${(Az)"$(emacsclient --version 2>/dev/null)"}[2]}" || return 0
|
||||
|
||||
if "$ZSH/tools/require_tool.sh" emacsclient 24 2>/dev/null ; then
|
||||
export EMACS_PLUGIN_LAUNCHER="$ZSH/plugins/emacs/emacsclient.sh"
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# set EDITOR if not already defined.
|
||||
export EDITOR="${EDITOR:-${EMACS_PLUGIN_LAUNCHER}}"
|
||||
# Path to custom emacsclient launcher
|
||||
export EMACS_PLUGIN_LAUNCHER="${0:A:h}/emacsclient.sh"
|
||||
|
||||
alias emacs="$EMACS_PLUGIN_LAUNCHER --no-wait"
|
||||
alias e=emacs
|
||||
# open terminal emacsclient
|
||||
alias te="$EMACS_PLUGIN_LAUNCHER -nw"
|
||||
# set EDITOR if not already defined.
|
||||
export EDITOR="${EDITOR:-${EMACS_PLUGIN_LAUNCHER}}"
|
||||
|
||||
# same than M-x eval but from outside Emacs.
|
||||
alias eeval="$EMACS_PLUGIN_LAUNCHER --eval"
|
||||
# create a new X frame
|
||||
alias eframe='emacsclient --alternate-editor "" --create-frame'
|
||||
alias emacs="$EMACS_PLUGIN_LAUNCHER --no-wait"
|
||||
alias e=emacs
|
||||
# open terminal emacsclient
|
||||
alias te="$EMACS_PLUGIN_LAUNCHER -nw"
|
||||
|
||||
# same than M-x eval but from outside Emacs.
|
||||
alias eeval="$EMACS_PLUGIN_LAUNCHER --eval"
|
||||
# create a new X frame
|
||||
alias eframe='emacsclient --alternate-editor "" --create-frame'
|
||||
|
||||
# Write to standard output the path to the file
|
||||
# opened in the current buffer.
|
||||
function efile {
|
||||
local cmd="(buffer-file-name (window-buffer))"
|
||||
"$EMACS_PLUGIN_LAUNCHER" --eval "$cmd" | tr -d \"
|
||||
}
|
||||
# Emacs ANSI Term tracking
|
||||
if [[ -n "$INSIDE_EMACS" ]]; then
|
||||
chpwd_emacs() { print -P "\033AnSiTc %d"; }
|
||||
print -P "\033AnSiTc %d" # Track current working directory
|
||||
print -P "\033AnSiTu %n" # Track username
|
||||
|
||||
# Write to standard output the directory of the file
|
||||
# opened in the the current buffer
|
||||
function ecd {
|
||||
local cmd="(let ((buf-name (buffer-file-name (window-buffer))))
|
||||
(if buf-name (file-name-directory buf-name)))"
|
||||
|
||||
local dir="$($EMACS_PLUGIN_LAUNCHER --eval $cmd | tr -d \")"
|
||||
if [ -n "$dir" ] ;then
|
||||
echo "$dir"
|
||||
else
|
||||
echo "can not deduce current buffer filename." >/dev/stderr
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
# add chpwd hook
|
||||
autoload -Uz add-zsh-hook
|
||||
add-zsh-hook chpwd chpwd_emacs
|
||||
fi
|
||||
|
||||
## Local Variables:
|
||||
## mode: sh
|
||||
## End:
|
||||
# Write to standard output the path to the file
|
||||
# opened in the current buffer.
|
||||
function efile {
|
||||
local cmd="(buffer-file-name (window-buffer))"
|
||||
local file="$("$EMACS_PLUGIN_LAUNCHER" --eval "$cmd" | tr -d \")"
|
||||
|
||||
if [[ -z "$file" ]]; then
|
||||
echo "Can't deduce current buffer filename." >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "$file"
|
||||
}
|
||||
|
||||
# Write to standard output the directory of the file
|
||||
# opened in the the current buffer
|
||||
function ecd {
|
||||
local file
|
||||
file="$(efile)" || return $?
|
||||
echo "${file:h}"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,29 +1,38 @@
|
|||
#!/bin/sh
|
||||
|
||||
_emacsfun()
|
||||
{
|
||||
# get list of emacs frames.
|
||||
frameslist=`emacsclient --alternate-editor '' --eval '(frame-list)' 2>/dev/null | egrep -o '(frame)+'`
|
||||
emacsfun() {
|
||||
local cmd frames
|
||||
|
||||
if [ "$(echo "$frameslist" | sed -n '$=')" -ge 2 ] ;then
|
||||
# prevent creating another X frame if there is at least one present.
|
||||
emacsclient --alternate-editor "" "$@"
|
||||
else
|
||||
# Create one if there is no X window yet.
|
||||
emacsclient --alternate-editor "" --create-frame "$@"
|
||||
fi
|
||||
# Build the Emacs Lisp command to check for suitable frames
|
||||
# See https://www.gnu.org/software/emacs/manual/html_node/elisp/Frames.html#index-framep
|
||||
case "$*" in
|
||||
*-t*|*--tty*|*-nw*) cmd="(memq 't (mapcar 'framep (frame-list)))" ;; # if != nil, there are tty frames
|
||||
*) cmd="(delete 't (mapcar 'framep (frame-list)))" ;; # if != nil, there are graphical terminals (x, w32, ns)
|
||||
esac
|
||||
|
||||
# Check if there are suitable frames
|
||||
frames="$(emacsclient -a '' -n -e "$cmd" 2>/dev/null |sed 's/.*\x07//g' )"
|
||||
|
||||
# Only create another X frame if there isn't one present
|
||||
if [ -z "$frames" -o "$frames" = nil ]; then
|
||||
emacsclient --alternate-editor "" --create-frame "$@"
|
||||
return $?
|
||||
fi
|
||||
|
||||
emacsclient --alternate-editor "" "$@"
|
||||
}
|
||||
|
||||
|
||||
# adopted from https://github.com/davidshepherd7/emacs-read-stdin/blob/master/emacs-read-stdin.sh
|
||||
# Adapted from https://github.com/davidshepherd7/emacs-read-stdin/blob/master/emacs-read-stdin.sh
|
||||
# If the second argument is - then write stdin to a tempfile and open the
|
||||
# tempfile. (first argument will be `--no-wait` passed in by the plugin.zsh)
|
||||
if [ "$#" -ge "2" -a "$2" = "-" ]
|
||||
then
|
||||
tempfile="$(mktemp --tmpdir emacs-stdin-$USER.XXXXXXX 2>/dev/null \
|
||||
|| mktemp -t emacs-stdin-$USER)" # support BSD mktemp
|
||||
cat - > "$tempfile"
|
||||
_emacsfun --no-wait $tempfile
|
||||
else
|
||||
_emacsfun "$@"
|
||||
if [ $# -ge 2 -a "$2" = "-" ]; then
|
||||
# Create a tempfile to hold stdin
|
||||
tempfile="$(mktemp --tmpdir emacs-stdin-$USERNAME.XXXXXXX 2>/dev/null \
|
||||
|| mktemp -t emacs-stdin-$USERNAME)" # support BSD mktemp
|
||||
# Redirect stdin to the tempfile
|
||||
cat - > "$tempfile"
|
||||
# Reset $2 to the tempfile so that "$@" works as expected
|
||||
set -- "$1" "$tempfile" "${@:3}"
|
||||
fi
|
||||
|
||||
emacsfun "$@"
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue