Mirror of https://github.com/ohmyzsh/ohmyzsh.git (synced 2026-02-06 02:51:32 +01:00)

Merge branch 'master' of https://github.com/ohmyzsh/ohmyzsh

Commit 62fe9a0185: 401 changed files with 43763 additions and 10538 deletions
plugins/1password/1password.plugin.zsh (Normal file, 9 lines)

@@ -0,0 +1,9 @@
# Do nothing if op is not installed
(( ${+commands[op]} )) || return

# Load op completion
eval "$(op completion zsh)"
compdef _op op

# Load opswd function
autoload -Uz opswd
plugins/1password/README.md (Normal file, 38 lines)

@@ -0,0 +1,38 @@
# 1Password

This plugin adds 1Password functionality to oh-my-zsh.

To use, add `1password` to the list of plugins in your `.zshrc` file:

```zsh
plugins=(... 1password)
```

Then, you can use the command `opswd` to copy passwords for services into your
clipboard.

## `opswd`

The `opswd` command is a wrapper around the `op` command. It takes a service
name as an argument and copies the password for that service to the clipboard.

If the service also contains a TOTP, it is copied to the clipboard after 10 seconds.
Finally, after 20 seconds, the clipboard is cleared.

The function has completion support, so you can use tab completion to select
which service you want to get.

For example, `opswd github.com` will put your GitHub password into your clipboard, and if
a TOTP is available, it will be copied to the clipboard after 10 seconds.

> NOTE: you need to be signed in for `opswd` to work. If you are using biometric unlock,
> 1Password CLI will automatically prompt you to sign in. See:
>
> - [Get started with 1Password CLI 2: Sign in](https://developer.1password.com/docs/cli/get-started#sign-in)
> - [Sign in to your 1Password account manually](https://developer.1password.com/docs/cli/sign-in-manually)

## Requirements

- [1Password CLI 2](https://developer.1password.com/docs/cli/get-started#install)

> NOTE: if you're using 1Password CLI 1, [see how to upgrade to CLI 2](https://developer.1password.com/docs/cli/upgrade).
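A quick sketch of the workflow described above, with `github.com` standing in for any Login item name (the output lines are illustrative):

```zsh
# Tab-complete a Login item name and copy its password
opswd github.com
# ✔ password for github.com copied to clipboard
# ~10 seconds later, if the item has a TOTP:
# ✔ TOTP for github.com copied to clipboard
# ~20 seconds after that, the clipboard is cleared
```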
plugins/1password/_opswd (Normal file, 19 lines)

@@ -0,0 +1,19 @@
#compdef opswd

function _opswd() {
  local -a services
  services=("${(@f)$(op item list --categories Login --cache 2>/dev/null | awk 'NR != 1 { print $2 }')}")
  [[ -z "$services" ]] || compadd -a -- services
}

# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
  function _opswd() {
    local -a services
    services=("${(@f)$(op list items --categories Login 2>/dev/null | op get item - --fields title 2>/dev/null)}")
    [[ -z "$services" ]] || compadd -a -- services
  }
}

_opswd "$@"
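The completion builds its candidate list with zsh's `(f)` expansion flag, which splits command output on newlines into one array element per line. A standalone sketch of the idiom (not part of the plugin):

```zsh
# Split multi-line output into an array, one element per line
lines=("${(@f)$(printf 'alpha\nbeta\ngamma')}")
print -l -- $lines   # alpha / beta / gamma
echo ${#lines}       # -> 3
```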
plugins/1password/opswd (Normal file, 78 lines)

@@ -0,0 +1,78 @@
#autoload

# opswd puts the password of the named service into the clipboard. If there's a
# one time password, it will be copied into the clipboard after 10 seconds. The
# clipboard is cleared after another 20 seconds.
function opswd() {
  if [[ $# -lt 1 ]]; then
    echo "Usage: opswd <service>"
    return 1
  fi

  local service=$1

  # If not logged in, print error and return
  op user list > /dev/null || return

  local password
  # Copy the password to the clipboard
  if ! password=$(op item get "$service" --fields password 2>/dev/null); then
    echo "error: could not obtain password for $service"
    return 1
  fi

  echo -n "$password" | clipcopy
  echo "✔ password for $service copied to clipboard"

  # If there's a one time password, copy it to the clipboard after 10 seconds
  local totp
  if totp=$(op item get --otp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
    sleep 10 && echo -n "$totp" | clipcopy
    echo "✔ TOTP for $service copied to clipboard"
  fi

  (sleep 20 && clipcopy </dev/null 2>/dev/null) &!
}

# TODO: 2022-03-26: Remove support for op CLI 1
autoload -Uz is-at-least
is-at-least 2.0.0 $(op --version) || {
  print -ru2 ${(%):-"%F{yellow}opswd: usage with op version $(op --version) is deprecated. Upgrade to CLI 2 and reload zsh.
For instructions, see https://developer.1password.com/docs/cli/upgrade.%f"}

  # opswd puts the password of the named service into the clipboard. If there's a
  # one time password, it will be copied into the clipboard after 10 seconds. The
  # clipboard is cleared after another 20 seconds.
  function opswd() {
    if [[ $# -lt 1 ]]; then
      echo "Usage: opswd <service>"
      return 1
    fi

    local service=$1

    # If not logged in, print error and return
    op list users > /dev/null || return

    local password
    # Copy the password to the clipboard
    if ! password=$(op get item "$service" --fields password 2>/dev/null); then
      echo "error: could not obtain password for $service"
      return 1
    fi

    echo -n "$password" | clipcopy
    echo "✔ password for $service copied to clipboard"

    # If there's a one time password, copy it to the clipboard after 10 seconds
    local totp
    if totp=$(op get totp "$service" 2>/dev/null) && [[ -n "$totp" ]]; then
      sleep 10 && echo -n "$totp" | clipcopy
      echo "✔ TOTP for $service copied to clipboard"
    fi

    (sleep 20 && clipcopy </dev/null 2>/dev/null) &!
  }
}

opswd "$@"
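The clipboard-clearing subshell above is launched with `&!`, zsh's shorthand for starting a background job and immediately disowning it, so no job-control message is printed and the job survives the function returning. A minimal sketch of the same idiom:

```zsh
# Delayed cleanup without job-control noise (file path is just an example)
(sleep 5 && rm -f /tmp/opswd-demo) &!
```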
@@ -5,7 +5,7 @@ This plugin provides completion support for [`ag`](https://github.com/ggreer/the
 To use it, add ag to the plugins array in your zshrc file.

 ```zsh
-plugins=(... aws)
+plugins=(... ag)
 ```

 ## INSTALLATION NOTES
plugins/aliases/.gitignore (Normal file, vendored, 1 line)

@@ -0,0 +1 @@
__pycache__
@@ -1,21 +1,22 @@
-## Aliases Cheatsheet
+# Aliases cheatsheet

 **Maintainer:** [@hqingyi](https://github.com/hqingyi)

 With lots of 3rd-party amazing aliases installed, this plugin helps list the shortcuts
 that are currently available based on the plugins you have enabled.

-Enable this plugin by adding it to your `plugins` definition in `~/.zshrc`.
-
-```
-plugins=(aliases)
-```
+To use it, add `aliases` to the plugins array in your zshrc file:
+
+```zsh
+plugins=(aliases)
+```

 Requirements: Python needs to be installed.

-### Usage
+## Usage

-```
-acs: group all alias
-acs $keyword    quickly filter alias & highlight
-```
+- `acs`: show all aliases by group.
+
+- `acs <keyword>`: filter aliases by `<keyword>` and highlight.

 
@@ -2,9 +2,10 @@
 #
 # - acs: alias cheatsheet
 # group alias by command, pass addition argv to grep.
-ALIASES_PLUGIN_ROOT=$(cd `dirname $0` && pwd)
 function acs(){
-  which python >>/dev/null
-  [[ $? -eq 1 ]] && echo "[error]no python executable detected!" && return
-  alias | python $ALIASES_PLUGIN_ROOT/cheatsheet.py $@
+  (( $+commands[python3] )) || {
+    echo "[error] No python executable detected"
+    return
+  }
+  alias | python3 ${functions_source[$0]:h}/cheatsheet.py $@
 }
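The rewritten `acs` tests for an executable with zsh's `$+commands` parameter instead of calling `which`; `(( $+commands[name] ))` is true only when `name` is present in the command hash. A standalone sketch (the command name is just an example):

```zsh
if (( $+commands[python3] )); then
  echo "python3 found at $commands[python3]"
else
  echo "python3 not installed"
fi
```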
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 import sys
 import itertools
 import termcolor

@@ -26,16 +26,16 @@ def cheatsheet(lines):
         target_aliases.extend(group_list)
     return cheatsheet

-def pretty_print_group(key, aliases, hightlight=None):
+def pretty_print_group(key, aliases, highlight=None):
     if len(aliases) == 0:
         return
     group_hl_formatter = lambda g, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'red') for part in ('[%s]' % g).split(hl)])
     alias_hl_formatter = lambda alias, hl: termcolor.colored(hl, 'yellow').join([termcolor.colored(part, 'green') for part in ('\t%s = %s' % alias[0:2]).split(hl)])
     group_formatter = lambda g: termcolor.colored('[%s]' % g, 'red')
     alias_formatter = lambda alias: termcolor.colored('\t%s = %s' % alias[0:2], 'green')
-    if hightlight and len(hightlight)>0:
-        print (group_hl_formatter(key, hightlight))
-        print ('\n'.join([alias_hl_formatter(alias, hightlight) for alias in aliases]))
+    if highlight and len(highlight)>0:
+        print (group_hl_formatter(key, highlight))
+        print ('\n'.join([alias_hl_formatter(alias, highlight) for alias in aliases]))
     else:
         print (group_formatter(key))
         print ('\n'.join([alias_formatter(alias) for alias in aliases]))
@@ -21,7 +21,7 @@
 #
 # Author: Konstantin Lepa <konstantin.lepa@gmail.com>

-"""ANSII Color formatting for output in terminal."""
+"""ANSI Color formatting for output in terminal."""

 from __future__ import print_function
 import os
@@ -2,9 +2,9 @@

 This plugin provides completion for [Ant](https://ant.apache.org/).

-To use it add ant to the plugins array in your zshrc file.
+To use it, add `ant` to the plugins array in your zshrc file:

-```bash
+```zsh
 plugins=(... ant)
 ```

plugins/ant/_ant (Normal file, 22 lines)

@@ -0,0 +1,22 @@
#compdef ant

_ant_does_target_list_need_generating () {
  [[ ! -f .ant_targets ]] && return 0
  [[ build.xml -nt .ant_targets ]] && return 0
  return 1
}

_ant () {
  if [[ ! -f build.xml ]]; then
    return
  fi

  if ! _ant_does_target_list_need_generating; then
    return
  fi

  ant -p | awk -F " " 'NR > 5 { print lastTarget } { lastTarget = $1 }' >| .ant_targets
  compadd -- "$(cat .ant_targets)"
}

_ant "$@"
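The awk program in `_ant` prints each line's first field delayed by one line, starting after the fifth line of `ant -p` output. A standalone sketch of that mechanism on dummy input:

```zsh
printf '%s\n' 'head 1' 'head 2' 'head 3' 'head 4' 'head 5' 'alpha desc' 'beta desc' 'gamma desc' |
  awk -F " " 'NR > 5 { print lastTarget } { lastTarget = $1 }'
# -> head   (first field of line 5, printed when line 6 is read)
# -> alpha
# -> beta
```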
@@ -1,16 +1,2 @@
-_ant_does_target_list_need_generating () {
-  [ ! -f .ant_targets ] && return 0;
-  [ build.xml -nt .ant_targets ] && return 0;
-  return 1;
-}
-
-_ant () {
-  if [ -f build.xml ]; then
-    if _ant_does_target_list_need_generating; then
-      ant -p | awk -F " " 'NR > 5 { print lastTarget }{lastTarget = $1}' > .ant_targets
-    fi
-    compadd -- `cat .ant_targets`
-  fi
-}
-
-compdef _ant ant
+# Default to colored output
+export ANT_ARGS='-logger org.apache.tools.ant.listener.AnsiColorLogger'
@ -14,6 +14,7 @@ plugins=(... arcanist)
|
|||
| ------- | ---------------------------------- |
|
||||
| ara | `arc amend` |
|
||||
| arb | `arc branch` |
|
||||
| arbl | `arc bland` |
|
||||
| arco | `arc cover` |
|
||||
| arci | `arc commit` |
|
||||
| ard | `arc diff` |
|
||||
|
|
@ -24,6 +25,7 @@ plugins=(... arcanist)
|
|||
| ardpc | `arc diff --plan-changes` |
|
||||
| are | `arc export` |
|
||||
| arh | `arc help` |
|
||||
| arho | `arc hotfix` |
|
||||
| arl | `arc land` |
|
||||
| arli | `arc lint` |
|
||||
| arls | `arc list` |
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
alias ara='arc amend'
|
||||
alias arb='arc branch'
|
||||
alias arbl='arc bland'
|
||||
alias arco='arc cover'
|
||||
alias arci='arc commit'
|
||||
|
||||
|
|
@ -17,6 +18,7 @@ alias ardp='arc diff --preview' # creates a new diff in the phab interface
|
|||
|
||||
alias are='arc export'
|
||||
alias arh='arc help'
|
||||
alias arho='arc hotfix'
|
||||
alias arl='arc land'
|
||||
alias arli='arc lint'
|
||||
alias arls='arc list'
|
||||
|
|
|
|||
|
|
@ -17,8 +17,10 @@ plugins=(... archlinux)
|
|||
| pacin | `sudo pacman -S` | Install packages from the repositories |
|
||||
| pacins | `sudo pacman -U` | Install a package from a local file |
|
||||
| pacinsd | `sudo pacman -S --asdeps` | Install packages as dependencies of another package |
|
||||
| paclean | `sudo pacman -Sc` | Clean out old and unused caches and packages |
|
||||
| pacloc | `pacman -Qi` | Display information about a package in the local database |
|
||||
| paclocs | `pacman -Qs` | Search for packages in the local database |
|
||||
| paclr | `sudo pacman -Scc` | Remove all files from the cache |
|
||||
| paclsorphans | `sudo pacman -Qdt` | List all orphaned packages |
|
||||
| pacmir | `sudo pacman -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| pacre | `sudo pacman -R` | Remove packages, keeping its settings and dependencies |
|
||||
|
|
@ -32,7 +34,7 @@ plugins=(... archlinux)
|
|||
| pacfiles | `pacman -F` | Search package file names for matching strings |
|
||||
| pacls | `pacman -Ql` | List files in a package |
|
||||
| pacown | `pacman -Qo` | Show which package owns a file |
|
||||
| upgrade[²](#f2) | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
|
||||
| upgrade[¹](#f1) | `sudo pacman -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
| Function | Description |
|
||||
|----------------|-----------------------------------------------------------|
|
||||
|
|
@ -52,6 +54,8 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
|
||||
| Alias | Command | Description |
|
||||
|---------|-------------------------------------------------|-------------------------------------------------------------------------|
|
||||
| auclean | `sudo aura -Sc` | Clean out old and unused caches and packages |
|
||||
| auclr | `sudo aura -Scc` | Remove all files from the cache |
|
||||
| auin | `sudo aura -S` | Install packages from the repositories |
|
||||
| aurin | `sudo aura -A` | Install packages from the repositories |
|
||||
| auins | `sudo aura -U` | Install a package from a local file |
|
||||
|
|
@ -73,7 +77,7 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
| auupd | `sudo aura -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| auupg | `sudo sh -c "aura -Syu && aura -Au"` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| ausu | `sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"` | Same as `auupg`, but without confirmation |
|
||||
| upgrade[²](#f2) | `sudo aura -Syu` | Sync with repositories before upgrading packages |
|
||||
| upgrade[¹](#f1) | `sudo aura -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
| Function | Description |
|
||||
|-----------------|---------------------------------------------------------------------|
|
||||
|
|
@ -84,6 +88,8 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
|
||||
| Alias | Command | Description |
|
||||
|---------|-----------------------------------|---------------------------------------------------------------------|
|
||||
| pacclean| `pacaur -Sc` | Clean out old and unused caches and packages |
|
||||
| pacclr | `pacaur -Scc` | Remove all files from the cache |
|
||||
| pain | `pacaur -S` | Install packages from the repositories |
|
||||
| pains | `pacaur -U` | Install a package from a local file |
|
||||
| painsd | `pacaur -S --asdeps` | Install packages as dependencies of another package |
|
||||
|
|
@ -99,13 +105,15 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
| paupd | `pacaur -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| paupg | `pacaur -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| pasu | `pacaur -Syua --no-confirm` | Same as `paupg`, but without confirmation |
|
||||
| upgrade[²](#f2) | `pacaur -Syu` | Sync with repositories before upgrading packages |
|
||||
| upgrade[¹](#f1) | `pacaur -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### Trizen
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|-----------------------------------|---------------------------------------------------------------------|
|
||||
| trconf | `trizen -C` | Fix all configuration files with vimdiff |
|
||||
| trclean | `trizen -Sc` | Clean out old and unused caches and packages |
|
||||
| trclr | `trizen -Scc` | Remove all files from the cache |
|
||||
| trin | `trizen -S` | Install packages from the repositories |
|
||||
| trins | `trizen -U` | Install a package from a local file |
|
||||
| trinsd | `trizen -S --asdeps` | Install packages as dependencies of another package |
|
||||
|
|
@ -121,35 +129,15 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
| trupd | `trizen -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| trupg | `trizen -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| trsu | `trizen -Syua --no-confirm` | Same as `trupg`, but without confirmation |
|
||||
| upgrade[²](#f2) | `trizen -Syu` | Sync with repositories before upgrading packages |
|
||||
| upgrade[¹](#f1) | `trizen -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### Yaourt[¹](#f1)
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|-----------------------------------|---------------------------------------------------------------------|
|
||||
| yaconf | `yaourt -C` | Fix all configuration files with vimdiff |
|
||||
| yain | `yaourt -S` | Install packages from the repositories |
|
||||
| yains | `yaourt -U` | Install a package from a local file |
|
||||
| yainsd | `yaourt -S --asdeps` | Install packages as dependencies of another package |
|
||||
| yaloc | `yaourt -Qi` | Display information about a package in the local database |
|
||||
| yalocs | `yaourt -Qs` | Search for packages in the local database |
|
||||
| yalst | `yaourt -Qe` | List installed packages including from AUR (tagged as "local") |
|
||||
| yamir | `yaourt -Syy` | Force refresh of all package lists after updating mirrorlist |
|
||||
| yaorph | `yaourt -Qtd` | Remove orphans using yaourt |
|
||||
| yare | `yaourt -R` | Remove packages, keeping its settings and dependencies |
|
||||
| yarem | `yaourt -Rns` | Remove packages, including its settings and unneeded dependencies |
|
||||
| yarep | `yaourt -Si` | Display information about a package in the repositories |
|
||||
| yareps | `yaourt -Ss` | Search for packages in the repositories |
|
||||
| yaupd | `yaourt -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupg | `yaourt -Syua` | Sync with repositories before upgrading all packages (from AUR too) |
|
||||
| yasu | `yaourt -Syua --no-confirm` | Same as `yaupg`, but without confirmation |
|
||||
| upgrade[²](#f2) | `yaourt -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
#### Yay[¹](#f1)
|
||||
#### Yay
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|--------------------------------|-------------------------------------------------------------------|
|
||||
| yaconf | `yay -Pg` | Print current configuration |
|
||||
| yaclean | `yay -Sc` | Clean out old and unused caches and packages |
|
||||
| yaclr | `yay -Scc` | Remove all files from the cache |
|
||||
| yain | `yay -S` | Install packages from the repositories |
|
||||
| yains | `yay -U` | Install a package from a local file |
|
||||
| yainsd | `yay -S --asdeps` | Install packages as dependencies of another package |
|
||||
|
|
@ -165,23 +153,19 @@ upgrades were available. Use `pacman -Que` instead.
|
|||
| yaupd | `yay -Sy` | Update and refresh local package, ABS and AUR databases |
|
||||
| yaupg | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
| yasu | `yay -Syu --no-confirm` | Same as `yaupg`, but without confirmation |
|
||||
| upgrade[²](#f2) | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
| upgrade[¹](#f1) | `yay -Syu` | Sync with repositories before upgrading packages |
|
||||
|
||||
---
|
||||
|
||||
<span id="f1">¹</span>
|
||||
Yay and Yaourt aliases overlap. If both are installed, yay will take precedence.
|
||||
|
||||
<span id="f2">²</span>
|
||||
The `upgrade` alias is set for all package managers. Its value will depend on
|
||||
whether the package manager is installed, checked in the following order:
|
||||
|
||||
1. `yay`
|
||||
2. `yaourt`
|
||||
3. `trizen`
|
||||
4. `pacaur`
|
||||
5. `aura`
|
||||
6. `pacman`
|
||||
2. `trizen`
|
||||
3. `pacaur`
|
||||
4. `aura`
|
||||
5. `pacman`
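A minimal sketch of the precedence just described, i.e. resolving `upgrade` from the first helper found in that order. This is illustrative only; the plugin itself defines the alias inside each helper's own `$+commands` block, so the effect is the same but the code differs:

```zsh
# Illustrative sketch, not the plugin's actual code
function _first_installed() {
  local cmd
  for cmd in yay trizen pacaur aura pacman; do
    (( $+commands[$cmd] )) && { print -r -- $cmd; return }
  done
}
_first_installed   # prints e.g. "pacman" on a system with no AUR helpers
```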
|
||||
|
||||
## Contributors
|
||||
|
||||
|
|
@ -196,3 +180,4 @@ whether the package manager is installed, checked in the following order:
|
|||
- ornicar - thibault.duplessis@gmail.com
|
||||
- Ybalrid (Arthur Brainville) - ybalrid@ybalrid.info
|
||||
- Jeff M. Hubbard - jeffmhubbard@gmail.com
|
||||
- K. Harishankar(harishnkr) - hari2menon1234@gmail.com
|
||||
|
|
|
|||
|
|
@ -5,7 +5,9 @@
|
|||
# Pacman - https://wiki.archlinux.org/index.php/Pacman_Tips
|
||||
alias pacupg='sudo pacman -Syu'
|
||||
alias pacin='sudo pacman -S'
|
||||
alias paclean='sudo pacman -Sc'
|
||||
alias pacins='sudo pacman -U'
|
||||
alias paclr='sudo pacman -Scc'
|
||||
alias pacre='sudo pacman -R'
|
||||
alias pacrem='sudo pacman -Rns'
|
||||
alias pacrep='pacman -Si'
|
||||
|
|
@ -88,6 +90,8 @@ fi
|
|||
if (( $+commands[aura] )); then
|
||||
alias auin='sudo aura -S'
|
||||
alias aurin='sudo aura -A'
|
||||
alias auclean='sudo aura -Sc'
|
||||
alias auclr='sudo aura -Scc'
|
||||
alias auins='sudo aura -U'
|
||||
alias auinsd='sudo aura -S --asdeps'
|
||||
alias aurinsd='sudo aura -A --asdeps'
|
||||
|
|
@ -104,7 +108,7 @@ if (( $+commands[aura] )); then
|
|||
alias auras='aura -As --both'
|
||||
alias auupd="sudo aura -Sy"
|
||||
alias auupg='sudo sh -c "aura -Syu && aura -Au"'
|
||||
alias ausu='sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"'
|
||||
alias ausu='sudo sh -c "aura -Syu --no-confirm && aura -Au --no-confirm"'
|
||||
alias upgrade='sudo aura -Syu'
|
||||
|
||||
# extra bonus specially for aura
|
||||
|
|
@ -115,6 +119,8 @@ if (( $+commands[aura] )); then
|
|||
fi
|
||||
|
||||
if (( $+commands[pacaur] )); then
|
||||
alias pacclean='pacaur -Sc'
|
||||
alias pacclr='pacaur -Scc'
|
||||
alias paupg='pacaur -Syu'
|
||||
alias pasu='pacaur -Syu --noconfirm'
|
||||
alias pain='pacaur -S'
|
||||
|
|
@ -138,6 +144,8 @@ if (( $+commands[trizen] )); then
|
|||
alias trupg='trizen -Syua'
|
||||
alias trsu='trizen -Syua --noconfirm'
|
||||
alias trin='trizen -S'
|
||||
alias trclean='trizen -Sc'
|
||||
alias trclr='trizen -Scc'
|
||||
alias trins='trizen -U'
|
||||
alias trre='trizen -R'
|
||||
alias trrem='trizen -Rns'
|
||||
|
|
@ -153,28 +161,10 @@ if (( $+commands[trizen] )); then
|
|||
alias upgrade='trizen -Syu'
|
||||
fi
|
||||
|
||||
if (( $+commands[yaourt] )); then
|
||||
alias yaconf='yaourt -C'
|
||||
alias yaupg='yaourt -Syua'
|
||||
alias yasu='yaourt -Syua --noconfirm'
|
||||
alias yain='yaourt -S'
|
||||
alias yains='yaourt -U'
|
||||
alias yare='yaourt -R'
|
||||
alias yarem='yaourt -Rns'
|
||||
alias yarep='yaourt -Si'
|
||||
alias yareps='yaourt -Ss'
|
||||
alias yaloc='yaourt -Qi'
|
||||
alias yalocs='yaourt -Qs'
|
||||
alias yalst='yaourt -Qe'
|
||||
alias yaorph='yaourt -Qtd'
|
||||
alias yainsd='yaourt -S --asdeps'
|
||||
alias yamir='yaourt -Syy'
|
||||
alias yaupd="yaourt -Sy"
|
||||
alias upgrade='yaourt -Syu'
|
||||
fi
|
||||
|
||||
if (( $+commands[yay] )); then
|
||||
alias yaconf='yay -Pg'
|
||||
alias yaclean='yay -Sc'
|
||||
alias yaclr='yay -Scc'
|
||||
alias yaupg='yay -Syu'
|
||||
alias yasu='yay -Syu --noconfirm'
|
||||
alias yain='yay -S'
|
||||
|
|
@ -192,4 +182,3 @@ if (( $+commands[yay] )); then
|
|||
alias yaupd="yay -Sy"
|
||||
alias upgrade='yay -Syu'
|
||||
fi
|
||||
|
||||
|
|
|
|||
|
|
@ -2,18 +2,26 @@
|
|||
ASDF_DIR="${ASDF_DIR:-$HOME/.asdf}"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR/completions"
|
||||
|
||||
# If not found, check for archlinux/AUR package (/opt/asdf-vm/)
|
||||
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && [[ -f "/opt/asdf-vm/asdf.sh" ]]; then
|
||||
ASDF_DIR="/opt/asdf-vm"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR"
|
||||
fi
|
||||
|
||||
# If not found, check for Homebrew package
|
||||
if [[ ! -f "$ASDF_DIR/asdf.sh" || ! -f "$ASDF_COMPLETIONS/asdf.bash" ]] && (( $+commands[brew] )); then
|
||||
ASDF_DIR="$(brew --prefix asdf)"
|
||||
ASDF_COMPLETIONS="$ASDF_DIR/etc/bash_completion.d"
|
||||
brew_prefix="$(brew --prefix asdf)"
|
||||
ASDF_DIR="${brew_prefix}/libexec"
|
||||
ASDF_COMPLETIONS="${brew_prefix}/etc/bash_completion.d"
|
||||
unset brew_prefix
|
||||
fi
|
||||
|
||||
# Load command
|
||||
if [[ -f "$ASDF_DIR/asdf.sh" ]]; then
|
||||
. "$ASDF_DIR/asdf.sh"
|
||||
. "$ASDF_DIR/asdf.sh"
|
||||
|
||||
# Load completions
|
||||
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
|
||||
. "$ASDF_COMPLETIONS/asdf.bash"
|
||||
fi
|
||||
# Load completions
|
||||
if [[ -f "$ASDF_COMPLETIONS/asdf.bash" ]]; then
|
||||
. "$ASDF_COMPLETIONS/asdf.bash"
|
||||
fi
|
||||
fi
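After this block runs, `$ASDF_DIR` points at whichever installation was found (default path, AUR package under /opt/asdf-vm, or Homebrew's libexec). A quick, illustrative way to check which one was picked up in an interactive shell:

```zsh
echo "asdf loaded from: $ASDF_DIR"
type asdf   # shows whether asdf is available as a function/command in this shell
```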
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ if ! type autoenv_init >/dev/null; then
|
|||
~/.autoenv
|
||||
~/.local/bin
|
||||
/usr/local/opt/autoenv
|
||||
/opt/homebrew/opt/autoenv
|
||||
/usr/local/bin
|
||||
/usr/share/autoenv-git
|
||||
~/Library/Python/bin
|
||||
|
|
|
|||
|
|
@ -8,8 +8,10 @@ autojump_paths=(
|
|||
/etc/profile.d/autojump.zsh # manual installation
|
||||
/etc/profile.d/autojump.sh # Gentoo installation
|
||||
/usr/local/share/autojump/autojump.zsh # FreeBSD installation
|
||||
/usr/pkg/share/autojump/autojump.zsh # NetBSD installation
|
||||
/opt/local/etc/profile.d/autojump.sh # macOS with MacPorts
|
||||
/usr/local/etc/profile.d/autojump.sh # macOS with Homebrew (default)
|
||||
/opt/homebrew/etc/profile.d/autojump.sh # macOS with Homebrew (default on M1 macs)
|
||||
)
|
||||
|
||||
for file in $autojump_paths; do
|
||||
|
|
|
|||
|
|
@ -14,11 +14,12 @@ plugins=(... aws)
|
|||
* `asp [<profile>]`: sets `$AWS_PROFILE` and `$AWS_DEFAULT_PROFILE` (legacy) to `<profile>`.
|
||||
It also sets `$AWS_EB_PROFILE` to `<profile>` for the Elastic Beanstalk CLI.
|
||||
Run `asp` without arguments to clear the profile.
|
||||
* `asp [<profile>] login`: If AWS SSO has been configured in your aws profile, it will run the `aws sso login` command following profile selection.
|
||||
|
||||
* `acp [<profile>]`: in addition to `asp` functionality, it actually changes the profile by
|
||||
assuming the role specified in the `<profile>` configuration. It supports MFA and sets
|
||||
`$AWS_ACCESS_KEY_ID`, `$AWS_SECRET_ACCESS_KEY` and `$AWS_SESSION_TOKEN`, if obtained. It
|
||||
requires the roles to be configured as per the
|
||||
* `acp [<profile>] [<mfa_token>]`: in addition to `asp` functionality, it actually changes
|
||||
the profile by assuming the role specified in the `<profile>` configuration. It supports
|
||||
MFA and sets `$AWS_ACCESS_KEY_ID`, `$AWS_SECRET_ACCESS_KEY` and `$AWS_SESSION_TOKEN`, if
|
||||
obtained. It requires the roles to be configured as per the
|
||||
[official guide](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-role.html).
|
||||
Run `acp` without arguments to clear the profile.
|
||||
|
||||
|
|
|
|||
|
|
@ -21,6 +21,10 @@ function asp() {
|
|||
export AWS_DEFAULT_PROFILE=$1
|
||||
export AWS_PROFILE=$1
|
||||
export AWS_EB_PROFILE=$1
|
||||
|
||||
if [[ "$2" == "login" ]]; then
|
||||
aws sso login
|
||||
fi
|
||||
}
|
||||
|
||||
# AWS profile switch
|
||||
|
|
@ -41,6 +45,7 @@ function acp() {
|
|||
fi
|
||||
|
||||
local profile="$1"
|
||||
local mfa_token="$2"
|
||||
|
||||
# Get fallback credentials for if the aws command fails or no command is run
|
||||
local aws_access_key_id="$(aws configure get aws_access_key_id --profile $profile)"
|
||||
|
|
@ -54,9 +59,10 @@ function acp() {
|
|||
|
||||
if [[ -n "$mfa_serial" ]]; then
|
||||
local -a mfa_opt
|
||||
local mfa_token
|
||||
echo -n "Please enter your MFA token for $mfa_serial: "
|
||||
read -r mfa_token
|
||||
if [[ -z "$mfa_token" ]]; then
|
||||
echo -n "Please enter your MFA token for $mfa_serial: "
|
||||
read -r mfa_token
|
||||
fi
|
||||
if [[ -z "$sess_duration" ]]; then
|
||||
echo -n "Please enter the session duration in seconds (900-43200; default: 3600, which is the default maximum for a role): "
|
||||
read -r sess_duration
|
||||
|
|
@ -151,8 +157,8 @@ compctl -K _aws_profiles asp acp aws_change_access_key
|
|||
|
||||
# AWS prompt
|
||||
function aws_prompt_info() {
|
||||
[[ -z $AWS_PROFILE ]] && return
|
||||
echo "${ZSH_THEME_AWS_PREFIX:=<aws:}${AWS_PROFILE}${ZSH_THEME_AWS_SUFFIX:=>}"
|
||||
[[ -n "$AWS_PROFILE" ]] || return
|
||||
echo "${ZSH_THEME_AWS_PREFIX=<aws:}${AWS_PROFILE:gs/%/%%}${ZSH_THEME_AWS_SUFFIX=>}"
|
||||
}
|
||||
|
||||
if [[ "$SHOW_AWS_PROMPT" != false && "$RPROMPT" != *'$(aws_prompt_info)'* ]]; then
|
||||
|
|
|
|||
|
|
@ -8,15 +8,22 @@ To use, add `battery` to the list of plugins in your `.zshrc` file:
|
|||
|
||||
Then, add the `battery_pct_prompt` function to your custom theme. For example:
|
||||
|
||||
```
|
||||
```zsh
|
||||
RPROMPT='$(battery_pct_prompt) ...'
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
On Linux, you must have the `acpi` tool installed on your operating system.
|
||||
- On Linux, you must have the `acpi` or `acpitool` commands installed on your operating system.
|
||||
On Debian/Ubuntu, you can do that with `sudo apt install acpi` or `sudo apt install acpitool`.
|
||||
|
||||
Here's an example of how to install with apt:
|
||||
```
|
||||
sudo apt-get install acpi
|
||||
```
|
||||
- On Android (via [Termux](https://play.google.com/store/apps/details?id=com.termux)), you must have:
|
||||
|
||||
1. The `Termux:API` addon app installed:
|
||||
[Google Play](https://play.google.com/store/apps/details?id=com.termux.api) | [F-Droid](https://f-droid.org/packages/com.termux.api/)
|
||||
|
||||
2. The `termux-api` package installed within termux:
|
||||
|
||||
```sh
|
||||
pkg install termux-api
|
||||
```
|
||||
|
|
|
|||
|
|
@ -10,17 +10,17 @@
|
|||
# Author: J (927589452) #
|
||||
# Modified to add support for FreeBSD #
|
||||
###########################################
|
||||
# Author: Avneet Singh (kalsi-avneet) #
|
||||
# Modified to add support for Android #
|
||||
###########################################
|
||||
|
||||
if [[ "$OSTYPE" = darwin* ]]; then
|
||||
|
||||
function battery_is_charging() {
|
||||
ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ Yes'
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
pmset -g batt | grep -Eo "\d+%" | cut -d% -f1
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if battery_is_charging; then
|
||||
echo "External Power"
|
||||
|
|
@ -28,7 +28,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
battery_pct
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
local smart_battery_status="$(ioreg -rc "AppleSmartBattery")"
|
||||
if [[ $(echo $smart_battery_status | command grep -c '^.*"ExternalConnected"\ =\ No') -eq 1 ]]; then
|
||||
|
|
@ -42,7 +41,6 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
echo "∞"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_prompt () {
|
||||
local battery_pct color
|
||||
if ioreg -rc AppleSmartBattery | command grep -q '^.*"ExternalConnected"\ =\ No'; then
|
||||
|
|
@ -61,17 +59,14 @@ if [[ "$OSTYPE" = darwin* ]]; then
|
|||
}
|
||||
|
||||
elif [[ "$OSTYPE" = freebsd* ]]; then
|
||||
|
||||
function battery_is_charging() {
|
||||
[[ $(sysctl -n hw.acpi.battery.state) -eq 2 ]]
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
if (( $+commands[sysctl] )); then
|
||||
sysctl -n hw.acpi.battery.life
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
|
|
@ -79,7 +74,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
local remaining_time
|
||||
remaining_time=$(sysctl -n hw.acpi.battery.time)
|
||||
|
|
@ -89,7 +83,6 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
printf %02d:%02d $hour $minute
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
|
|
@ -106,19 +99,22 @@ elif [[ "$OSTYPE" = freebsd* ]]; then
|
|||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
|
||||
elif [[ "$OSTYPE" = linux* ]]; then
|
||||
|
||||
elif [[ "$OSTYPE" = linux-android ]] && (( ${+commands[termux-battery-status]} )); then
|
||||
function battery_is_charging() {
|
||||
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
|
||||
termux-battery-status 2>/dev/null | command awk '/status/ { exit ($0 ~ /DISCHARGING/) }'
|
||||
}
|
||||
|
||||
function battery_pct() {
|
||||
if (( $+commands[acpi] )); then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -E '^Battery.*(Full|(Disc|C)harging)' | cut -f2 -d ',' | tr -cd '[:digit:]'
|
||||
fi
|
||||
# Sample output:
|
||||
# {
|
||||
# "health": "GOOD",
|
||||
# "percentage": 93,
|
||||
# "plugged": "UNPLUGGED",
|
||||
# "status": "DISCHARGING",
|
||||
# "temperature": 29.0,
|
||||
# "current": 361816
|
||||
# }
|
||||
termux-battery-status 2>/dev/null | command awk '/percentage/ { gsub(/[,]/,""); print $2}'
|
||||
}
|
||||
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
|
|
@ -126,13 +122,72 @@ elif [[ "$OSTYPE" = linux* ]]; then
|
|||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
|
||||
fi
|
||||
}
|
||||
|
||||
function battery_time_remaining() { } # Not available on android
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
if battery_is_charging; then
|
||||
echo "∞"
|
||||
else
|
||||
if [[ $battery_pct -gt 50 ]]; then
|
||||
color='green'
|
||||
elif [[ $battery_pct -gt 20 ]]; then
|
||||
color='yellow'
|
||||
else
|
||||
color='red'
|
||||
fi
|
||||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
elif [[ "$OSTYPE" = linux* ]]; then
|
||||
function battery_is_charging() {
|
||||
if (( $+commands[acpitool] )); then
|
||||
! acpitool 2>/dev/null | command grep -qE '^\s+Battery.*Discharging'
|
||||
elif (( $+commands[acpi] )); then
|
||||
! acpi 2>/dev/null | command grep -v "rate information unavailable" | command grep -q '^Battery.*Discharging'
|
||||
fi
|
||||
}
|
||||
function battery_pct() {
|
||||
if (( $+commands[acpitool] )); then
|
||||
# Sample output:
|
||||
# Battery #1 : Unknown, 99.55%
|
||||
# Battery #2 : Discharging, 49.58%, 01:12:05
|
||||
# All batteries : 62.60%, 02:03:03
|
||||
local -i pct=$(acpitool 2>/dev/null | command awk -F, '
|
||||
/^\s+All batteries/ {
|
||||
gsub(/[^0-9.]/, "", $1)
|
||||
pct=$1
|
||||
exit
|
||||
}
|
||||
!pct && /^\s+Battery/ {
|
||||
gsub(/[^0-9.]/, "", $2)
|
||||
pct=$2
|
||||
}
|
||||
END { print pct }
|
||||
')
|
||||
echo $pct
|
||||
elif (( $+commands[acpi] )); then
|
||||
# Sample output:
|
||||
# Battery 0: Discharging, 0%, rate information unavailable
|
||||
# Battery 1: Full, 100%
|
||||
acpi 2>/dev/null | command awk -F, '
|
||||
/rate information unavailable/ { next }
|
||||
/^Battery.*: /{ gsub(/[^0-9]/, "", $2); print $2; exit }
|
||||
'
|
||||
fi
|
||||
}
|
||||
function battery_pct_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
battery_pct
|
||||
else
|
||||
echo "External Power"
|
||||
fi
|
||||
}
|
||||
function battery_time_remaining() {
|
||||
if ! battery_is_charging; then
|
||||
acpi 2>/dev/null | command grep -v "rate information unavailable" | cut -f3 -d ','
|
||||
fi
|
||||
}
|
||||
function battery_pct_prompt() {
|
||||
local battery_pct color
|
||||
battery_pct=$(battery_pct_remaining)
|
||||
|
|
@ -149,7 +204,6 @@ elif [[ "$OSTYPE" = linux* ]]; then
|
|||
echo "%{$fg[$color]%}${battery_pct}%%%{$reset_color%}"
|
||||
fi
|
||||
}
|
||||
|
||||
else
|
||||
# Empty functions so we don't cause errors in prompts
|
||||
function battery_is_charging { false }
|
||||
|
|
@ -174,7 +228,7 @@ function battery_level_gauge() {
|
|||
local charging_color=${BATTERY_CHARGING_COLOR:-$color_yellow}
|
||||
local charging_symbol=${BATTERY_CHARGING_SYMBOL:-'⚡'}
|
||||
|
||||
local battery_remaining_percentage=$(battery_pct)
|
||||
local -i battery_remaining_percentage=$(battery_pct)
|
||||
local filled empty gauge_color
|
||||
|
||||
if [[ $battery_remaining_percentage =~ [0-9]+ ]]; then
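Declaring the percentage with `local -i` forces an integer value, so an empty result from `battery_pct` collapses to `0` instead of an empty string that could break the comparisons that follow. A standalone sketch:

```zsh
function demo() {
  local -i pct=$(echo "")   # command produced no output
  echo $pct                 # -> 0, so numeric comparisons stay safe
}
demo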
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,14 @@
-## Bazel autocomplete plugin
+# Bazel plugin

-A copy of the completion script from the
-[bazelbuild/bazel](https://github.com/bazelbuild/bazel/master/scripts/zsh_completion/_bazel)
-git repo.
+This plugin adds completion for [bazel](https://bazel.build), an open-source build and
+test tool that scalably supports multi-language and multi-platform projects.
+
+To use it, add `bazel` to the plugins array in your zshrc file:
+
+```zsh
+plugins=(... bazel)
+```
+
+The plugin has a copy of [the completion script from the git repository][1].
+
+[1]: https://github.com/bazelbuild/bazel/blob/master/scripts/zsh_completion/_bazel
@ -164,7 +164,7 @@ _get_build_targets() {
|
|||
;;
|
||||
esac
|
||||
completions=(${$(_bazel_b query "kind(\"${rule_re}\", ${pkg}:all)" 2>/dev/null)##*:})
|
||||
if ( (( ${#completions} > 0 )) && [[ $target_type != run ]] ); then
|
||||
if ( (( ${#completions} > 0 )) && [[ $target_type != bin ]] ); then
|
||||
completions+=(all)
|
||||
fi
|
||||
echo ${completions[*]}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,12 @@ if ! (type bgnotify_formatted | grep -q 'function'); then ## allow custom functi
|
|||
}
|
||||
fi
|
||||
|
||||
currentAppId () {
|
||||
if (( $+commands[osascript] )); then
|
||||
osascript -e 'tell application (path to frontmost application as text) to id' 2>/dev/null
|
||||
fi
|
||||
}
|
||||
|
||||
currentWindowId () {
|
||||
if hash osascript 2>/dev/null; then #osx
|
||||
osascript -e 'tell application (path to frontmost application as text) to id of front window' 2&> /dev/null || echo "0"
|
||||
|
|
@ -32,11 +38,20 @@ currentWindowId () {
|
|||
|
||||
bgnotify () { ## args: (title, subtitle)
|
||||
if hash terminal-notifier 2>/dev/null; then #osx
|
||||
[[ "$TERM_PROGRAM" == 'iTerm.app' ]] && term_id='com.googlecode.iterm2';
|
||||
[[ "$TERM_PROGRAM" == 'Apple_Terminal' ]] && term_id='com.apple.terminal';
|
||||
local term_id="$bgnotify_appid"
|
||||
if [[ -z "$term_id" ]]; then
|
||||
case "$TERM_PROGRAM" in
|
||||
iTerm.app) term_id='com.googlecode.iterm2' ;;
|
||||
Apple_Terminal) term_id='com.apple.terminal' ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
## now call terminal-notifier, (hopefully with $term_id!)
|
||||
[ -z "$term_id" ] && terminal-notifier -message "$2" -title "$1" >/dev/null ||
|
||||
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" >/dev/null
|
||||
if [[ -z "$term_id" ]]; then
|
||||
terminal-notifier -message "$2" -title "$1" >/dev/null
|
||||
else
|
||||
terminal-notifier -message "$2" -title "$1" -activate "$term_id" -sender "$term_id" >/dev/null
|
||||
fi
|
||||
elif hash growlnotify 2>/dev/null; then #osx growl
|
||||
growlnotify -m "$1" "$2"
|
||||
elif hash notify-send 2>/dev/null; then #ubuntu gnome!
|
||||
|
|
@ -54,6 +69,7 @@ bgnotify () { ## args: (title, subtitle)
|
|||
bgnotify_begin() {
|
||||
bgnotify_timestamp=$EPOCHSECONDS
|
||||
bgnotify_lastcmd="${1:-$2}"
|
||||
bgnotify_appid="$(currentAppId)"
|
||||
bgnotify_windowid=$(currentWindowId)
|
||||
}
|
||||
|
||||
|
|
@ -62,7 +78,7 @@ bgnotify_end() {
|
|||
elapsed=$(( EPOCHSECONDS - bgnotify_timestamp ))
|
||||
past_threshold=$(( elapsed >= bgnotify_threshold ))
|
||||
if (( bgnotify_timestamp > 0 )) && (( past_threshold )); then
|
||||
if [ $(currentWindowId) != "$bgnotify_windowid" ]; then
|
||||
if [[ $(currentAppId) != "$bgnotify_appid" || $(currentWindowId) != "$bgnotify_windowid" ]]; then
|
||||
print -n "\a"
|
||||
bgnotify_formatted "$didexit" "$bgnotify_lastcmd" "$elapsed"
|
||||
fi
|
||||
|
|
|
|||
|
|
@ -1,6 +0,0 @@
|
|||
## Boot2docker autocomplete plugin
|
||||
|
||||
- Adds autocomplete options for all boot2docker commands.
|
||||
|
||||
|
||||
Maintainer : Manfred Touron ([@moul](https://github.com/moul))
|
||||
|
|
@ -1,73 +0,0 @@
|
|||
#compdef boot2docker
|
||||
|
||||
# Boot2docker autocompletion for oh-my-zsh
|
||||
# Requires: Boot2docker installed
|
||||
# Author: Manfred Touron (@moul)
|
||||
|
||||
local -a _1st_arguments
|
||||
_1st_arguments=(
|
||||
"init":"Create a new Boot2Docker VM."
|
||||
"up":"Start VM from any states."
|
||||
"start":"Start VM from any states."
|
||||
"boot":"Start VM from any states."
|
||||
"ssh":"[ssh-command] Login to VM via SSH."
|
||||
"save":"Suspend VM and save state to disk."
|
||||
"suspend":"Suspend VM and save state to disk."
|
||||
"down":"Gracefully shutdown the VM."
|
||||
"stop":"Gracefully shutdown the VM."
|
||||
"halt":"Gracefully shutdown the VM."
|
||||
"restart":"Gracefully reboot the VM."
|
||||
"poweroff":"Forcefully power off the VM (may corrupt disk image)."
|
||||
"reset":"Forcefully power cycle the VM (may corrupt disk image)."
|
||||
"delete":"Delete Boot2Docker VM and its disk image."
|
||||
"destroy":"Delete Boot2Docker VM and its disk image."
|
||||
"config":"Show selected profile file settings."
|
||||
"cfg":"Show selected profile file settings."
|
||||
"info":"Display detailed information of VM."
|
||||
"ip":"Display the IP address of the VM's Host-only network."
|
||||
"socket":"Display the DOCKER_HOST socket to connect to."
|
||||
"shellinit":"Display the shell command to set up the Docker client."
|
||||
"status":"Display current state of VM."
|
||||
"download":"Download Boot2Docker ISO image."
|
||||
"upgrade":"Upgrade the Boot2Docker ISO image (restart if running)."
|
||||
"version":"Display version information."
|
||||
)
|
||||
|
||||
_arguments \
|
||||
'(--basevmdk)--basevmdk[Path to VMDK to use as base for persistent partition]' \
|
||||
'(--cpus)'{-c,--cpus}'[number of CPUs for boot2docker.]' \
|
||||
'(--clobber)--clobber[overwrite Docker client binary on boot2docker upgrade]' \
|
||||
'(--dhcp)--dhcp[enable VirtualBox host-only network DHCP.]' \
|
||||
'(--dhcpip)--dhcpip[VirtualBox host-only network DHCP server address.]' \
|
||||
'(-s --disksize)'{-s,--disksize}'[boot2docker disk image size (in MB).]' \
|
||||
'(--dockerport)--dockerport[host Docker port (forward to port 2376 in VM). (deprecated - use with care)]' \
|
||||
'(--driver)--driver[hypervisor driver.]' \
|
||||
'(--force-upgrade-download)--force-upgrade-download[always download on boot2docker upgrade, never skip.]' \
|
||||
'(--hostip)--hostip[VirtualBox host-only network IP address.]' \
|
||||
'(--iso)--iso[path to boot2docker ISO image.]' \
|
||||
'(--iso-url)--iso-url[/api.github.com/repos/boot2docker/boot2docker/releases": source URL to provision the boot2docker ISO image.]' \
|
||||
'(--lowerip)--lowerip[VirtualBox host-only network DHCP lower bound.]' \
|
||||
'(--memory)'{-m,--memory}'[virtual machine memory size (in MB).]' \
|
||||
'(--netmask)--netmask[VirtualBox host-only network mask.]' \
|
||||
'(--no-dummy)--no-dummy[Example parameter for the dummy driver.]' \
|
||||
'(--retries)--retries[number of port knocking retries during 'start']' \
|
||||
'(--serial)--serial[try serial console to get IP address (experimental)]' \
|
||||
'(--serialfile)--serialfile[path to the serial socket/pipe.]' \
|
||||
'(--ssh)--ssh[path to SSH client utility.]' \
|
||||
'(--ssh-keygen)--ssh-keygen[path to ssh-keygen utility.]' \
|
||||
'(--sshkey)--sshkey[path to SSH key to use.]' \
|
||||
'(--sshport)--sshport[host SSH port (forward to port 22 in VM).]' \
|
||||
'(--upperip)--upperip[VirtualBox host-only network DHCP upper bound.]' \
|
||||
'(--vbm)--vbm[path to VirtualBox management utility.]' \
|
||||
'(--vbox-share)--vbox-share[(defaults to "/Users=Users" if no shares are specified; use "disable" to explicitly prevent any shares from being created) List of directories to share during "up|start|boot" via VirtualBox Guest Additions, with optional labels]' \
|
||||
'(--verbose)'{-v,--verbose}'[display verbose command invocations.]' \
|
||||
'(--vm)--vm[virtual machine name.]' \
|
||||
'(--waittime)--waittime[Time in milliseconds to wait between port knocking retries during 'start']' \
|
||||
'*:: :->subcmds' && return 0
|
||||
|
||||
#_arguments '*:: :->command'
|
||||
|
||||
if (( CURRENT == 1 )); then
|
||||
_describe -t commands "boot2docker command" _1st_arguments
|
||||
return
|
||||
fi
|
||||
|
|
@ -9,7 +9,7 @@ _bower_installed_packages () {
|
|||
}
|
||||
_bower ()
|
||||
{
|
||||
local -a _1st_arguments _no_color _dopts _save_dev _force_lastest _production
|
||||
local -a _1st_arguments _no_color _dopts _save_dev _force_latest _production
|
||||
local expl
|
||||
typeset -A opt_args
|
||||
|
||||
|
|
@ -22,7 +22,7 @@ _bower ()
|
|||
|
||||
_save_dev=('(--save-dev)--save-dev[Save installed packages into the project"s bower.json devDependencies]')
|
||||
|
||||
_force_lastest=('(--force-latest)--force-latest[Force latest version on conflict]')
|
||||
_force_latest=('(--force-latest)--force-latest[Force latest version on conflict]')
|
||||
|
||||
_production=('(--production)--production[Do not install project devDependencies]')
|
||||
|
||||
|
|
@ -54,7 +54,7 @@ _bower ()
|
|||
_arguments \
|
||||
$_dopts \
|
||||
$_save_dev \
|
||||
$_force_lastest \
|
||||
$_force_latest \
|
||||
$_no_color \
|
||||
$_production
|
||||
;;
|
||||
|
|
@ -62,7 +62,7 @@ _bower ()
|
|||
_arguments \
|
||||
$_dopts \
|
||||
$_no_color \
|
||||
$_force_lastest
|
||||
$_force_latest
|
||||
_bower_installed_packages
|
||||
compadd "$@" $(echo $bower_package_list)
|
||||
;;
|
||||
|
|
|
|||
|
|
@ -1,31 +1,47 @@
|
|||
# Branch
|
||||
# Branch plugin
|
||||
|
||||
Displays the current Git or Mercurial branch fast.
|
||||
This plugin displays the current Git or Mercurial branch, fast. If in a Mercurial repository,
|
||||
also display the current bookmark, if present.
|
||||
|
||||
To use it, add `branch` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... branch)
|
||||
```
|
||||
|
||||
## Speed test
|
||||
|
||||
### Mercurial
|
||||
- `hg branch`:
|
||||
|
||||
```shell
|
||||
$ time hg branch
|
||||
0.11s user 0.14s system 70% cpu 0.355 total
|
||||
```
|
||||
```console
|
||||
$ time hg branch
|
||||
0.11s user 0.14s system 70% cpu 0.355 total
|
||||
```
|
||||
|
||||
### Branch plugin
|
||||
- branch plugin:
|
||||
|
||||
```shell
|
||||
$ time zsh /tmp/branch_prompt_info_test.zsh
|
||||
0.00s user 0.01s system 78% cpu 0.014 total
|
||||
```
|
||||
```console
|
||||
$ time zsh /tmp/branch_prompt_info_test.zsh
|
||||
0.00s user 0.01s system 78% cpu 0.014 total
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Edit your theme file (eg.: `~/.oh-my-zsh/theme/robbyrussell.zsh-theme`)
|
||||
adding `$(branch_prompt_info)` in your prompt like this:
|
||||
Copy your theme to `$ZSH_CUSTOM/themes/` and modify it to add `$(branch_prompt_info)` in your prompt.
|
||||
This example is for the `robbyrussell` theme:
|
||||
|
||||
```diff
|
||||
- PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
|
||||
+ PROMPT='${ret_status}%{$fg_bold[green]%}%p %{$fg[cyan]%}%c %{$fg_bold[blue]%}$(git_prompt_info)$(branch_prompt_info)%{$fg_bold[blue]%} % %{$reset_color%}'
|
||||
diff --git a/themes/robbyrussell.zsh-theme b/themes/robbyrussell.zsh-theme
|
||||
index 2fd5f2cd..9d89a464 100644
|
||||
--- a/themes/robbyrussell.zsh-theme
|
||||
+++ b/themes/robbyrussell.zsh-theme
|
||||
@@ -1,5 +1,5 @@
|
||||
PROMPT="%(?:%{$fg_bold[green]%}➜ :%{$fg_bold[red]%}➜ )"
|
||||
-PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(git_prompt_info)'
|
||||
+PROMPT+=' %{$fg[cyan]%}%c%{$reset_color%} $(branch_prompt_info)'
|
||||
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[blue]%}git:(%{$fg[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
```
|
||||
|
||||
## Maintainer
|
||||
|
|
|
|||
|
|
@ -3,29 +3,33 @@
|
|||
# Oct 2, 2015
|
||||
|
||||
function branch_prompt_info() {
|
||||
# Defines path as current directory
|
||||
local current_dir=$PWD
|
||||
# While current path is not root path
|
||||
while [[ $current_dir != '/' ]]
|
||||
do
|
||||
# Git repository
|
||||
if [[ -d "${current_dir}/.git" ]]
|
||||
then
|
||||
echo '±' ${"$(<"$current_dir/.git/HEAD")"##*/}
|
||||
return;
|
||||
# Start checking in current working directory
|
||||
local branch="" dir="$PWD"
|
||||
while [[ "$dir" != '/' ]]; do
|
||||
# Found .git directory
|
||||
if [[ -d "${dir}/.git" ]]; then
|
||||
branch="${"$(<"${dir}/.git/HEAD")"##*/}"
|
||||
echo '±' "${branch:gs/%/%%}"
|
||||
return
|
||||
fi
|
||||
# Mercurial repository
|
||||
if [[ -d "${current_dir}/.hg" ]]
|
||||
then
|
||||
if [[ -f "$current_dir/.hg/branch" ]]
|
||||
then
|
||||
echo '☿' $(<"$current_dir/.hg/branch")
|
||||
|
||||
# Found .hg directory
|
||||
if [[ -d "${dir}/.hg" ]]; then
|
||||
if [[ -f "${dir}/.hg/branch" ]]; then
|
||||
branch="$(<"${dir}/.hg/branch")"
|
||||
else
|
||||
echo '☿ default'
|
||||
branch="default"
|
||||
fi
|
||||
return;
|
||||
|
||||
if [[ -f "${dir}/.hg/bookmarks.current" ]]; then
|
||||
branch="${branch}/$(<"${dir}/.hg/bookmarks.current")"
|
||||
fi
|
||||
|
||||
echo '☿' "${branch:gs/%/%%}"
|
||||
return
|
||||
fi
|
||||
# Defines path as parent directory and keeps looking for :)
|
||||
current_dir="${current_dir:h}"
|
||||
|
||||
# Check parent directory
|
||||
dir="${dir:h}"
|
||||
done
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
alias brewp='brew pin'
|
||||
alias brews='brew list -1'
|
||||
alias brewsp='brew list --pinned'
|
||||
alias bubo='brew update && brew outdated'
|
||||
alias bubc='brew upgrade && brew cleanup'
|
||||
|
|
@ -7,3 +6,16 @@ alias bubu='bubo && bubc'
|
|||
alias buf='brew upgrade --formula'
|
||||
alias bcubo='brew update && brew outdated --cask'
|
||||
alias bcubc='brew upgrade --cask && brew cleanup'
|
||||
|
||||
function brews() {
|
||||
local formulae="$(brew leaves | xargs brew deps --installed --for-each)"
|
||||
local casks="$(brew list --cask)"
|
||||
|
||||
local blue="$(tput setaf 4)"
|
||||
local bold="$(tput bold)"
|
||||
local off="$(tput sgr0)"
|
||||
|
||||
echo "${blue}==>${off} ${bold}Formulae${off}"
|
||||
echo "${formulae}" | sed "s/^\(.*\):\(.*\)$/\1${blue}\2${off}/"
|
||||
echo "\n${blue}==>${off} ${bold}Casks${off}\n${casks}"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ case $state in
|
|||
"check[Determine whether the requirements for your application are installed]" \
|
||||
"list[Show all of the gems in the current bundle]" \
|
||||
"show[Show the source location of a particular gem in the bundle]" \
|
||||
"info[Show details of a particular gem in the bundle]" \
|
||||
"outdated[Show all of the outdated gems in the current bundle]" \
|
||||
"console[Start an IRB session in the context of the current bundle]" \
|
||||
"open[Open an installed gem in the editor]" \
|
||||
|
|
@ -84,7 +85,7 @@ case $state in
|
|||
'(--verbose)--verbose[Enable verbose output mode]'
|
||||
ret=0
|
||||
;;
|
||||
(open|show)
|
||||
(open|show|info)
|
||||
_gems=( $(bundle show 2> /dev/null | sed -e '/^ \*/!d; s/^ \* \([^ ]*\) .*/\1/') )
|
||||
if [[ $_gems != "" ]]; then
|
||||
_values 'gems' $_gems && ret=0
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ bundle_install() {
|
|||
else
|
||||
local cores_num="$(nproc)"
|
||||
fi
|
||||
bundle install --jobs="$cores_num" "$@"
|
||||
BUNDLE_JOBS="$cores_num" bundle install "$@"
|
||||
}
|
||||
|
||||
## Gem wrapper
|
||||
|
|
@ -81,14 +81,12 @@ bundled_commands=(
|
|||
)
|
||||
|
||||
# Remove $UNBUNDLED_COMMANDS from the bundled_commands list
|
||||
for cmd in $UNBUNDLED_COMMANDS; do
|
||||
bundled_commands=(${bundled_commands#$cmd});
|
||||
done
|
||||
bundled_commands=(${bundled_commands:|UNBUNDLED_COMMANDS})
|
||||
unset UNBUNDLED_COMMANDS
|
||||
|
||||
# Add $BUNDLED_COMMANDS to the bundled_commands list
|
||||
for cmd in $BUNDLED_COMMANDS; do
|
||||
bundled_commands+=($cmd);
|
||||
done
|
||||
bundled_commands+=($BUNDLED_COMMANDS)
|
||||
unset BUNDLED_COMMANDS
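The two rewritten lines above replace the old per-element loops with zsh array operations: `${bundled_commands:|UNBUNDLED_COMMANDS}` is the array-difference expansion (elements of the left array not present in the named right array), and `+=(...)` appends in one go. A standalone sketch of the difference operator:

```zsh
all=(rails rake rspec spring)
skip=(rake spring)
print -l -- ${all:|skip}   # -> rails / rspec
```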
|
||||
|
||||
# Check if in the root or a subdirectory of a bundled project
|
||||
_within-bundled-project() {
|
||||
|
|
@ -126,5 +124,4 @@ for cmd in $bundled_commands; do
|
|||
compdef "_$cmd" "bundled_$cmd"="$cmd"
|
||||
fi
|
||||
done
|
||||
|
||||
unset cmd bundled_commands
|
||||
|
|
|
|||
|
|
@ -1,11 +1,3 @@
|
|||
# cargo
|
||||
|
||||
This plugin adds completion for the Rust build tool [`Cargo`](https://github.com/rust-lang/cargo).
|
||||
|
||||
To use it, add `cargo` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... cargo)
|
||||
```
|
||||
|
||||
Updated on March 3rd, 2019, from [Cargo 0.34.0](https://github.com/rust-lang/cargo/releases/tag/0.34.0).
|
||||
**Deprecated: use the [`rust`](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/rust) plugin instead.**
|
||||
|
|
|
|||
|
|
@ -1,407 +0,0 @@
|
|||
#compdef cargo
|
||||
|
||||
autoload -U regexp-replace
|
||||
|
||||
_cargo() {
|
||||
local curcontext="$curcontext" ret=1
|
||||
local -a command_scope_spec common parallel features msgfmt triple target registry
|
||||
local -a state line state_descr # These are set by _arguments
|
||||
typeset -A opt_args
|
||||
|
||||
common=(
|
||||
'(-q --quiet)*'{-v,--verbose}'[use verbose output]'
|
||||
'(-q --quiet -v --verbose)'{-q,--quiet}'[no output printed to stdout]'
|
||||
'-Z+[pass unstable (nightly-only) flags to cargo]: :_cargo_unstable_flags'
|
||||
'--frozen[require that Cargo.lock and cache are up to date]'
|
||||
'--locked[require that Cargo.lock is up to date]'
|
||||
'--color=[specify colorization option]:coloring:(auto always never)'
|
||||
'(- 1 *)'{-h,--help}'[show help message]'
|
||||
)
|
||||
|
||||
# leading items in parentheses are an exclusion list for the arguments following that arg
|
||||
# See: http://zsh.sourceforge.net/Doc/Release/Completion-System.html#Completion-Functions
|
||||
# - => exclude all other options
|
||||
# 1 => exclude positional arg 1
|
||||
# * => exclude all other args
|
||||
# +blah => exclude +blah
|
||||
_arguments -s -S -C $common \
|
||||
'(- 1 *)--list[list installed commands]' \
|
||||
'(- 1 *)--explain=[provide a detailed explanation of an error message]:error code' \
|
||||
'(- 1 *)'{-V,--version}'[show version information]' \
|
||||
'(+beta +nightly)+stable[use the stable toolchain]' \
|
||||
'(+stable +nightly)+beta[use the beta toolchain]' \
|
||||
'(+stable +beta)+nightly[use the nightly toolchain]' \
|
||||
'1: :_cargo_cmds' \
|
||||
'*:: :->args'
|
||||
|
||||
# These flags are mutually exclusive specifiers for the scope of a command; as
|
||||
# they are used in multiple places without change, they are expanded into the
|
||||
# appropriate command's `_arguments` where appropriate.
|
||||
command_scope_spec=(
|
||||
'(--bin --example --test --lib)--bench=[specify benchmark name]: :_cargo_benchmark_names'
|
||||
'(--bench --bin --test --lib)--example=[specify example name]:example name'
|
||||
'(--bench --example --test --lib)--bin=[specify binary name]:binary name'
|
||||
'(--bench --bin --example --test)--lib=[specify library name]:library name'
|
||||
'(--bench --bin --example --lib)--test=[specify test name]:test name'
|
||||
)
|
||||
|
||||
parallel=(
|
||||
'(-j --jobs)'{-j+,--jobs=}'[specify number of parallel jobs]:jobs [# of CPUs]'
|
||||
)
|
||||
|
||||
features=(
|
||||
'(--all-features)--features=[specify features to activate]:feature'
|
||||
'(--features)--all-features[activate all available features]'
|
||||
"--no-default-features[don't build the default features]"
|
||||
)
|
||||
|
||||
msgfmt='--message-format=[specify error format]:error format [human]:(human json short)'
|
||||
triple='--target=[specify target triple]:target triple'
|
||||
target='--target-dir=[specify directory for all generated artifacts]:directory:_directories'
|
||||
manifest='--manifest-path=[specify path to manifest]:path:_directories'
|
||||
registry='--registry=[specify registry to use]:registry'
|
||||
|
||||
case $state in
|
||||
args)
|
||||
curcontext="${curcontext%:*}-${words[1]}:"
|
||||
case ${words[1]} in
|
||||
bench)
|
||||
_arguments -s -A "^--" $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--all-targets[benchmark all targets]' \
|
||||
"--no-run[compile but don't run]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to run benchmarks for]:package:_cargo_package_names' \
|
||||
'--exclude=[exclude packages from the benchmark]:spec' \
|
||||
'--no-fail-fast[run all benchmarks regardless of failure]' \
|
||||
'1: :_guard "^-*" "bench name"' \
|
||||
'*:args:_default'
|
||||
;;
|
||||
|
||||
build|b)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'--build-plan[output the build plan in JSON]' \
|
||||
;;
|
||||
|
||||
check|c)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--all-targets[equivalent to specifying --lib --bins --tests --benches --examples]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to check]:package:_cargo_package_names' \
|
||||
'--release[check in release mode]' \
|
||||
;;
|
||||
|
||||
clean)
|
||||
_arguments -s -S $common $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to clean]:package:_cargo_package_names' \
|
||||
'--release[clean release artifacts]' \
|
||||
'--doc[clean just the documentation directory]'
|
||||
;;
|
||||
|
||||
doc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--no-deps[do not build docs for dependencies]' \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open docs in browser after the build]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
;;
|
||||
|
||||
fetch)
|
||||
_arguments -s -S $common $triple $manifest
|
||||
;;
|
||||
|
||||
fix)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
"${command_scope_spec[@]}" \
|
||||
'--broken-code[fix code even if it already has compiler errors]' \
|
||||
'--edition[fix in preparation for the next edition]' \
|
||||
'--edition-idioms[fix warnings to migrate to the idioms of an edition]' \
|
||||
'--allow-no-vcs[fix code even if a VCS was not detected]' \
|
||||
'--allow-dirty[fix code even if the working directory is dirty]' \
|
||||
'--allow-staged[fix code even if the working directory has staged changes]'
|
||||
;;
|
||||
|
||||
generate-lockfile)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
git-checkout)
|
||||
_arguments -s -S $common \
|
||||
'--reference=:reference' \
|
||||
'--url=:url:_urls'
|
||||
;;
|
||||
|
||||
help)
|
||||
_cargo_cmds
|
||||
;;
|
||||
|
||||
init)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--edition=[specify edition to set for the crate generated]:edition:(2015 2018)' \
|
||||
'--vcs=[initialize a new repo with a given VCS]:vcs:(git hg pijul fossil none)' \
|
||||
'--name=[set the resulting package name]:name' \
|
||||
'1:path:_directories'
|
||||
;;
|
||||
|
||||
install)
|
||||
_arguments -s -S $common $parallel $features $triple $registry \
|
||||
'(-f --force)'{-f,--force}'[force overwriting of existing crates or binaries]' \
|
||||
'--bin=[only install the specified binary]:binary' \
|
||||
'--branch=[branch to use when installing from git]:branch' \
|
||||
'--debug[build in debug mode instead of release mode]' \
|
||||
'--example=[install the specified example instead of binaries]:example' \
|
||||
'--git=[specify URL from which to install the crate]:url:_urls' \
|
||||
'--path=[local filesystem path to crate to install]: :_directories' \
|
||||
'--rev=[specific commit to use when installing from git]:commit' \
|
||||
'--root=[directory to install packages into]: :_directories' \
|
||||
'--tag=[tag to use when installing from git]:tag' \
|
||||
'--vers=[version to install from crates.io]:version' \
|
||||
'--list[list all installed packages and their versions]' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
locate-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
login)
|
||||
_arguments -s -S $common $registry \
|
||||
'*: :_guard "^-*" "token"'
|
||||
;;
|
||||
|
||||
metadata)
|
||||
_arguments -s -S $common $features $manifest \
|
||||
"--no-deps[output information only about the root package and don't fetch dependencies]" \
|
||||
'--format-version=[specify format version]:version [1]:(1)'
|
||||
;;
|
||||
|
||||
new)
|
||||
_arguments -s -S $common $registry \
|
||||
'--lib[use library template]' \
|
||||
'--vcs:initialize a new repo with a given VCS:(git hg none)' \
|
||||
'--name=[set the resulting package name]'
|
||||
;;
|
||||
|
||||
owner)
|
||||
_arguments -s -S $common $registry \
|
||||
'(-a --add)'{-a,--add}'[specify name of a user or team to invite as an owner]:name' \
|
||||
'--index=[specify registry index]:index' \
|
||||
'(-l --list)'{-l,--list}'[list owners of a crate]' \
|
||||
'(-r --remove)'{-r,--remove}'[specify name of a user or team to remove as an owner]:name' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
|
||||
package)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest \
|
||||
'(-l --list)'{-l,--list}'[print files included in a package without making one]' \
|
||||
'--no-metadata[ignore warnings about a lack of human-usable metadata]' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't build to verify contents]"
|
||||
;;
|
||||
|
||||
pkgid)
|
||||
_arguments -s -S $common $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to get ID specifier for]:package:_cargo_package_names' \
|
||||
'*: :_guard "^-*" "spec"'
|
||||
;;
|
||||
|
||||
publish)
|
||||
_arguments -s -S $common $parallel $features $triple $target $manifest $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--allow-dirty[allow dirty working directories to be packaged]' \
|
||||
"--no-verify[don't verify the contents by building them]" \
|
||||
'--token=[specify token to use when uploading]:token' \
|
||||
'--dry-run[perform all checks without uploading]'
|
||||
;;
|
||||
|
||||
read-manifest)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
run|r)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--example=[name of the bin target]:name' \
|
||||
'--bin=[name of the bin target]:name' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package with the target to run]:package:_cargo_package_names' \
|
||||
'--release[build in release mode]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
rustc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to build]:package:_cargo_package_names' \
|
||||
'--profile=[specify profile to build the selected target for]:profile' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustc rustc -default-'
|
||||
;;
|
||||
|
||||
rustdoc)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--document-private-items[include non-public items in the documentation]' \
|
||||
'--open[open the docs in a browser after the operation]' \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to document]:package:_cargo_package_names' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
"${command_scope_spec[@]}" \
|
||||
'*: : _dispatch rustdoc rustdoc -default-'
|
||||
;;
|
||||
|
||||
search)
|
||||
_arguments -s -S $common $registry \
|
||||
'--index=[specify registry index]:index' \
|
||||
'--limit=[limit the number of results]:results [10]' \
|
||||
'*: :_guard "^-*" "query"'
|
||||
;;
|
||||
|
||||
test|t)
|
||||
_arguments -s -S $common $parallel $features $msgfmt $triple $target $manifest \
|
||||
'--test=[test name]: :_cargo_test_names' \
|
||||
'--no-fail-fast[run all tests regardless of failure]' \
|
||||
'--no-run[compile but do not run]' \
|
||||
'(-p --package)'{-p+,--package=}'[package to run tests for]:package:_cargo_package_names' \
|
||||
'--all[test all packages in the workspace]' \
|
||||
'--release[build artifacts in release mode, with optimizations]' \
|
||||
'1: :_cargo_test_names' \
|
||||
'(--doc --bin --example --test --bench)--lib[only test library]' \
|
||||
'(--lib --bin --example --test --bench)--doc[only test documentation]' \
|
||||
'(--lib --doc --example --test --bench)--bin=[binary name]' \
|
||||
'(--lib --doc --bin --test --bench)--example=[example name]' \
|
||||
'(--lib --doc --bin --example --bench)--test=[test name]' \
|
||||
'(--lib --doc --bin --example --test)--bench=[benchmark name]' \
|
||||
'*: :_default'
|
||||
;;
|
||||
|
||||
uninstall)
|
||||
_arguments -s -S $common \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to uninstall]:package:_cargo_package_names' \
|
||||
'--bin=[only uninstall the specified binary]:name' \
|
||||
'--root=[directory to uninstall packages from]: :_files -/' \
|
||||
'*:crate:_cargo_installed_crates -F line'
|
||||
;;
|
||||
|
||||
update)
|
||||
_arguments -s -S $common $manifest \
|
||||
'--aggressive=[force dependency update]' \
|
||||
"--dry-run[don't actually write the lockfile]" \
|
||||
'(-p --package)'{-p+,--package=}'[specify package to update]:package:_cargo_package_names' \
|
||||
'--precise=[update single dependency to precise release]:release'
|
||||
;;
|
||||
|
||||
verify-project)
|
||||
_arguments -s -S $common $manifest
|
||||
;;
|
||||
|
||||
version)
|
||||
_arguments -s -S $common
|
||||
;;
|
||||
|
||||
yank)
|
||||
_arguments -s -S $common $registry \
|
||||
'--vers=[specify yank version]:version' \
|
||||
'--undo[undo a yank, putting a version back into the index]' \
|
||||
'--index=[specify registry index to yank from]:registry index' \
|
||||
'--token=[specify API token to use when authenticating]:token' \
|
||||
'*: :_guard "^-*" "crate"'
|
||||
;;
|
||||
*)
|
||||
# allow plugins to define their own functions
|
||||
if ! _call_function ret _cargo-${words[1]}; then
|
||||
# fallback on default completion for unknown commands
|
||||
_default && ret=0
|
||||
fi
|
||||
(( ! ret ))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
_cargo_unstable_flags() {
|
||||
local flags
|
||||
flags=( help ${${${(M)${(f)"$(_call_program flags cargo -Z help)"}:#*--*}/ #-- #/:}##*-Z } )
|
||||
_describe -t flags 'unstable flag' flags
|
||||
}
|
||||
|
||||
_cargo_installed_crates() {
|
||||
local expl
|
||||
_description crates expl 'crate'
|
||||
compadd "$@" "$expl[@]" - ${${${(f)"$(cargo install --list)"}:# *}%% *}
|
||||
}
|
||||
|
||||
_cargo_cmds() {
|
||||
local -a commands
|
||||
# This uses Parameter Expansion Flags, which are a built-in Zsh feature.
|
||||
# See more: http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion-Flags
|
||||
# and http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion
|
||||
#
|
||||
# # How this work?
|
||||
#
|
||||
# First it splits the result of `cargo --list` at newline, then it removes the first line.
|
||||
# Then it removes indentation (4 whitespaces) before each items. (Note the x## pattern [1]).
|
||||
# Then it replaces those spaces between item and description with a `:`
|
||||
#
|
||||
# [1]: https://github.com/zsh-users/zsh-completions/blob/master/zsh-completions-howto.org#patterns
|
||||
commands=( ${${${(M)"${(f)$(_call_program commands cargo --list)}":# *}/ ##/}/ ##/:} )
|
||||
_describe -t commands 'command' commands
|
||||
}
|
||||
|
||||
|
||||
#FIXME: Disabled until fixed
|
||||
#gets package names from the manifest file
|
||||
_cargo_package_names() {
|
||||
_message -e packages package
|
||||
}
|
||||
|
||||
# Extracts the values of "name" from the array given in $1 and shows them as
|
||||
# command line options for completion
|
||||
_cargo_names_from_array() {
|
||||
# strip json from the path
|
||||
local manifest=${${${"$(cargo locate-project)"}%\"\}}##*\"}
|
||||
if [[ -z $manifest ]]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
local last_line
|
||||
local -a names;
|
||||
local in_block=false
|
||||
local block_name=$1
|
||||
names=()
|
||||
while read -r line; do
|
||||
if [[ $last_line == "[[$block_name]]" ]]; then
|
||||
in_block=true
|
||||
else
|
||||
if [[ $last_line =~ '\s*\[\[.*' ]]; then
|
||||
in_block=false
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ $in_block == true ]]; then
|
||||
if [[ $line =~ '\s*name\s*=' ]]; then
|
||||
regexp-replace line '^\s*name\s*=\s*|"' ''
|
||||
names+=( "$line" )
|
||||
fi
|
||||
fi
|
||||
|
||||
last_line=$line
|
||||
done < "$manifest"
|
||||
_describe "$block_name" names
|
||||
|
||||
}
|
||||
|
||||
#Gets the test names from the manifest file
|
||||
_cargo_test_names() {
|
||||
_cargo_names_from_array "test"
|
||||
}
|
||||
|
||||
#Gets the bench names from the manifest file
|
||||
_cargo_benchmark_names() {
|
||||
_cargo_names_from_array "bench"
|
||||
}
|
||||
|
||||
_cargo
7
plugins/cargo/cargo.plugin.zsh
Normal file
@ -0,0 +1,7 @@
|
|||
print ${(%):-'%F{yellow}The `cargo` plugin is deprecated and has been moved to the `rust` plugin.'}
|
||||
print ${(%):-'Please update your .zshrc to use the `%Brust%b` plugin instead.%f'}
|
||||
|
||||
(( ${fpath[(Ie)$ZSH/plugins/rust]} )) || {
|
||||
fpath=("$ZSH/plugins/rust" $fpath)
|
||||
source "$ZSH/plugins/rust/rust.plugin.zsh"
|
||||
}
@ -2,7 +2,7 @@
|
|||
# catimg script by Eduardo San Martin Morote aka Posva #
|
||||
# https://posva.net #
|
||||
# #
|
||||
# Ouput the content of an image to the stdout using the 256 colors of the #
|
||||
# Output the content of an image to the stdout using the 256 colors of the #
|
||||
# terminal. #
|
||||
# GitHub: https://github.com/posva/catimg #
|
||||
################################################################################
@ -2,7 +2,7 @@
|
|||
# catimg script by Eduardo San Martin Morote aka Posva #
|
||||
# https://posva.net #
|
||||
# #
|
||||
# Ouput the content of an image to the stdout using the 256 colors of the #
|
||||
# Output the content of an image to the stdout using the 256 colors of the #
|
||||
# terminal. #
|
||||
# GitHub: https://github.com/posva/catimg #
|
||||
################################################################################
9
plugins/charm/README.md
Normal file
@ -0,0 +1,9 @@
|
|||
# Charm plugin
|
||||
|
||||
This plugin adds completion for the [charm](https://github.com/charmbracelet/charm) CLI.
|
||||
|
||||
To use it, add `charm` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... charm)
|
||||
```
14
plugins/charm/charm.plugin.zsh
Normal file
@ -0,0 +1,14 @@
|
|||
# Autocompletion for the Charm CLI (charm).
|
||||
if (( ! $+commands[charm] )); then
|
||||
return
|
||||
fi
|
||||
|
||||
# If the completion file doesn't exist yet, we need to autoload it and
|
||||
# bind it to `charm`. Otherwise, compinit will have already done that.
|
||||
if [[ ! -f "$ZSH_CACHE_DIR/completions/_charm" ]]; then
|
||||
typeset -g -A _comps
|
||||
autoload -Uz _charm
|
||||
_comps[charm]=_charm
|
||||
fi
|
||||
|
||||
charm completion zsh >| "$ZSH_CACHE_DIR/completions/_charm" &|
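The caching pattern above is generic: bind an autoloaded stub if the cached completion file is missing, then regenerate it in the background. A minimal sketch of the same idea for a hypothetical `mycli` command that offers a `completion zsh` subcommand (the name and subcommand are illustrative assumptions, not part of this change):

```zsh
# Sketch only: "mycli" is a hypothetical CLI assumed to provide `completion zsh`.
if (( ! $+commands[mycli] )); then
  return
fi

# If the cached completion file doesn't exist yet, bind an autoloaded stub so the
# first completion request still works; otherwise compinit has already done this.
if [[ ! -f "$ZSH_CACHE_DIR/completions/_mycli" ]]; then
  typeset -g -A _comps
  autoload -Uz _mycli
  _comps[mycli]=_mycli
fi

# Regenerate the cached completion file in the background.
mycli completion zsh >| "$ZSH_CACHE_DIR/completions/_mycli" &|
```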
@ -5,6 +5,7 @@ current Ruby version, and completion and a prompt function to display the Ruby v
|
|||
Supports brew and manual installation of chruby.
|
||||
|
||||
To use it, add `chruby` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... chruby)
|
||||
```
@ -14,7 +15,7 @@ plugins=(... chruby)
|
|||
If you'd prefer to specify an explicit path to load chruby from
|
||||
you can set variables like so:
|
||||
|
||||
```
|
||||
```zsh
|
||||
zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
|
||||
zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
|
||||
```
@ -1,121 +1,94 @@
|
|||
#
|
||||
# INSTRUCTIONS
|
||||
#
|
||||
# With either a manual or brew installed chruby things should just work.
|
||||
#
|
||||
# If you'd prefer to specify an explicit path to load chruby from
|
||||
# you can set variables like so:
|
||||
#
|
||||
# zstyle :omz:plugins:chruby path /local/path/to/chruby.sh
|
||||
# zstyle :omz:plugins:chruby auto /local/path/to/auto.sh
|
||||
#
|
||||
# TODO
|
||||
# - autodetermine correct source path on non OS X systems
|
||||
# - completion if ruby-install exists
|
||||
## load chruby from different locations
|
||||
|
||||
_source-from-omz-settings() {
|
||||
local _chruby_path _chruby_auto
|
||||
|
||||
zstyle -s :omz:plugins:chruby path _chruby_path || return 1
|
||||
zstyle -s :omz:plugins:chruby auto _chruby_auto || return 1
|
||||
|
||||
if [[ -r ${_chruby_path} ]]; then
|
||||
source ${_chruby_path}
|
||||
fi
|
||||
|
||||
if [[ -r ${_chruby_auto} ]]; then
|
||||
source ${_chruby_auto}
|
||||
fi
|
||||
}
|
||||
|
||||
_source-from-homebrew() {
|
||||
(( $+commands[brew] )) || return 1
|
||||
|
||||
local _brew_prefix
|
||||
# check default brew prefix
|
||||
if [[ -h /usr/local/opt/chruby ]];then
|
||||
_brew_prefix="/usr/local/opt/chruby"
|
||||
else
|
||||
# ok , it is not default prefix
|
||||
# this call to brew is expensive ( about 400 ms ), so at least let's make it only once
|
||||
_brew_prefix=$(brew --prefix chruby)
|
||||
fi
|
||||
|
||||
[[ -r "$_brew_prefix" ]] || return 1
|
||||
|
||||
source $_brew_prefix/share/chruby/chruby.sh
|
||||
source $_brew_prefix/share/chruby/auto.sh
|
||||
}
|
||||
|
||||
_load-chruby-dirs() {
|
||||
local dir
|
||||
for dir in "$HOME/.rubies" "$PREFIX/opt/rubies"; do
|
||||
if [[ -d "$dir" ]]; then
|
||||
RUBIES+=("$dir")
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
# Load chruby
|
||||
if _source-from-omz-settings; then
|
||||
_load-chruby-dirs
|
||||
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
|
||||
source /usr/local/share/chruby/chruby.sh
|
||||
source /usr/local/share/chruby/auto.sh
|
||||
_load-chruby-dirs
|
||||
elif _source-from-homebrew; then
|
||||
_load-chruby-dirs
|
||||
fi
|
||||
|
||||
unfunction _source-from-homebrew _source-from-omz-settings _load-chruby-dirs
|
||||
|
||||
|
||||
## chruby utility functions and aliases
|
||||
|
||||
# rvm and rbenv plugins also provide this alias
|
||||
alias rubies='chruby'
|
||||
|
||||
|
||||
_homebrew-installed() {
|
||||
whence brew &> /dev/null
|
||||
_xit=$?
|
||||
if [ $_xit -eq 0 ];then
|
||||
# ok , we have brew installed
|
||||
# speculatively we check default brew prefix
|
||||
if [ -h /usr/local/opt/chruby ];then
|
||||
_brew_prefix="/usr/local/opt/chruby"
|
||||
else
|
||||
# ok , it is not default prefix
|
||||
# this call to brew is expensive ( about 400 ms ), so at least let's make it only once
|
||||
_brew_prefix=$(brew --prefix chruby)
|
||||
fi
|
||||
return 0
|
||||
else
|
||||
return $_xit
|
||||
fi
|
||||
}
|
||||
|
||||
_chruby-from-homebrew-installed() {
|
||||
[ -r _brew_prefix ] &> /dev/null
|
||||
}
|
||||
|
||||
_ruby-build_installed() {
|
||||
whence ruby-build &> /dev/null
|
||||
}
|
||||
|
||||
_ruby-install-installed() {
|
||||
whence ruby-install &> /dev/null
|
||||
}
|
||||
|
||||
# Simple definition completer for ruby-build
|
||||
if _ruby-build_installed; then
|
||||
_ruby-build() { compadd $(ruby-build --definitions) }
|
||||
compdef _ruby-build ruby-build
|
||||
fi
|
||||
|
||||
_source_from_omz_settings() {
|
||||
local _chruby_path
|
||||
local _chruby_auto
|
||||
|
||||
zstyle -s :omz:plugins:chruby path _chruby_path
|
||||
zstyle -s :omz:plugins:chruby auto _chruby_auto
|
||||
|
||||
if [[ -r ${_chruby_path} ]]; then
|
||||
source ${_chruby_path}
|
||||
fi
|
||||
|
||||
if [[ -r ${_chruby_auto} ]]; then
|
||||
source ${_chruby_auto}
|
||||
fi
|
||||
}
|
||||
|
||||
_chruby_dirs() {
|
||||
chrubydirs=($HOME/.rubies/ $PREFIX/opt/rubies)
|
||||
for dir in chrubydirs; do
|
||||
if [[ -d $dir ]]; then
|
||||
RUBIES+=$dir
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
if _homebrew-installed && _chruby-from-homebrew-installed ; then
|
||||
source $_brew_prefix/share/chruby/chruby.sh
|
||||
source $_brew_prefix/share/chruby/auto.sh
|
||||
_chruby_dirs
|
||||
elif [[ -r "/usr/local/share/chruby/chruby.sh" ]] ; then
|
||||
source /usr/local/share/chruby/chruby.sh
|
||||
source /usr/local/share/chruby/auto.sh
|
||||
_chruby_dirs
|
||||
else
|
||||
_source_from_omz_settings
|
||||
_chruby_dirs
|
||||
fi
|
||||
|
||||
function ensure_chruby() {
|
||||
$(whence chruby)
|
||||
}
|
||||
|
||||
function current_ruby() {
|
||||
local _ruby
|
||||
_ruby="$(chruby |grep \* |tr -d '* ')"
|
||||
if [[ $(chruby |grep -c \*) -eq 1 ]]; then
|
||||
echo ${_ruby}
|
||||
else
|
||||
echo "system"
|
||||
fi
|
||||
local ruby
|
||||
ruby="$(chruby | grep \* | tr -d '* ')"
|
||||
if [[ $(chruby | grep -c \*) -eq 1 ]]; then
|
||||
echo ${ruby}
|
||||
else
|
||||
echo "system"
|
||||
fi
|
||||
}
|
||||
|
||||
function chruby_prompt_info() {
|
||||
echo "$(current_ruby)"
|
||||
echo "${$(current_ruby):gs/%/%%}"
|
||||
}
|
||||
|
||||
# complete on installed rubies
|
||||
# Complete chruby command with installed rubies
|
||||
_chruby() {
|
||||
compadd $(chruby | tr -d '* ')
|
||||
local default_path='/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin'
|
||||
if PATH=${default_path} type ruby &> /dev/null; then
|
||||
compadd system
|
||||
fi
|
||||
compadd $(chruby | tr -d '* ')
|
||||
if PATH="/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin" command ruby &>/dev/null; then
|
||||
compadd system
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _chruby chruby
|
||||
|
||||
|
||||
# Simple definition completer for ruby-build
|
||||
if command ruby-build &> /dev/null; then
|
||||
_ruby-build() { compadd $(ruby-build --definitions) }
|
||||
compdef _ruby-build ruby-build
|
||||
fi
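As a hedged usage sketch (not part of the diff): `chruby_prompt_info` is meant to be called from a prompt, which needs `prompt_subst` so the command substitution is re-evaluated on each prompt:

```zsh
# Show the currently selected Ruby in the right-hand prompt (assumes the chruby plugin is loaded).
setopt prompt_subst
RPROMPT='%F{red}$(chruby_prompt_info)%f'
```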
@ -1,8 +1,6 @@
|
|||
# chucknorris
|
||||
|
||||
Chuck Norris fortunes plugin for oh-my-zsh. Perfectly suitable as MOTD.
|
||||
|
||||
**Maintainers**: [apjanke](https://github.com/apjanke) [maff](https://github.com/maff)
|
||||
Chuck Norris fortunes plugin for Oh My Zsh. Perfectly suitable as MOTD.
|
||||
|
||||
To use it add `chucknorris` to the plugins array in you zshrc file.
@ -1,28 +1,24 @@
|
|||
# chucknorris: Chuck Norris fortunes
|
||||
|
||||
# Automatically generate or update Chuck's compiled fortune data file
|
||||
# $0 must be used outside a local function. This variable name is unlikly to collide.
|
||||
CHUCKNORRIS_PLUGIN_DIR=${0:h}
|
||||
|
||||
() {
|
||||
local DIR=$CHUCKNORRIS_PLUGIN_DIR/fortunes
|
||||
if [[ ! -f $DIR/chucknorris.dat ]] || [[ $DIR/chucknorris.dat -ot $DIR/chucknorris ]]; then
|
||||
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
|
||||
local strfile=strfile
|
||||
if ! which strfile &>/dev/null && [[ -f /usr/sbin/strfile ]]; then
|
||||
strfile=/usr/sbin/strfile
|
||||
# %x: name of file containing code being executed
|
||||
local fortunes_dir="${${(%):-%x}:h}/fortunes"
|
||||
|
||||
# Aliases
|
||||
alias chuck="fortune -a $fortunes_dir"
|
||||
alias chuck_cow="chuck | cowthink"
|
||||
|
||||
# Automatically generate or update Chuck's compiled fortune data file
|
||||
if [[ "$fortunes_dir/chucknorris" -ot "$fortunes_dir/chucknorris.dat" ]]; then
|
||||
return
|
||||
fi
|
||||
if which $strfile &> /dev/null; then
|
||||
$strfile $DIR/chucknorris $DIR/chucknorris.dat >/dev/null
|
||||
else
|
||||
|
||||
# For some reason, Cygwin puts strfile in /usr/sbin, which is not on the path by default
|
||||
local strfile="${commands[strfile]:-/usr/sbin/strfile}"
|
||||
if [[ ! -x "$strfile" ]]; then
|
||||
echo "[oh-my-zsh] chucknorris depends on strfile, which is not installed" >&2
|
||||
echo "[oh-my-zsh] strfile is often provided as part of the 'fortune' package" >&2
|
||||
return
|
||||
fi
|
||||
fi
|
||||
|
||||
# Aliases
|
||||
alias chuck="fortune -a $DIR"
|
||||
alias chuck_cow="chuck | cowthink"
|
||||
# Generate the compiled fortune data file
|
||||
$strfile "$fortunes_dir/chucknorris" "$fortunes_dir/chucknorris.dat" >/dev/null
|
||||
}
|
||||
|
||||
unset CHUCKNORRIS_PLUGIN_DIR
@ -228,7 +228,7 @@ Chuck Norris once punched the ground to stop an earthquake. The resulting afters
|
|||
%
|
||||
Chuck Norris once round-house kicked a salesman. Over the phone.
|
||||
%
|
||||
Chuck Norris once rounhouse kicked a football. The astronomical society now considers it a planet.
|
||||
Chuck Norris once roundhouse kicked a football. The astronomical society now considers it a planet.
|
||||
%
|
||||
Chuck Norris once thought he was wrong. He was, however, mistaken.
|
||||
%
|
||||
|
|
@ -342,7 +342,7 @@ Every time there's an earthquake, you know Chuck Norris is hungry. The earthquak
|
|||
%
|
||||
Evolution's driving mechanism is nature's desperate attempt to escape Chuck Norris.
|
||||
%
|
||||
Fear of spiders is arachnaphobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
|
||||
Fear of spiders is arachnophobia. Fear of tight spaces is claustrophobia. Fear of Chuck Norris is called Logic.
|
||||
%
|
||||
Fool me once, shame on you. Fool Chuck Norris once and he will roundhouse you in the face.
|
||||
%
|
||||
|
|
@ -426,7 +426,7 @@ Some people ask for a Kleenex when they sneeze, Chuck Norris asks for a body bag
|
|||
%
|
||||
Someone once videotaped Chuck Norris getting pissed off. It was called Walker: Texas Chain Saw Massacre.
|
||||
%
|
||||
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindess, and possibly foot sized brusies on the face.
|
||||
Staring at Chuck Norris for extended periods of time without proper eye protection will cause blindness, and possibly foot sized bruises on the face.
|
||||
%
|
||||
Taking Karate Lessons = $100, Buying MMA DVD's = $150, Subscribing to a UFC event = $50, Getting a Roundhouse Kick from Chuck Norris = PRICELESS.
|
||||
%
|
||||
|
|
@ -452,7 +452,7 @@ The best part of waking up is not Folgers in your cup. it's knowing that Chuck N
|
|||
%
|
||||
The chief export of Chuck Norris is pain.
|
||||
%
|
||||
The dictionary references Chuck Norris several times, he is metioned under Fear, Law, Order and Chucktatorship.
|
||||
The dictionary references Chuck Norris several times, he is mentioned under Fear, Law, Order and Chucktatorship.
|
||||
%
|
||||
The leading causes of death in the United States are: 1. Heart Disease 2. Chuck Norris 3. Cancer.
|
||||
%
|
||||
|
|
@ -468,7 +468,7 @@ The only way sharks will come near CN underwater is when CN is inside of a cage.
|
|||
%
|
||||
The only word that rhymes with orange is Chuck Norris.
|
||||
%
|
||||
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequal "The Last Skull Bender".
|
||||
The producers of the movie "The Last Airbender" are now in talks with Chuck Norris in Order to star him in their next sequel "The Last Skull Bender".
|
||||
%
|
||||
The quickest way to a man's heart is with Chuck Norris' fist.
|
||||
%
|
||||
|
|
@ -558,3 +558,11 @@ You know Chuck Norris' pet lizard, right? Last I heard, he was in the movie "God
|
|||
%
|
||||
http://chucknorrisfacts.com/ is built in Drupal because Chuck Norris knows a good CMS when he sees one.
|
||||
%
|
||||
Chuck Norris made the first Giraffe by uppercutting a horse.
|
||||
%
|
||||
Chuck Norris can hear sign language.
|
||||
%
|
||||
Chuck Norris make onions cry.
|
||||
%
|
||||
Chuck Norris doesn't shake hands, he makes them tremble.
|
||||
%
|
||||
@ -1,26 +0,0 @@
|
|||
# CloudApp plugin
|
||||
|
||||
## The CloudApp API is deprecated, so the plugin will be removed shortly
|
||||
|
||||
[CloudApp](https://www.getcloudapp.com) brings screen recording, screenshots, and GIF creation to the cloud, in an easy-to-use enterprise-level app. The CloudApp plugin allows you to upload a file to your CloadApp account from the command line.
|
||||
|
||||
To use it, add `cloudapp` to the plugins array of your `~/.zshrc` file:
|
||||
|
||||
```zsh
|
||||
plugins=(... cloudapp)
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
1. [Aaron Russell's `cloudapp_api` gem](https://github.com/aaronrussell/cloudapp_api#installation)
|
||||
|
||||
2. That you set your CloudApp credentials in `~/.cloudapp` as a simple text file like below:
|
||||
```
|
||||
email
|
||||
password
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- `cloudapp <filename>`: uploads `<filename>` to your CloudApp account, and if you're using
|
||||
macOS, copies the URL to your clipboard.
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
print -Pn "%F{yellow}"
|
||||
print "[oh-my-zsh] The CloudApp API no longer works, so the cloudapp plugin will"
|
||||
print "[oh-my-zsh] be removed shortly. Please remove it from your plugins list."
|
||||
print -Pn "%f"
|
||||
|
|
@ -39,14 +39,14 @@
|
|||
#
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
local curcontext="$curcontext" state line ret=1 version opts first second third
|
||||
local curcontext="$curcontext" state line ret=1 version
|
||||
local -a opts
|
||||
typeset -A opt_args
|
||||
version=(${(f)"$(_call_program version $words[1] --version)"})
|
||||
version=(${(f)"$(_call_program version $words[1] --version)"}) || return ret
|
||||
version=${${(z)${version[1]}}[3]}
|
||||
first=$(echo $version|cut -d '.' -f 1)
|
||||
second=$(echo $version|cut -d '.' -f 2)
|
||||
third=$(echo $version|cut -d '.' -f 3)
|
||||
if (( $first < 2 )) && (( $second < 7 )) && (( $third < 3 ));then
|
||||
|
||||
autoload -Uz is-at-least
|
||||
if ! is-at-least 1.6.3 "$version"; then
|
||||
opts+=('(-l --lint)'{-l,--lint}'[pipe the compiled JavaScript through JavaScript Lint]'
|
||||
'(-r --require)'{-r,--require}'[require a library before executing your script]:library')
|
||||
fi
1
plugins/colemak/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||
.less
|
||||
|
|
@ -19,4 +19,20 @@ bindkey -a 'N' vi-join
|
|||
bindkey -a 'j' vi-forward-word-end
|
||||
bindkey -a 'J' vi-forward-blank-word-end
|
||||
|
||||
lesskey $ZSH/plugins/colemak/colemak-less
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# New less versions will read this file directly
|
||||
export LESSKEYIN="${0:h:A}/colemak-less"
|
||||
|
||||
# Only run lesskey if less version is older than v582
|
||||
less_ver=$(less --version | awk '{print $2;exit}')
|
||||
autoload -Uz is-at-least
|
||||
if ! is-at-least 582 $less_ver; then
|
||||
# Old less versions will read this transformed file
|
||||
export LESSKEY="${0:h:A}/.less"
|
||||
lesskey -o "$LESSKEY" "$LESSKEYIN" 2>/dev/null
|
||||
fi
|
||||
unset less_ver
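For reference, `is-at-least` succeeds when the version passed as its second argument (or `$ZSH_VERSION` if omitted) is at least the first argument, which is what selects the branch above; a small illustration:

```zsh
autoload -Uz is-at-least
is-at-least 582 551 || echo "less older than 582: compile the key file with lesskey"
is-at-least 582 590 && echo "less 582 or newer: LESSKEYIN is read directly"
```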
@ -16,8 +16,13 @@ less_termcap[se]="${reset_color}"
|
|||
less_termcap[us]="${fg_bold[green]}"
|
||||
less_termcap[ue]="${reset_color}"
|
||||
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# Absolute path to this file's directory.
|
||||
typeset __colored_man_pages_dir="${0:A:h}"
|
||||
typeset -g __colored_man_pages_dir="${0:A:h}"
|
||||
|
||||
function colored() {
|
||||
local -a environment
@ -82,7 +82,7 @@ colorize_less() {
|
|||
# This variable tells less to pipe every file through the specified command
|
||||
# (see the man page of less INPUT PREPROCESSOR).
|
||||
# 'zsh -ic "colorize_cat %s 2> /dev/null"' would not work for huge files like
|
||||
# the ~/.zsh_history. For such files the tty of the preprocessor will be supended.
|
||||
# the ~/.zsh_history. For such files the tty of the preprocessor will be suspended.
|
||||
# Therefore we must source this file to make colorize_cat available in the
|
||||
# preprocessor without the interactive mode.
|
||||
# `2>/dev/null` will suppress the error for large files 'broken pipe' of the python
|
||||
|
|
|
|||
|
|
@ -29,5 +29,6 @@ It works out of the box with the command-not-found packages for:
|
|||
- [Fedora](https://fedoraproject.org/wiki/Features/PackageKitCommandNotFound)
|
||||
- [NixOS](https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found)
|
||||
- [Termux](https://github.com/termux/command-not-found)
|
||||
- [SUSE](https://www.unix.com/man-page/suse/1/command-not-found/)
|
||||
|
||||
You can add support for other platforms by submitting a Pull Request.
@ -50,13 +50,20 @@ fi
|
|||
# NixOS: https://github.com/NixOS/nixpkgs/tree/master/nixos/modules/programs/command-not-found
|
||||
if [[ -x /run/current-system/sw/bin/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/run/current-system/sw/bin/command-not-found -- "$@"
|
||||
/run/current-system/sw/bin/command-not-found "$@"
|
||||
}
|
||||
fi
|
||||
|
||||
# Termux: https://github.com/termux/command-not-found
|
||||
if [[ -x /data/data/com.termux/files/usr/libexec/termux/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/data/data/com.termux/files/usr/libexec/termux/command-not-found -- "$1"
|
||||
/data/data/com.termux/files/usr/libexec/termux/command-not-found "$1"
|
||||
}
|
||||
fi
|
||||
|
||||
# SUSE and derivates: https://www.unix.com/man-page/suse/1/command-not-found/
|
||||
if [[ -x /usr/bin/command-not-found ]]; then
|
||||
command_not_found_handler() {
|
||||
/usr/bin/command-not-found "$1"
|
||||
}
|
||||
fi
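Each platform block follows the same shape: if the distribution's helper binary exists, define `command_not_found_handler` to delegate to it. A hedged sketch for some other helper (the path below is hypothetical, purely to show the pattern):

```zsh
# Hypothetical helper location, shown only to illustrate the pattern.
if [[ -x /usr/local/libexec/command-not-found ]]; then
  command_not_found_handler() {
    /usr/local/libexec/command-not-found "$1"
  }
fi
```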
@ -12,51 +12,53 @@ plugins=(... common-aliases)
|
|||
|
||||
### ls command
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|---------------|--------------------------------------------------------------------------------|
|
||||
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
|
||||
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
|
||||
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
|
||||
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
|
||||
| ll | `ls -l` | List files as a long list |
|
||||
| ldot | `ls -ld .*` | List dot files as a long list |
|
||||
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
|
||||
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
|
||||
| lrt | `ls -1Fcrt` | List files sorted in reverse of create/modification time(oldest first) |
|
||||
| Alias | Command | Description |
|
||||
| ----- | ------------ | --------------------------------------------------------------------------- |
|
||||
| l | `ls -lFh` | List files as a long list, show size, type, human-readable |
|
||||
| la | `ls -lAFh` | List almost all files as a long list show size, type, human-readable |
|
||||
| lr | `ls -tRFh` | List files recursively sorted by date, show type, human-readable |
|
||||
| lt | `ls -ltFh` | List files as a long list sorted by date, show type, human-readable |
|
||||
| ll | `ls -l` | List files as a long list |
|
||||
| ldot | `ls -ld .*` | List dot files as a long list |
|
||||
| lS | `ls -1FSsh` | List files showing only size and name sorted by size |
|
||||
| lart | `ls -1Fcart` | List all files sorted in reverse of create/modification time (oldest first) |
|
||||
| lrt | `ls -1Fcrt` | List files sorted in reverse of create/modification time(oldest first) |
|
||||
| lsr | `ls -lARFh` | List all files and directories recursively |
|
||||
| lsn | `ls -1` | List files and directories in a single column |
|
||||
|
||||
### File handling
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------|------------------------------------------------------------------------------------|
|
||||
| rm | `rm -i` | Remove a file |
|
||||
| cp | `cp -i` | Copy a file |
|
||||
| mv | `mv -i` | Move a file |
|
||||
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
|
||||
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
|
||||
| duf | `du -sh` | Display the size of files in current location in human-readable form |
|
||||
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
|
||||
| Alias | Command | Description |
|
||||
| ----- | --------------------- | ------------------------------------------------------------------------------- |
|
||||
| rm | `rm -i` | Remove a file |
|
||||
| cp | `cp -i` | Copy a file |
|
||||
| mv | `mv -i` | Move a file |
|
||||
| zshrc | `${=EDITOR} ~/.zshrc` | Quickly access the ~/.zshrc file |
|
||||
| dud | `du -d 1 -h` | Display the size of files at depth 1 in current location in human-readable form |
|
||||
| duf | `du -sh` | Display the size of files in current location in human-readable form |
|
||||
| t | `tail -f` | Shorthand for tail which outputs the last part of a file |
|
||||
|
||||
### find and grep
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------------------------------------|-----------------------------------------|
|
||||
| fd\* | `find . -type d -name` | Find a directory with the given name |
|
||||
| ff | `find . -type f -name` | Find a file with the given name |
|
||||
| grep | `grep --color` | Searches for a query string |
|
||||
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
|
||||
| Alias | Command | Description |
|
||||
| ----- | -------------------------------------------------- | ------------------------------------ |
|
||||
| fd\* | `find . -type d -name` | Find a directory with the given name |
|
||||
| ff | `find . -type f -name` | Find a file with the given name |
|
||||
| grep | `grep --color` | Searches for a query string |
|
||||
| sgrep | `grep -R -n -H -C 5 --exclude-dir={.git,.svn,CVS}` | Useful for searching within files |
|
||||
|
||||
\* Only if the [`fd`](https://github.com/sharkdp/fd) command isn't installed.
|
||||
|
||||
### Other Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-----------|---------------------|-------------------------------------------------------------|
|
||||
| h | `history` | Lists all recently used commands |
|
||||
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
|
||||
| help | `man` | Opens up the man page for a command |
|
||||
| p | `ps -f` | Displays currently executing processes |
|
||||
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
|
||||
| unexport | `unset` | Used to unset an environment variable |
|
||||
| Alias | Command | Description |
|
||||
| -------- | ------------------ | ----------------------------------------------------------- |
|
||||
| h | `history` | Lists all recently used commands |
|
||||
| hgrep | `fc -El 0 \| grep` | Searches for a word in the list of previously used commands |
|
||||
| help | `man` | Opens up the man page for a command |
|
||||
| p | `ps -f` | Displays currently executing processes |
|
||||
| sortnr | `sort -n -r` | Used to sort the lines of a text file |
|
||||
| unexport | `unset` | Used to unset an environment variable |
|
||||
|
||||
## Global aliases
@ -77,7 +79,7 @@ $ find . -type f 2>/dev/null
|
|||
```
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-----------------------------|-------------------------------------------------------------|
|
||||
| ----- | --------------------------- | ----------------------------------------------------------- |
|
||||
| H | `\| head` | Pipes output to head which outputs the first part of a file |
|
||||
| T | `\| tail` | Pipes output to tail which outputs the last part of a file |
|
||||
| G | `\| grep` | Pipes output to grep to search for some word |
|
||||
|
|
@ -97,23 +99,23 @@ that file will be open with `acroread`.
|
|||
|
||||
### Reading Docs
|
||||
|
||||
| Alias | Command | Description |
|
||||
|-------|-------------|-------------------------------------|
|
||||
| pdf | `acroread` | Opens up a document using acroread |
|
||||
| ps | `gv` | Opens up a .ps file using gv |
|
||||
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
|
||||
| chm | `xchm` | Opens up a .chm file using xchm |
|
||||
| djvu | `djview` | Opens up a .djvu file using djview |
|
||||
| Alias | Command | Description |
|
||||
| ----- | ---------- | ---------------------------------- |
|
||||
| pdf | `acroread` | Opens up a document using acroread |
|
||||
| ps | `gv` | Opens up a .ps file using gv |
|
||||
| dvi | `xdvi` | Opens up a .dvi file using xdvi |
|
||||
| chm | `xchm` | Opens up a .chm file using xchm |
|
||||
| djvu | `djview` | Opens up a .djvu file using djview |
|
||||
|
||||
### Listing files inside a packed file
|
||||
|
||||
| Alias | Command | Description |
|
||||
|---------|-------------|-------------------------------------|
|
||||
| zip | `unzip -l` | Lists files inside a .zip file |
|
||||
| rar | `unrar l` | Lists files inside a .rar file |
|
||||
| tar | `tar tf` | Lists files inside a .tar file |
|
||||
| tar.gz | `echo` | Lists files inside a .tar.gz file |
|
||||
| ace | `unace l` | Lists files inside a .ace file |
|
||||
| Alias | Command | Description |
|
||||
| ------ | ---------- | --------------------------------- |
|
||||
| zip | `unzip -l` | Lists files inside a .zip file |
|
||||
| rar | `unrar l` | Lists files inside a .rar file |
|
||||
| tar | `tar tf` | Lists files inside a .tar file |
|
||||
| tar.gz | `echo` | Lists files inside a .tar.gz file |
|
||||
| ace | `unace l` | Lists files inside a .ace file |
|
||||
|
||||
### Some other features
@ -12,6 +12,8 @@ alias ldot='ls -ld .*'
|
|||
alias lS='ls -1FSsh'
|
||||
alias lart='ls -1Fcart'
|
||||
alias lrt='ls -1Fcrt'
|
||||
alias lsr='ls -lARFh' #Recursive list of files and directories
|
||||
alias lsn='ls -1' #A column contains name of files and directories
|
||||
|
||||
alias zshrc='${=EDITOR} ${ZDOTDIR:-$HOME}/.zshrc' # Quick access to the .zshrc file
@ -10,22 +10,26 @@ To use it add `composer` to the plugins array in your zshrc file.
|
|||
plugins=(... composer)
|
||||
```
|
||||
|
||||
Original author: Daniel Gomes <me@danielcsgomes.com>
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
| ------ | ------------------------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| `c` | `composer` | Starts composer |
|
||||
| `csu` | `composer self-update` | Updates composer to the latest version |
|
||||
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
|
||||
| `cr` | `composer require` | Adds new packages to `composer.json` |
|
||||
| `crm` | `composer remove` | Removes packages from `composer.json` |
|
||||
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
|
||||
| `ccp` | `composer create-project` | Create new project from an existing package |
|
||||
| `cdu` | `composer dump-autoload` | Updates the autoloader |
|
||||
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
|
||||
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
|
||||
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
|
||||
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
|
||||
| `cget` | `curl -s https://getcomposer.org/installer` | Installs composer in the current directory |
|
||||
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
|
||||
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
|
||||
| Alias | Command | Description |
|
||||
| ------ | ---------------------------------- | --------------------------------------------------------------------------------------- |
|
||||
| `c` | `composer` | Starts composer |
|
||||
| `ccp` | `composer create-project` | Create new project from an existing package |
|
||||
| `cdo` | `composer dump-autoload -o` | Converts PSR-0/4 autoloading to classmap for a faster autoloader (good for production) |
|
||||
| `cdu` | `composer dump-autoload` | Updates the autoloader |
|
||||
| `cget` | `curl -s <installer> \| php` | Installs composer in the current directory |
|
||||
| `cgr` | `composer global require` | Allows require command to run on COMPOSER_HOME directory |
|
||||
| `cgrm` | `composer global remove` | Allows remove command to run on COMPOSER_HOME directory |
|
||||
| `cgu` | `composer global update` | Allows update command to run on COMPOSER_HOME directory |
|
||||
| `ci` | `composer install` | Resolves and installs dependencies from `composer.json` |
|
||||
| `co` | `composer outdated` | Shows a list of installed packages with available updates |
|
||||
| `cod` | `composer outdated --direct` | Shows a list of installed packages with available updates which are direct dependencies |
|
||||
| `cr` | `composer require` | Adds new packages to `composer.json` |
|
||||
| `crm` | `composer remove` | Removes packages from `composer.json` |
|
||||
| `cs` | `composer show` | Lists available packages, with optional filtering |
|
||||
| `csu` | `composer self-update` | Updates composer to the latest version |
|
||||
| `cu` | `composer update` | Updates composer dependencies and `composer.lock` file |
|
||||
| `cuh` | `composer update -d <config-home>` | Updates globally installed packages |
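As a usage sketch (using `laravel/installer` purely as an illustration), the global aliases pair with the `PATH` handling in the plugin below:

```zsh
cgr laravel/installer   # composer global require laravel/installer
laravel --version       # found because Composer's global bin-dir is appended to $PATH
```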
@ -1,70 +1,76 @@
|
|||
# ------------------------------------------------------------------------------
|
||||
# FILE: composer.plugin.zsh
|
||||
# DESCRIPTION: oh-my-zsh composer plugin file.
|
||||
# AUTHOR: Daniel Gomes (me@danielcsgomes.com)
|
||||
# VERSION: 1.0.0
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# Composer basic command completion
|
||||
_composer_get_command_list () {
|
||||
$_comp_command1 --no-ansi 2>/dev/null | sed "1,/Available commands/d" | awk '/^[ \t]*[a-z]+/ { print $1 }'
|
||||
}
|
||||
|
||||
_composer_get_required_list () {
|
||||
$_comp_command1 show -s --no-ansi 2>/dev/null | sed '1,/requires/d' | awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }'
|
||||
}
|
||||
|
||||
_composer () {
|
||||
## Basic Composer command completion
|
||||
# Since Zsh 5.7, an improved composer command completion is provided
|
||||
if ! is-at-least 5.7; then
|
||||
_composer () {
|
||||
local curcontext="$curcontext" state line
|
||||
typeset -A opt_args
|
||||
_arguments \
|
||||
'*:: :->subcmds'
|
||||
_arguments '*:: :->subcmds'
|
||||
|
||||
if (( CURRENT == 1 )) || ( ((CURRENT == 2)) && [ "$words[1]" = "global" ] ) ; then
|
||||
compadd $(_composer_get_command_list)
|
||||
if (( CURRENT == 1 )) || ( (( CURRENT == 2 )) && [[ "$words[1]" = "global" ]] ); then
|
||||
# Command list
|
||||
local -a subcmds
|
||||
subcmds=("${(@f)"$($_comp_command1 --no-ansi 2>/dev/null | awk '
|
||||
/Available commands/{ r=1 }
|
||||
r == 1 && /^[ \t]*[a-z]+/{
|
||||
gsub(/^[ \t]+/, "")
|
||||
gsub(/ +/, ":")
|
||||
print $0
|
||||
}
|
||||
')"}")
|
||||
_describe -t commands 'composer command' subcmds
|
||||
else
|
||||
compadd $(_composer_get_required_list)
|
||||
# Required list
|
||||
compadd $($_comp_command1 show -s --no-ansi 2>/dev/null \
|
||||
| sed '1,/requires/d' \
|
||||
| awk 'NF > 0 && !/^requires \(dev\)/{ print $1 }')
|
||||
fi
|
||||
}
|
||||
}
|
||||
|
||||
compdef _composer composer
|
||||
compdef _composer composer.phar
|
||||
compdef _composer composer
|
||||
compdef _composer composer.phar
|
||||
fi
|
||||
|
||||
# Aliases
|
||||
|
||||
## Aliases
|
||||
alias c='composer'
|
||||
alias csu='composer self-update'
|
||||
alias cu='composer update'
|
||||
alias cr='composer require'
|
||||
alias crm='composer remove'
|
||||
alias ci='composer install'
|
||||
alias ccp='composer create-project'
|
||||
alias cdu='composer dump-autoload'
|
||||
alias cdo='composer dump-autoload -o'
|
||||
alias cgu='composer global update'
|
||||
alias cdu='composer dump-autoload'
|
||||
alias cget='curl -s https://getcomposer.org/installer | php'
|
||||
alias cgr='composer global require'
|
||||
alias cgrm='composer global remove'
|
||||
alias cgu='composer global update'
|
||||
alias ci='composer install'
|
||||
alias co='composer outdated'
|
||||
alias cod='composer outdated --direct'
|
||||
alias cr='composer require'
|
||||
alias crm='composer remove'
|
||||
alias cs='composer show'
|
||||
alias csu='composer self-update'
|
||||
alias cu='composer update'
|
||||
alias cuh='composer update --working-dir=$(composer config -g home)'
|
||||
|
||||
# install composer in the current directory
|
||||
alias cget='curl -s https://getcomposer.org/installer | php'
|
||||
|
||||
# Add Composer's global binaries to PATH, using Composer if available.
|
||||
if (( $+commands[composer] )); then
|
||||
autoload -Uz _store_cache _retrieve_cache _cache_invalid
|
||||
## If Composer not found, try to add known directories to $PATH
|
||||
if (( ! $+commands[composer] )); then
|
||||
[[ -d "$HOME/.composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.composer/vendor/bin"
|
||||
[[ -d "$HOME/.config/composer/vendor/bin" ]] && export PATH="$PATH:$HOME/.config/composer/vendor/bin"
|
||||
|
||||
_retrieve_cache composer
|
||||
|
||||
if [[ -z $__composer_bin_dir ]]; then
|
||||
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
|
||||
_store_cache composer __composer_bin_dir
|
||||
fi
|
||||
|
||||
# Add Composer's global binaries to PATH
|
||||
export PATH="$PATH:$__composer_bin_dir"
|
||||
|
||||
unset __composer_bin_dir
|
||||
else
|
||||
[ -d $HOME/.composer/vendor/bin ] && export PATH=$PATH:$HOME/.composer/vendor/bin
|
||||
[ -d $HOME/.config/composer/vendor/bin ] && export PATH=$PATH:$HOME/.config/composer/vendor/bin
|
||||
# If still not found, don't do the rest of the script
|
||||
(( $+commands[composer] )) || return 0
|
||||
fi
|
||||
|
||||
|
||||
## Add Composer's global binaries to PATH
|
||||
autoload -Uz _store_cache _retrieve_cache _cache_invalid
|
||||
_retrieve_cache composer
|
||||
|
||||
if [[ -z $__composer_bin_dir ]]; then
|
||||
__composer_bin_dir=$(composer global config bin-dir --absolute 2>/dev/null)
|
||||
_store_cache composer __composer_bin_dir
|
||||
fi
|
||||
|
||||
# Add Composer's global binaries to PATH
|
||||
export PATH="$PATH:$__composer_bin_dir"
|
||||
|
||||
unset __composer_bin_dir
|
||||
|
|
|
|||
|
|
@ -11,4 +11,6 @@ copybuffer () {
|
|||
|
||||
zle -N copybuffer
|
||||
|
||||
bindkey "^O" copybuffer
|
||||
bindkey -M emacs "^O" copybuffer
|
||||
bindkey -M viins "^O" copybuffer
|
||||
bindkey -M vicmd "^O" copybuffer
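If `Ctrl-O` clashes with another widget in your setup, the same widget can be bound to a different key in each keymap; a small sketch:

```zsh
# Rebind copybuffer to Ctrl-Y in the emacs, viins and vicmd keymaps.
for keymap in emacs viins vicmd; do
  bindkey -M "$keymap" '^Y' copybuffer
done
unset keymap
```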
@ -1,10 +1,3 @@
|
|||
# copydir plugin
|
||||
|
||||
Copies the path of your current folder to the system clipboard.
|
||||
|
||||
To use, add `copydir` to your plugins array:
|
||||
```
|
||||
plugins=(... copydir)
|
||||
```
|
||||
|
||||
Then use the command `copydir` to copy the $PWD.
|
||||
This plugin is deprecated. Use the [`copypath` plugin](https://github.com/ohmyzsh/ohmyzsh/tree/master/plugins/copypath) instead.
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
# Copies the pathname of the current directory to the system or X Windows clipboard
|
||||
echo ${(%):-'%F{yellow}The `%Bcopydir%b` plugin is deprecated. Use the `%Bcopypath%b` plugin instead.%f'}
|
||||
source "$ZSH/plugins/copypath/copypath.plugin.zsh"
|
||||
|
||||
# TODO: 2022-02-22: Remove deprecated copydir function.
|
||||
function copydir {
|
||||
emulate -L zsh
|
||||
print -n $PWD | clipcopy
|
||||
copypath
|
||||
}
15
plugins/copypath/README.md
Normal file
@ -0,0 +1,15 @@
|
|||
# copypath plugin
|
||||
|
||||
Copies the path of given directory or file to the system clipboard.
|
||||
|
||||
To use it, add `copypath` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... copypath)
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
- `copypath`: copies the absolute path of the current directory.
|
||||
|
||||
- `copypath <file_or_directory>`: copies the absolute path of the given file.
15
plugins/copypath/copypath.plugin.zsh
Normal file
@ -0,0 +1,15 @@
|
|||
# Copies the path of given directory or file to the system or X Windows clipboard.
|
||||
# Copy current directory if no parameter.
|
||||
function copypath {
|
||||
# If no argument passed, use current directory
|
||||
local file="${1:-.}"
|
||||
|
||||
# If argument is not an absolute path, prepend $PWD
|
||||
[[ $file = /* ]] || file="$PWD/$file"
|
||||
|
||||
# Copy the absolute path without resolving symlinks
|
||||
# If clipcopy fails, exit the function with an error
|
||||
print -n "${file:a}" | clipcopy || return 1
|
||||
|
||||
echo ${(%):-"%B${file:a}%b copied to clipboard."}
|
||||
}
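The `:a` modifier used above makes a path absolute without resolving symlinks, unlike `:A`; a quick sketch (the symlink is created only for the example):

```zsh
cd /tmp && ln -s / rootlink
print ${${:-rootlink}:a}   # -> /tmp/rootlink  (absolute, symlink preserved)
print ${${:-rootlink}:A}   # -> /              (absolute, symlink resolved)
```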
@ -25,7 +25,7 @@ The enabled options for rsync are:
|
|||
|
||||
* `-hhh`: outputs numbers in human-readable format, in units of 1024 (K, M, G, T).
|
||||
|
||||
* `--backup-dir=/tmp/rsync`: move backup copies to "/tmp/rsync".
|
||||
* `--backup-dir="/tmp/rsync-$USERNAME"`: move backup copies to "/tmp/rsync-$USERNAME".
|
||||
|
||||
* `-e /dev/null`: only work on local files (disable remote shells).
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
cpv() {
|
||||
rsync -pogbr -hhh --backup-dir=/tmp/rsync -e /dev/null --progress "$@"
|
||||
rsync -pogbr -hhh --backup-dir="/tmp/rsync-${USERNAME}" -e /dev/null --progress "$@"
|
||||
}
|
||||
compdef _files cpv
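Usage mirrors `cp`, with rsync's progress output; a sketch with illustrative paths:

```zsh
cpv big.iso /mnt/backup/           # copy a single file with a progress bar
cpv Photos/ /mnt/backup/Photos/    # directories work too: -r is already in the flag set
```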
@ -36,7 +36,7 @@ arguments=(
|
|||
'--reinstall[Reinstall the distribution even if you already have the latest version installed]'
|
||||
'--interactive[Turn on interactive configure]'
|
||||
|
||||
'--scandeps[Scan the depencencies of given modules and output the tree in a text format]'
|
||||
'--scandeps[Scan the dependencies of given modules and output the tree in a text format]'
|
||||
'--format[Specify what format to display the scanned dependency tree]:scandeps format:(tree json yaml dists)'
|
||||
|
||||
'--save-dists[Specify the optional directory path to copy downloaded tarballs]'
|
||||
|
|
|
|||
|
|
@ -1,80 +1,84 @@
|
|||
# Usage: dash [keyword:]query
|
||||
dash() { open dash://"$*" }
|
||||
dash() { open -a Dash.app dash://"$*" }
|
||||
compdef _dash dash
|
||||
|
||||
_dash() {
|
||||
# No sense doing this for anything except the 2nd position and if we haven't
|
||||
# specified which docset to query against
|
||||
if [[ $CURRENT -eq 2 && ! "$words[2]" =~ ":" ]]; then
|
||||
local -a _all_docsets
|
||||
_all_docsets=()
|
||||
# Use defaults to get the array of docsets from preferences
|
||||
# Have to smash it into one big line so that each docset is an element of
|
||||
# our DOCSETS array
|
||||
DOCSETS=("${(@f)$(defaults read com.kapeli.dashdoc docsets | tr -d '\n' | grep -oE '\{.*?\}')}")
|
||||
if [[ $CURRENT -ne 2 || "$words[2]" =~ ":" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
# remove all newlines since defaults prints so pretty like
|
||||
# Now get each docset and output each on their own line
|
||||
for doc in "$DOCSETS[@]"; do
|
||||
# Only output docsets that are actually enabled
|
||||
if [[ "`echo $doc | grep -Eo \"isEnabled = .*?;\" | sed 's/[^01]//g'`" == "0" ]]; then
|
||||
local -aU docsets
|
||||
docsets=()
|
||||
|
||||
# Use defaults to get the array of docsets from preferences
|
||||
# Have to smash it into one big line so that each docset is an element of our docsets array
|
||||
# Only output docsets that are actually enabled
|
||||
local -a enabled_docsets
|
||||
enabled_docsets=("${(@f)$(defaults read com.kapeli.dashdoc docsets \
|
||||
| tr -d '\n' | grep -oE '\{.*?\}' | grep -E 'isEnabled = 1;')}")
|
||||
|
||||
local docset name keyword
|
||||
# Now get each docset and output each on their own line
|
||||
for docset in "$enabled_docsets[@]"; do
|
||||
keyword=''
|
||||
# Order of preference as explained to me by @kapeli via email
|
||||
for locator in keyword suggestedKeyword platform; do
|
||||
# Echo the docset, try to find the appropriate keyword
|
||||
# Strip doublequotes and colon from any keyword so that everything has the
|
||||
# same format when output (we'll add the colon in the completion)
|
||||
if [[ "$docset" =~ "$locator = ([^;]*);" ]]; then
|
||||
keyword="${match[1]//[\":]}"
|
||||
fi
|
||||
|
||||
if [[ -z "$keyword" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
keyword=''
|
||||
|
||||
# Order of preference as explained to me by @kapeli via email
|
||||
KEYWORD_LOCATORS=(keyword suggestedKeyword platform)
|
||||
for locator in "$KEYWORD_LOCATORS[@]"; do
|
||||
# Echo the docset, try to find the appropriate keyword
|
||||
# Strip doublequotes and colon from any keyword so that everything has the
|
||||
# same format when output (we'll add the colon in the completion)
|
||||
keyword=`echo $doc | grep -Eo "$locator = .*?;" | sed -e "s/$locator = \(.*\);/\1/" -e "s/[\":]//g"`
|
||||
if [[ ! -z "$keyword" ]]; then
|
||||
# if we fall back to platform, we should do some checking per @kapeli
|
||||
if [[ "$locator" == "platform" ]]; then
|
||||
# Since these are the only special cases right now, let's not do the
|
||||
# expensive processing unless we have to
|
||||
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
|
||||
docsetName=`echo $doc | grep -Eo "docsetName = .*?;" | sed -e "s/docsetName = \(.*\);/\1/" -e "s/[\":]//g"`
|
||||
case "$keyword" in
|
||||
python)
|
||||
case "$docsetName" in
|
||||
"Python 2") keyword="python2" ;;
|
||||
"Python 3") keyword="python3" ;;
|
||||
esac ;;
|
||||
java)
|
||||
case "$docsetName" in
|
||||
"Java SE7") keyword="java7" ;;
|
||||
"Java SE6") keyword="java6" ;;
|
||||
"Java SE8") keyword="java8" ;;
|
||||
esac ;;
|
||||
qt)
|
||||
case "$docsetName" in
|
||||
"Qt 5") keyword="qt5" ;;
|
||||
"Qt 4"|Qt) keyword="qt4" ;;
|
||||
esac ;;
|
||||
cocos2d)
|
||||
case "$docsetName" in
|
||||
Cocos3D) keyword="cocos3d" ;;
|
||||
esac ;;
|
||||
esac
|
||||
fi
|
||||
# if we fall back to platform, we should do some checking per @kapeli
|
||||
if [[ "$locator" == "platform" ]]; then
|
||||
# Since these are the only special cases right now, let's not do the
|
||||
# expensive processing unless we have to
|
||||
if [[ "$keyword" = (python|java|qt|cocos2d) ]]; then
|
||||
if [[ "$docset" =~ "docsetName = ([^;]*);" ]]; then
|
||||
name="${match[1]//[\":]}"
|
||||
case "$keyword" in
|
||||
python)
|
||||
case "$name" in
|
||||
"Python 2") keyword="python2" ;;
|
||||
"Python 3") keyword="python3" ;;
|
||||
esac ;;
|
||||
java)
|
||||
case "$name" in
|
||||
"Java SE7") keyword="java7" ;;
|
||||
"Java SE6") keyword="java6" ;;
|
||||
"Java SE8") keyword="java8" ;;
|
||||
esac ;;
|
||||
qt)
|
||||
case "$name" in
|
||||
"Qt 5") keyword="qt5" ;;
|
||||
"Qt 4"|Qt) keyword="qt4" ;;
|
||||
esac ;;
|
||||
cocos2d)
|
||||
case "$name" in
|
||||
Cocos3D) keyword="cocos3d" ;;
|
||||
esac ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Bail once we have a match
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
# If we have a keyword, add it to the list!
|
||||
if [[ ! -z "$keyword" ]]; then
|
||||
_all_docsets+=($keyword)
|
||||
fi
|
||||
|
||||
# Bail once we have a match
|
||||
break
|
||||
done
|
||||
|
||||
# special thanks to [arx] on #zsh for getting me sorted on this piece
|
||||
compadd -qS: -- "$_all_docsets[@]"
|
||||
return
|
||||
fi
|
||||
# If we have a keyword, add it to the list!
|
||||
if [[ -n "$keyword" ]]; then
|
||||
docsets+=($keyword)
|
||||
fi
|
||||
done
|
||||
|
||||
# special thanks to [arx] on #zsh for getting me sorted on this piece
|
||||
compadd -qS: -- "$docsets[@]"
|
||||
}
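A usage sketch for the updated `dash` function (macOS only; assumes Dash.app is installed and, for the first example, that the Python 3 docset is enabled):

```zsh
dash python3:pathlib        # search the Python 3 docset for "pathlib"
dash "list comprehension"   # no keyword prefix: search all enabled docsets
```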
@ -33,20 +33,22 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
|
|||
| Alias | Command | Description |
|
||||
| -------- | -------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------- |
|
||||
| `aac` | `sudo $apt_pref autoclean` | Clears out the local repository of retrieved package files |
|
||||
| `aar` | `sudo $apt_pref autoremove` | Removes packages installed automatically that are no longer needed |
|
||||
| `abd` | `sudo $apt_pref build-dep` | Installs all dependencies for building packages |
|
||||
| `ac` | `sudo $apt_pref clean` | Clears out the local repository of retrieved package files except lock files |
|
||||
| `ad` | `sudo $apt_pref update` | Updates the package lists for upgrades for packages |
|
||||
| `adg` | `sudo $apt_pref update && sudo $apt_pref $apt_upgr` | Update and upgrade packages |
|
||||
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
|
||||
| `adu` | `sudo $apt_pref update && sudo $apt_pref dist-upgrade` | Smart upgrade that handles dependencies |
|
||||
| `afu` | `sudo apt-file update` | Update the files in packages |
|
||||
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
|
||||
| `ai` | `sudo $apt_pref install` | Command-line tool to install package |
|
||||
| `ail` | `sed -e 's/ */ /g' -e 's/ *//' \| cut -s -d ' ' -f 1 \| xargs sudo $apt_pref install` | Install all packages given on the command line while using only the first word of each line |
|
||||
| `alu` | `sudo apt update && apt list -u && sudo apt upgrade` | Update, list and upgrade packages |
|
||||
| `ap` | `sudo $apt_pref purge` | Removes packages along with configuration files |
|
||||
| `ar` | `sudo $apt_pref remove` | Removes packages, keeps the configuration files |
|
||||
| `ads` | `sudo apt-get dselect-upgrade` | Installs packages from list and removes all not in the list |
|
||||
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
|
||||
| `au` | `sudo $apt_pref $apt_upgr` | Install package upgrades |
|
||||
| `di` | `sudo dpkg -i` | Install all .deb files in the current directory |
|
||||
| `dia` | `sudo dpkg -i ./*.deb` | Install all .deb files in the current directory |
|
||||
| `kclean` | `sudo aptitude remove -P ?and(~i~nlinux-(ima\|hea) ?not(~n$(uname -r)))` | Remove ALL kernel images and headers EXCEPT the one in use |
|
||||
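Since the hunk header above notes that `$apt_pref` and `$apt_upgr` must be set before Oh My Zsh is sourced, a minimal sketch of doing so in `~/.zshrc` (the chosen values are illustrative):

```zsh
# Before `source $ZSH/oh-my-zsh.sh`:
apt_pref='apt'            # which tool the a* aliases wrap (apt, apt-get or aptitude)
apt_upgr='full-upgrade'   # which upgrade subcommand `au`, `adg`, etc. should run
```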
|
||||
## Aliases - Commands using `su`
|
||||
|
|
@ -54,6 +56,7 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
|
|||
| Alias | Command |
|
||||
| ----- | --------------------------------------------------------- |
|
||||
| `aac` | `su -ls "$apt_pref autoclean" root` |
|
||||
| `aar` | `su -ls "$apt_pref autoremove" root` |
|
||||
| `ac` | `su -ls "$apt_pref clean" root` |
|
||||
| `ad` | `su -lc "$apt_pref update" root` |
|
||||
| `adg` | `su -lc "$apt_pref update && aptitude $apt_upgr" root` |
|
||||
|
|
@ -75,8 +78,8 @@ Set `$apt_pref` and `$apt_upgr` to whatever command you want (before sourcing Oh
|
|||
| ------------------- | --------------------------------------------------------------- |
|
||||
| `apt-copy` | Create a simple script that can be used to 'duplicate' a system |
|
||||
| `apt-history` | Displays apt history for a command |
|
||||
| `kerndeb` | Builds kernel packages |
|
||||
| `apt-list-packages` | List packages by size |
|
||||
| `kerndeb` | Builds kernel packages |
|
||||
|
||||
## Authors
|
||||
|
||||
|
|
|
|||
|
|
@ -52,13 +52,18 @@ if [[ $use_sudo -eq 1 ]]; then
|
|||
alias ai="sudo $apt_pref install"
|
||||
# Install all packages given on the command line while using only the first word of each line:
|
||||
# acs ... | ail
|
||||
|
||||
alias ail="sed -e 's/ */ /g' -e 's/ *//' | cut -s -d ' ' -f 1 | xargs sudo $apt_pref install"
|
||||
alias ap="sudo $apt_pref purge"
|
||||
alias ar="sudo $apt_pref remove"
|
||||
alias aar="sudo $apt_pref autoremove"
|
||||
|
||||
# apt-get only
|
||||
alias ads="sudo apt-get dselect-upgrade"
|
||||
|
||||
# apt only
|
||||
alias alu="sudo apt update && apt list -u && sudo apt upgrade"
|
||||
|
||||
# Install all .deb files in the current directory.
|
||||
# Warning: you will need to put the glob in single quotes if you use:
|
||||
# glob_subst
|
||||
|
|
@ -98,7 +103,11 @@ else
|
|||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
|
||||
function aar() {
|
||||
cmd="su -lc '$apt_pref -P autoremove $@' root"
|
||||
print "$cmd"
|
||||
eval "$cmd"
|
||||
}
|
||||
# Install all .deb files in the current directory
|
||||
# Assumes glob_subst is off
|
||||
alias dia='su -lc "dpkg -i ./*.deb" root'
|
||||
|
|
@ -139,6 +148,7 @@ apt_pref_compdef ai "install"
|
|||
apt_pref_compdef ail "install"
|
||||
apt_pref_compdef ap "purge"
|
||||
apt_pref_compdef ar "remove"
|
||||
apt_pref_compdef aar "autoremove"
|
||||
apt_pref_compdef ads "dselect-upgrade"
|
||||
|
||||
# Misc. #####################################################################
|
||||
|
|
|
|||
1
plugins/deno/.gitignore
vendored
1
plugins/deno/.gitignore
vendored
|
|
@ -1 +0,0 @@
|
|||
_deno
|
||||
|
|
@ -13,6 +13,7 @@ This plugin sets up completion and aliases for [Deno](https://deno.land).
|
|||
| dh | deno help |
|
||||
| dli | deno lint |
|
||||
| drn | deno run |
|
||||
| drA | deno run -A |
|
||||
| drw | deno run --watch |
|
||||
| dts | deno test |
|
||||
| dup | deno upgrade |
|
||||
|
|
|
|||
|
|
@ -6,18 +6,22 @@ alias dfmt='deno fmt'
|
|||
alias dh='deno help'
|
||||
alias dli='deno lint'
|
||||
alias drn='deno run'
|
||||
alias drA='deno run -A'
|
||||
alias drw='deno run --watch'
|
||||
alias dts='deno test'
|
||||
alias dup='deno upgrade'
|
||||
|
||||
# COMPLETION FUNCTION
|
||||
if (( $+commands[deno] )); then
|
||||
if [[ ! -f $ZSH_CACHE_DIR/deno_version ]] \
|
||||
|| [[ "$(deno --version)" != "$(< "$ZSH_CACHE_DIR/deno_version")" ]] \
|
||||
|| [[ ! -f $ZSH/plugins/deno/_deno ]]; then
|
||||
deno completions zsh > $ZSH/plugins/deno/_deno
|
||||
deno --version > $ZSH_CACHE_DIR/deno_version
|
||||
fi
|
||||
if (( ! $+commands[deno] )); then
|
||||
return
|
||||
fi
|
||||
|
||||
# If the completion file doesn't exist yet, we need to autoload it and
|
||||
# bind it to `deno`. Otherwise, compinit will have already done that.
|
||||
if [[ ! -f "$ZSH_CACHE_DIR/completions/_deno" ]]; then
|
||||
typeset -g -A _comps
|
||||
autoload -Uz _deno
|
||||
_comps[deno]=_deno
|
||||
fi
|
||||
|
||||
deno completions zsh >| "$ZSH_CACHE_DIR/completions/_deno" &|
|
||||
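For reference, the aliases defined above simply prefix Deno subcommands; a couple of illustrative invocations (assuming `deno` is installed and a `main.ts` exists):

```zsh
drw main.ts   # deno run --watch main.ts
dts           # deno test
```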
|
|
|
|||
|
|
@ -12,16 +12,27 @@ plugins=(... dirhistory)
|
|||
|
||||
| Shortcut | Description |
|
||||
|-----------------------------------|-----------------------------------------------------------|
|
||||
| <kbd>alt</kbd> + <kbd>left</kbd> | Go to previous directory |
|
||||
| <kbd>alt</kbd> + <kbd>right</kbd> | Undo <kbd>alt</kbd> + <kbd>left</kbd> |
|
||||
| <kbd>alt</kbd> + <kbd>up</kbd> | Move into the parent directory |
|
||||
| <kbd>alt</kbd> + <kbd>down</kbd> | Move into the first child directory by alphabetical order |
|
||||
| <kbd>Alt</kbd> + <kbd>Left</kbd> | Go to previous directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Right</kbd> | Go to next directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Up</kbd> | Move into the parent directory |
|
||||
| <kbd>Alt</kbd> + <kbd>Down</kbd> | Move into the first child directory by alphabetical order |
|
||||
|
||||
**For macOS: use the Option key (<kbd>⌥</kbd>) instead of <kbd>Alt</kbd>**.
|
||||
|
||||
> NOTE: some terminals might override the <kbd>Alt</kbd> + Arrows key bindings (e.g. Windows Terminal).
|
||||
> If these don't work check your terminal settings and change them to a different keyboard shortcut.
|
||||
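One possible workaround when the defaults are swallowed by the terminal, sketched under the assumption that the terminal is reconfigured to send the common xterm Alt+Shift+Arrow sequences (verify what yours sends with `cat -v`):

```zsh
# In ~/.zshrc, after Oh My Zsh is sourced; the sequences are examples only.
bindkey '\e[1;4D' dirhistory_zle_dirhistory_back    # Alt+Shift+Left
bindkey '\e[1;4C' dirhistory_zle_dirhistory_future  # Alt+Shift+Right
bindkey '\e[1;4A' dirhistory_zle_dirhistory_up      # Alt+Shift+Up
bindkey '\e[1;4B' dirhistory_zle_dirhistory_down    # Alt+Shift+Down
```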
|
||||
## Usage
|
||||
|
||||
This plugin allows you to navigate the history of previous current-working-directories using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT. MAC users may alternately use OPT-LEFT and OPT-RIGHT.
|
||||
This plugin allows you to navigate the history of previous working directories using <kbd>Alt</kbd> + <kbd>Left</kbd>
|
||||
and <kbd>Alt</kbd> + <kbd>Right</kbd>. <kbd>Alt</kbd> + <kbd>Left</kbd> moves to past directories, and
|
||||
<kbd>Alt</kbd> + <kbd>Right</kbd> goes back to recent directories.
|
||||
|
||||
Also, navigate directory **hierarchy** using ALT-UP and ALT-DOWN. (mac keybindings not yet implemented). ALT-UP moves to higher hierarchy (shortcut for 'cd ..'). ALT-DOWN moves into the first directory found in alphabetical order (useful to navigate long empty directories e.g. java packages)
|
||||
**NOTE: the maximum directory history size is 30.**
|
||||
|
||||
You can also navigate **directory hierarchies** using <kbd>Alt</kbd> + <kbd>Up</kbd> and <kbd>Alt</kbd> + <kbd>Down</kbd>.
|
||||
<kbd>Alt</kbd> + <kbd>Up</kbd> moves to the parent directory, while <kbd>Alt</kbd> + <kbd>Down</kbd> moves into the first
|
||||
child directory found in alphabetical order (useful to navigate long empty directories, e.g. Java packages).
|
||||
|
||||
For example, if the shell was started, and the following commands were entered:
|
||||
|
||||
|
|
@ -32,8 +43,20 @@ cd share
|
|||
cd doc
|
||||
```
|
||||
|
||||
Then entering ALT-LEFT at the prompt would change directory from /usr/share/doc to /usr/share, then if pressed again to /usr/, then ~. If ALT-RIGHT were pressed the directory would be changed to /usr/ again.
|
||||
the directory stack (`dirs -v`) would look like this:
|
||||
|
||||
After that, ALT-DOWN will probably go to /usr/bin (depends on your /usr structure), ALT-UP will return to /usr, then ALT-UP will get you to /
|
||||
```console
|
||||
$ dirs -v
|
||||
0 /usr/share/doc
|
||||
1 /usr/share
|
||||
2 /usr
|
||||
3 ~
|
||||
```
|
||||
|
||||
**Currently the max history size is 30**. The navigation should work for xterm, PuTTY xterm mode, GNU screen, and on MAC with alternate keys as mentioned above.
|
||||
then entering <kbd>Alt</kbd> + <kbd>Left</kbd> at the prompt would change directory from `/usr/share/doc` to `/usr/share`,
|
||||
then if pressed again to `/usr`, then `~`. If <kbd>Alt</kbd> + <kbd>Right</kbd> were pressed the directory would be changed
|
||||
to `/usr` again.
|
||||
|
||||
After that, <kbd>Alt</kbd> + <kbd>Down</kbd> will probably go to `/usr/bin` if `bin` is the first directory in alphabetical
|
||||
order (depends on your `/usr` folder structure). <kbd>Alt</kbd> + <kbd>Up</kbd> will return to `/usr`, and once more will get
|
||||
you to the root folder (`/`).
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
##
|
||||
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
|
||||
##
|
||||
# Navigate directory history using ALT-LEFT and ALT-RIGHT. ALT-LEFT moves back to directories
|
||||
# that the user has changed to in the past, and ALT-RIGHT undoes ALT-LEFT.
|
||||
#
|
||||
#
|
||||
# Navigate directory hierarchy using ALT-UP and ALT-DOWN.
|
||||
# ALT-UP moves to higher hierarchy (cd ..)
|
||||
# ALT-DOWN moves into the first directory found in alphabetical order
|
||||
|
|
@ -14,25 +14,25 @@ export dirhistory_future
|
|||
|
||||
export DIRHISTORY_SIZE=30
|
||||
|
||||
# Pop the last element of dirhistory_past.
|
||||
# Pass the name of the variable to return the result in.
|
||||
# Pop the last element of dirhistory_past.
|
||||
# Pass the name of the variable to return the result in.
|
||||
# Returns the element if the array was not empty,
|
||||
# otherwise returns empty string.
|
||||
function pop_past() {
|
||||
eval "$1='$dirhistory_past[$#dirhistory_past]'"
|
||||
typeset -g $1="${dirhistory_past[$#dirhistory_past]}"
|
||||
if [[ $#dirhistory_past -gt 0 ]]; then
|
||||
dirhistory_past[$#dirhistory_past]=()
|
||||
fi
|
||||
}
|
||||
|
||||
function pop_future() {
|
||||
eval "$1='$dirhistory_future[$#dirhistory_future]'"
|
||||
typeset -g $1="${dirhistory_future[$#dirhistory_future]}"
|
||||
if [[ $#dirhistory_future -gt 0 ]]; then
|
||||
dirhistory_future[$#dirhistory_future]=()
|
||||
fi
|
||||
}
|
||||
|
||||
# Push a new element onto the end of dirhistory_past. If the size of the array
|
||||
# Push a new element onto the end of dirhistory_past. If the size of the array
|
||||
# is >= DIRHISTORY_SIZE, the array is shifted
|
||||
function push_past() {
|
||||
if [[ $#dirhistory_past -ge $DIRHISTORY_SIZE ]]; then
|
||||
|
|
@ -76,7 +76,7 @@ function dirhistory_back() {
|
|||
local d=""
|
||||
# Last element in dirhistory_past is the cwd.
|
||||
|
||||
pop_past cw
|
||||
pop_past cw
|
||||
if [[ "" == "$cw" ]]; then
|
||||
# Someone overwrote our variable. Recover it.
|
||||
dirhistory_past=($PWD)
|
||||
|
|
@ -121,40 +121,43 @@ function dirhistory_zle_dirhistory_future() {
|
|||
}
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_back
|
||||
# xterm in normal mode
|
||||
bindkey "\e[3D" dirhistory_zle_dirhistory_back
|
||||
bindkey "\e[1;3D" dirhistory_zle_dirhistory_back
|
||||
# Terminal.app
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
|
||||
bindkey "^[b" dirhistory_zle_dirhistory_back
|
||||
fi
|
||||
# iTerm2
|
||||
if [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
|
||||
bindkey "^[^[[D" dirhistory_zle_dirhistory_back
|
||||
fi
|
||||
# Putty:
|
||||
bindkey "\e\e[D" dirhistory_zle_dirhistory_back
|
||||
# GNU screen:
|
||||
bindkey "\eO3D" dirhistory_zle_dirhistory_back
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_future
|
||||
bindkey "\e[3C" dirhistory_zle_dirhistory_future
|
||||
bindkey "\e[1;3C" dirhistory_zle_dirhistory_future
|
||||
# Terminal.app
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" ]]; then
|
||||
bindkey "^[f" dirhistory_zle_dirhistory_future
|
||||
fi
|
||||
# iTerm2
|
||||
if [[ "$TERM_PROGRAM" == "iTerm.app" ]]; then
|
||||
bindkey "^[^[[C" dirhistory_zle_dirhistory_future
|
||||
fi
|
||||
bindkey "\e\e[C" dirhistory_zle_dirhistory_future
|
||||
bindkey "\eO3C" dirhistory_zle_dirhistory_future
|
||||
|
||||
for keymap in emacs vicmd viins; do
|
||||
# dirhistory_back
|
||||
bindkey -M $keymap "\e[3D" dirhistory_zle_dirhistory_back # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3D" dirhistory_zle_dirhistory_back # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[D" dirhistory_zle_dirhistory_back # Putty
|
||||
bindkey -M $keymap "\eO3D" dirhistory_zle_dirhistory_back # GNU screen
|
||||
|
||||
#
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[b" dirhistory_zle_dirhistory_back ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[D" dirhistory_zle_dirhistory_back ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcub1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcub1]}" dirhistory_zle_dirhistory_back # urxvt
|
||||
fi
|
||||
|
||||
# dirhistory_future
|
||||
bindkey -M $keymap "\e[3C" dirhistory_zle_dirhistory_future # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3C" dirhistory_zle_dirhistory_future # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[C" dirhistory_zle_dirhistory_future # Putty
|
||||
bindkey -M $keymap "\eO3C" dirhistory_zle_dirhistory_future # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[f" dirhistory_zle_dirhistory_future ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[C" dirhistory_zle_dirhistory_future ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcuf1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcuf1]}" dirhistory_zle_dirhistory_future # urxvt
|
||||
fi
|
||||
done
|
||||
|
||||
#
|
||||
# HIERARCHY Implemented in this section, in case someone wants to split it to another plugin if it clashes bindings
|
||||
#
|
||||
#
|
||||
|
||||
# Move up in hierarchy
|
||||
function dirhistory_up() {
|
||||
|
|
@ -181,22 +184,38 @@ function dirhistory_zle_dirhistory_down() {
|
|||
}
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_up
|
||||
# xterm in normal mode
|
||||
bindkey "\e[3A" dirhistory_zle_dirhistory_up
|
||||
bindkey "\e[1;3A" dirhistory_zle_dirhistory_up
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" || "$TERM_PROGRAM" == "iTerm.app" ]]; then
|
||||
bindkey "^[[A" dirhistory_zle_dirhistory_up
|
||||
fi
|
||||
# Putty:
|
||||
bindkey "\e\e[A" dirhistory_zle_dirhistory_up
|
||||
# GNU screen:
|
||||
bindkey "\eO3A" dirhistory_zle_dirhistory_up
|
||||
|
||||
zle -N dirhistory_zle_dirhistory_down
|
||||
bindkey "\e[3B" dirhistory_zle_dirhistory_down
|
||||
bindkey "\e[1;3B" dirhistory_zle_dirhistory_down
|
||||
if [[ "$TERM_PROGRAM" == "Apple_Terminal" || "$TERM_PROGRAM" == "iTerm.app" ]]; then
|
||||
bindkey "^[[B" dirhistory_zle_dirhistory_down
|
||||
fi
|
||||
bindkey "\e\e[B" dirhistory_zle_dirhistory_down
|
||||
bindkey "\eO3B" dirhistory_zle_dirhistory_down
|
||||
|
||||
for keymap in emacs vicmd viins; do
|
||||
# dirhistory_up
|
||||
bindkey -M $keymap "\e[3A" dirhistory_zle_dirhistory_up # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3A" dirhistory_zle_dirhistory_up # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[A" dirhistory_zle_dirhistory_up # Putty
|
||||
bindkey -M $keymap "\eO3A" dirhistory_zle_dirhistory_up # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[[A" dirhistory_zle_dirhistory_up ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[A" dirhistory_zle_dirhistory_up ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcuu1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcuu1]}" dirhistory_zle_dirhistory_up # urxvt
|
||||
fi
|
||||
|
||||
# dirhistory_down
|
||||
bindkey -M $keymap "\e[3B" dirhistory_zle_dirhistory_down # xterm in normal mode
|
||||
bindkey -M $keymap "\e[1;3B" dirhistory_zle_dirhistory_down # xterm in normal mode
|
||||
bindkey -M $keymap "\e\e[B" dirhistory_zle_dirhistory_down # Putty
|
||||
bindkey -M $keymap "\eO3B" dirhistory_zle_dirhistory_down # GNU screen
|
||||
|
||||
case "$TERM_PROGRAM" in
|
||||
Apple_Terminal) bindkey -M $keymap "^[[B" dirhistory_zle_dirhistory_down ;; # Terminal.app
|
||||
iTerm.app) bindkey -M $keymap "^[^[[B" dirhistory_zle_dirhistory_down ;; # iTerm2
|
||||
esac
|
||||
|
||||
if (( ${+terminfo[kcud1]} )); then
|
||||
bindkey -M $keymap "^[${terminfo[kcud1]}" dirhistory_zle_dirhistory_down # urxvt
|
||||
fi
|
||||
done
|
||||
|
||||
unset keymap
|
||||
|
|
|
|||
|
|
@ -1,36 +0,0 @@
|
|||
# Django plugin
|
||||
|
||||
This plugin adds completion and hints for the [Django Project](https://www.djangoproject.com/) `manage.py` commands
|
||||
and options.
|
||||
|
||||
To use it, add `django` to the plugins array in your zshrc file:
|
||||
|
||||
```zsh
|
||||
plugins=(... django)
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```zsh
|
||||
$> python manage.py (press <TAB> here)
|
||||
```
|
||||
|
||||
Would result in:
|
||||
|
||||
```zsh
|
||||
cleanup -- remove old data from the database
|
||||
compilemessages -- compile .po files to .mo for use with gettext
|
||||
createcachetable -- creates table for SQL cache backend
|
||||
createsuperuser -- create a superuser
|
||||
dbshell -- run command-line client for the current database
|
||||
diffsettings -- display differences between the current settings and Django defaults
|
||||
dumpdata -- output contents of database as a fixture
|
||||
flush -- execute 'sqlflush' on the current database
|
||||
inspectdb -- output Django model module for tables in database
|
||||
loaddata -- install the named fixture(s) in the database
|
||||
makemessages -- pull out all strings marked for translation
|
||||
reset -- executes 'sqlreset' for the given app(s)
|
||||
runfcgi -- run this project as a fastcgi
|
||||
runserver -- start a lightweight web server for development
|
||||
...
|
||||
```
|
||||
|
|
@ -1,404 +0,0 @@
|
|||
#compdef manage.py
|
||||
|
||||
typeset -ga nul_args
|
||||
nul_args=(
|
||||
'--verbosity=-[verbosity level; 0=minimal output, 1=normal output, 2=all output.]:Verbosity:((0\:minimal 1\:normal 2\:all))'
|
||||
'--settings=-[the Python path to a settings module.]:file:_files'
|
||||
'--pythonpath=-[a directory to add to the Python path.]:directory:_directories'
|
||||
'--traceback[print traceback on exception.]'
|
||||
"--no-color[Don't colorize the command output.]"
|
||||
"--version[show program's version number and exit.]"
|
||||
{-h,--help}'[show this help message and exit.]'
|
||||
)
|
||||
|
||||
typeset -ga start_args
|
||||
start_args=(
|
||||
'--template=-[The path or URL to load the template from.]:directory:_directories'
|
||||
'--extension=-[The file extension(s) to render (default: "py").]'
|
||||
'--name=-[The file name(s) to render.]:file:_files'
|
||||
)
|
||||
|
||||
typeset -ga db_args
|
||||
db_args=(
|
||||
'--database=-[Nominates a database. Defaults to the "default" database.]'
|
||||
)
|
||||
|
||||
typeset -ga noinput_args
|
||||
noinput_args=(
|
||||
'--noinput[tells Django to NOT prompt the user for input of any kind.]'
|
||||
)
|
||||
|
||||
typeset -ga no_init_data_args
|
||||
no_init_data_args=(
|
||||
'--no-initial-data[Tells Django not to load any initial data after database synchronization.]'
|
||||
)
|
||||
|
||||
typeset -ga tag_args
|
||||
tag_args=(
|
||||
'--tag=-[Run only checks labeled with given tag.]'
|
||||
'--list-tags[List available tags.]'
|
||||
)
|
||||
|
||||
_managepy-check(){
|
||||
_arguments -s : \
|
||||
$tag_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-changepassword(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-createcachetable(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-createsuperuser(){
|
||||
_arguments -s : \
|
||||
'--username=-[Specifies the login for the superuser.]' \
|
||||
'--email=-[Specifies the email for the superuser.]' \
|
||||
$noinput_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-collectstatic(){
|
||||
_arguments -s : \
|
||||
'--link[Create a symbolic link to each file instead of copying.]' \
|
||||
'--no-post-process[Do NOT post process collected files.]' \
|
||||
'--ignore=-[Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.]' \
|
||||
'--dry-run[Do everything except modify the filesystem.]' \
|
||||
'--clear[Clear the existing files using the storage before trying to copy or link the original file.]' \
|
||||
'--link[Create a symbolic link to each file instead of copying.]' \
|
||||
'--no-default-ignore[Do not ignore the common private glob-style patterns "CVS", ".*" and "*~".]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dbshell(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-diffsettings(){
|
||||
_arguments -s : \
|
||||
"--all[Display all settings, regardless of their value.]"
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dumpdata(){
|
||||
_arguments -s : \
|
||||
'--format=-[Specifies the output serialization format for fixtures.]:format:(json yaml xml)' \
|
||||
'--indent=-[Specifies the indent level to use when pretty-printing output.]' \
|
||||
'--exclude=-[An app_label or app_label.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).]' \
|
||||
'--natural-foreign[Use natural foreign keys if they are available.]' \
|
||||
'--natural-primary[Use natural primary keys if they are available.]' \
|
||||
"--all[Use Django's base manager to dump all models stored in the database.]" \
|
||||
'--pks=-[Only dump objects with given primary keys.]' \
|
||||
$db_args \
|
||||
$nul_args \
|
||||
'*::appname:_applist' && ret=0
|
||||
}
|
||||
|
||||
_managepy-flush(){
|
||||
_arguments -s : \
|
||||
$no_init_data_args \
|
||||
$db_args \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-help(){
|
||||
_arguments -s : \
|
||||
'*:command:_managepy_cmds' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy_cmds(){
|
||||
local line
|
||||
local -a cmd
|
||||
_call_program help-command ./manage.py help \
|
||||
|& sed -n '/^ /s/[(), ]/ /gp' \
|
||||
| while read -A line; do cmd=($line $cmd) done
|
||||
_describe -t managepy-command 'manage.py command' cmd
|
||||
}
|
||||
|
||||
_managepy-inspectdb(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-loaddata(){
|
||||
_arguments -s : \
|
||||
'--ignorenonexistent[Ignores entries in the serialized data for fields that do not currently exist on the model.]' \
|
||||
'--app=-[Only look for fixtures in the specified app.]:appname:_applist' \
|
||||
'*::file:_files' \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-makemessages(){
|
||||
_arguments -s : \
|
||||
'--locale=-[Creates or updates the message files for the given locale(s) (e.g. pt_BR).]' \
|
||||
'--domain=-[The domain of the message files (default: "django").]' \
|
||||
'--all[Updates the message files for all existing locales.]' \
|
||||
'--extension=-[The file extension(s) to examine (default: "html,txt", or "js" if the domain is "djangojs").]' \
|
||||
'--symlinks[Follows symlinks to directories when examining source code and templates for translation strings.]' \
|
||||
'--ignore=-[Ignore files or directories matching this glob-style pattern.]' \
|
||||
"--no-default-ignore[Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.]" \
|
||||
"--no-wrap[Don't break long message lines into several lines.]" \
|
||||
"--no-location[Don't write '#: filename:line' lines.]" \
|
||||
'--no-obsolete[Remove obsolete message strings.]' \
|
||||
'--keep-pot[Keep .pot file after making messages.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-makemigrations(){
|
||||
_arguments -s : \
|
||||
'--dry-run[Just show what migrations would be made]' \
|
||||
'--merge[Enable fixing of migration conflicts.]' \
|
||||
'--empty[Create an empty migration.]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-migrate(){
|
||||
_arguments -s : \
|
||||
'--fake[Mark migrations as run without actually running them]' \
|
||||
'--list[Show a list of all known migrations and which are applied]' \
|
||||
$no_init_data_args \
|
||||
$noinput_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-runfcgi(){
|
||||
local state
|
||||
|
||||
local fcgi_opts
|
||||
fcgi_opts=(
|
||||
'protocol[fcgi, scgi, ajp, ... (default fcgi)]:protocol:(fcgi scgi ajp)'
|
||||
'host[hostname to listen on..]:'
|
||||
'port[port to listen on.]:'
|
||||
'socket[UNIX socket to listen on.]:file:_files'
|
||||
'method[prefork or threaded (default prefork)]:method:(prefork threaded)'
|
||||
'maxrequests[number of requests a child handles before it is killed and a new child is forked (0 = no limit).]:'
|
||||
'maxspare[max number of spare processes / threads.]:'
|
||||
'minspare[min number of spare processes / threads.]:'
|
||||
'maxchildren[hard limit number of processes / threads.]:'
|
||||
'daemonize[whether to detach from terminal.]:boolean:(False True)'
|
||||
'pidfile[write the spawned process-id to this file.]:file:_files'
|
||||
'workdir[change to this directory when daemonizing.]:directory:_files'
|
||||
'outlog[write stdout to this file.]:file:_files'
|
||||
'errlog[write stderr to this file.]:file:_files'
|
||||
)
|
||||
|
||||
_arguments -s : \
|
||||
$nul_args \
|
||||
'*: :_values "FCGI Setting" $fcgi_opts' && ret=0
|
||||
}
|
||||
|
||||
_managepy-runserver(){
|
||||
_arguments -s : \
|
||||
'--ipv6[Tells Django to use an IPv6 address.]' \
|
||||
'--nothreading[Tells Django to NOT use threading.]' \
|
||||
'--noreload[Tells Django to NOT use the auto-reloader.]' \
|
||||
'--nostatic[Tells Django to NOT automatically serve static files at STATIC_URL.]' \
|
||||
'--insecure[Allows serving static files even if DEBUG is False.]' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-shell(){
|
||||
_arguments -s : \
|
||||
'--plain[Tells Django to use plain Python, not IPython.]' \
|
||||
'--no-startup[When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.]' \
|
||||
'--interface=-[Specify an interactive interpreter interface.]:INTERFACE:((ipython bpython))' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sql(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlall(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlclear(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlcustom(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-dropindexes(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlflush(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlindexes(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlinitialdata(){
|
||||
_arguments -s : \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-sqlsequencereset(){
|
||||
_arguments -s : \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-squashmigrations(){
|
||||
_arguments -s : \
|
||||
'--no-optimize[Do not try to optimize the squashed operations.]' \
|
||||
$noinput_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-startapp(){
|
||||
_arguments -s : \
|
||||
$start_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
_managepy-startproject(){
|
||||
_arguments -s : \
|
||||
$start_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-syncdb() {
|
||||
_arguments -s : \
|
||||
$noinput_args \
|
||||
$no_init_data_args \
|
||||
$db_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-test() {
|
||||
_arguments -s : \
|
||||
'--failfast[Tells Django to stop running the test suite after first failed test.]' \
|
||||
'--testrunner=-[Tells Django to use specified test runner class instead of the one specified by the TEST_RUNNER setting.]' \
|
||||
'--liveserver=-[Overrides the default address where the live server (used with LiveServerTestCase) is expected to run from. The default value is localhost:8081.]' \
|
||||
'--top-level-directory=-[Top level of project for unittest discovery.]' \
|
||||
'--pattern=-[The test matching pattern. Defaults to test*.py.]:' \
|
||||
$noinput_args \
|
||||
'*::appname:_applist' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-testserver() {
|
||||
_arguments -s : \
|
||||
'--addrport=-[port number or ipaddr:port to run the server on.]' \
|
||||
'--ipv6[Tells Django to use an IPv6 address.]' \
|
||||
$noinput_args \
|
||||
'*::fixture:_files' \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-validate() {
|
||||
_arguments -s : \
|
||||
$tag_args \
|
||||
$nul_args && ret=0
|
||||
}
|
||||
|
||||
_managepy-commands() {
|
||||
local -a commands
|
||||
|
||||
commands=(
|
||||
"changepassword:Change a user's password for django.contrib.auth."
|
||||
'check:Checks the entire Django project for potential problems.'
|
||||
'compilemessages:Compiles .po files to .mo files for use with builtin gettext support.'
|
||||
'createcachetable:Creates the table needed to use the SQL cache backend.'
|
||||
'createsuperuser:Used to create a superuser.'
|
||||
'collectstatic:Collect static files in a single location.'
|
||||
'dbshell:Runs the command-line client for the current DATABASE_ENGINE.'
|
||||
"diffsettings:Displays differences between the current settings.py and Django's default settings."
|
||||
'dumpdata:Output the contents of the database as a fixture of the given format.'
|
||||
'flush:Executes ``sqlflush`` on the current database.'
|
||||
'help:manage.py help.'
|
||||
'inspectdb:Introspects the database tables in the given database and outputs a Django model module.'
|
||||
'loaddata:Installs the named fixture(s) in the database.'
|
||||
'makemessages:Runs over the entire source tree of the current directory and pulls out all strings marked for translation.'
|
||||
'makemigrations:Creates new migration(s) for apps.'
|
||||
'migrate:Updates database schema. Manages both apps with migrations and those without.'
|
||||
'runfcgi:Run this project as a fastcgi (or some other protocol supported by flup) application,'
|
||||
'runserver:Starts a lightweight Web server for development.'
|
||||
'shell:Runs a Python interactive interpreter.'
|
||||
'showmigrations:Shows all available migrations for the current project.'
|
||||
'sql:Prints the CREATE TABLE SQL statements for the given app name(s).'
|
||||
'sqlall:Prints the CREATE TABLE, custom SQL and CREATE INDEX SQL statements for the given model module name(s).'
|
||||
'sqlclear:Prints the DROP TABLE SQL statements for the given app name(s).'
|
||||
'sqlcustom:Prints the custom table modifying SQL statements for the given app name(s).'
|
||||
'sqldropindexes:Prints the DROP INDEX SQL statements for the given model module name(s).'
|
||||
'sqlflush:Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed.'
|
||||
'sqlindexes:Prints the CREATE INDEX SQL statements for the given model module name(s).'
|
||||
"sqlinitialdata:RENAMED: see 'sqlcustom'"
|
||||
'sqlsequencereset:Prints the SQL statements for resetting sequences for the given app name(s).'
|
||||
'squashmigrations:Squashes an existing set of migrations (from first until specified) into a single new one.'
|
||||
"startapp:Creates a Django app directory structure for the given app name in this project's directory."
|
||||
"startproject:Creates a Django project directory structure for the given project name in this current directory."
|
||||
"syncdb:Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
|
||||
'test:Runs the test suite for the specified applications, or the entire site if no apps are specified.'
|
||||
'testserver:Runs a development server with data from the given fixture(s).'
|
||||
'validate:Validates all installed models.'
|
||||
)
|
||||
|
||||
_describe -t commands 'manage.py command' commands && ret=0
|
||||
}
|
||||
|
||||
_applist() {
|
||||
local line
|
||||
local -a apps
|
||||
_call_program help-command "python -c \"import sys; del sys.path[0];\\
|
||||
import os.path as op, re, django.conf;\\
|
||||
bn=op.basename(op.abspath(op.curdir));[sys\\
|
||||
.stdout.write(str(re.sub(r'^%s\.(.*?)$' %
|
||||
bn, r'\1', i)) + '\n') for i in django.conf.settings.\\
|
||||
INSTALLED_APPS if re.match(r'^%s' % bn, i)]\"" \
|
||||
| while read -A line; do apps=($line $apps) done
|
||||
_values 'Application' $apps && ret=0
|
||||
}
|
||||
|
||||
_managepy() {
|
||||
local curcontext=$curcontext ret=1
|
||||
|
||||
if ((CURRENT == 2)); then
|
||||
_managepy-commands
|
||||
else
|
||||
shift words
|
||||
(( CURRENT -- ))
|
||||
curcontext="${curcontext%:*:*}:managepy-$words[1]:"
|
||||
_call_function ret _managepy-$words[1]
|
||||
fi
|
||||
}
|
||||
|
||||
compdef _managepy manage.py
|
||||
compdef _managepy django
|
||||
compdef _managepy django-admin
|
||||
compdef _managepy django-admin.py
|
||||
compdef _managepy django-manage
|
||||
|
|
@ -121,12 +121,6 @@ __docker-compose_subcommand() {
|
|||
'--parallel[Build images in parallel.]' \
|
||||
'*:services:__docker-compose_services_from_build' && ret=0
|
||||
;;
|
||||
(bundle)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
'--push-images[Automatically push images for any services which have a `build` option specified.]' \
|
||||
'(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
|
||||
;;
|
||||
(config)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
|
|
@ -290,7 +284,7 @@ __docker-compose_subcommand() {
|
|||
(up)
|
||||
_arguments \
|
||||
$opts_help \
|
||||
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \
|
||||
'(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \
|
||||
$opts_no_color \
|
||||
$opts_no_deps \
|
||||
$opts_force_recreate \
|
||||
|
|
@ -298,6 +292,7 @@ __docker-compose_subcommand() {
|
|||
$opts_no_build \
|
||||
"(--no-build)--build[Build images before starting containers.]" \
|
||||
"(-d)--abort-on-container-exit[Stops all containers if any container was stopped. Incompatible with -d.]" \
|
||||
"(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \
|
||||
'(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \
|
||||
'--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \
|
||||
'--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \
|
||||
|
|
@ -341,11 +336,13 @@ _docker-compose() {
|
|||
'(- :)'{-h,--help}'[Get help]' \
|
||||
'*'{-f,--file}"[${file_description}]:file:_files -g '*.yml'" \
|
||||
'(-p --project-name)'{-p,--project-name}'[Specify an alternate project name (default: directory name)]:project name:' \
|
||||
'--env-file[Specify an alternate environment file (default: .env)]:env-file:_files' \
|
||||
"--compatibility[If set, Compose will attempt to convert keys in v3 files to their non-Swarm equivalent]" \
|
||||
'(- :)'{-v,--version}'[Print version and exit]' \
|
||||
'--verbose[Show more output]' \
|
||||
'--log-level=[Set log level]:level:(DEBUG INFO WARNING ERROR CRITICAL)' \
|
||||
'--no-ansi[Do not print ANSI control characters]' \
|
||||
'--ansi=[Control when to print ANSI control characters]:when:(never always auto)' \
|
||||
'(-H --host)'{-H,--host}'[Daemon socket to connect to]:host:' \
|
||||
'--tls[Use TLS; implied by --tlsverify]' \
|
||||
'--tlscacert=[Trust certs signed only by this CA]:ca path:' \
|
||||
|
|
@ -359,6 +356,7 @@ _docker-compose() {
|
|||
local -a relevant_compose_flags relevant_compose_repeatable_flags relevant_docker_flags compose_options docker_options
|
||||
|
||||
relevant_compose_flags=(
|
||||
"--env-file"
|
||||
"--file" "-f"
|
||||
"--host" "-H"
|
||||
"--project-name" "-p"
|
||||
|
|
|
|||
|
|
@ -1,28 +1,22 @@
|
|||
# Authors:
|
||||
# https://github.com/tristola
|
||||
#
|
||||
# Docker-compose related zsh aliases
|
||||
# support Compose v2 as docker CLI plugin
|
||||
(( ${+commands[docker-compose]} )) && dccmd='docker-compose' || dccmd='docker compose'
|
||||
|
||||
# Aliases ###################################################################
|
||||
alias dco="$dccmd"
|
||||
alias dcb="$dccmd build"
|
||||
alias dce="$dccmd exec"
|
||||
alias dcps="$dccmd ps"
|
||||
alias dcrestart="$dccmd restart"
|
||||
alias dcrm="$dccmd rm"
|
||||
alias dcr="$dccmd run"
|
||||
alias dcstop="$dccmd stop"
|
||||
alias dcup="$dccmd up"
|
||||
alias dcupb="$dccmd up --build"
|
||||
alias dcupd="$dccmd up -d"
|
||||
alias dcdn="$dccmd down"
|
||||
alias dcl="$dccmd logs"
|
||||
alias dclf="$dccmd logs -f"
|
||||
alias dcpull="$dccmd pull"
|
||||
alias dcstart="$dccmd start"
|
||||
alias dck="$dccmd kill"
|
||||
|
||||
# Use dco as alias for docker-compose, since dc on *nix is 'dc - an arbitrary precision calculator'
|
||||
# https://www.gnu.org/software/bc/manual/dc-1.05/html_mono/dc.html
|
||||
|
||||
alias dco='docker-compose'
|
||||
|
||||
alias dcb='docker-compose build'
|
||||
alias dce='docker-compose exec'
|
||||
alias dcps='docker-compose ps'
|
||||
alias dcrestart='docker-compose restart'
|
||||
alias dcrm='docker-compose rm'
|
||||
alias dcr='docker-compose run'
|
||||
alias dcstop='docker-compose stop'
|
||||
alias dcup='docker-compose up'
|
||||
alias dcupb='docker-compose up --build'
|
||||
alias dcupd='docker-compose up -d'
|
||||
alias dcdn='docker-compose down'
|
||||
alias dcl='docker-compose logs'
|
||||
alias dclf='docker-compose logs -f'
|
||||
alias dcpull='docker-compose pull'
|
||||
alias dcstart='docker-compose start'
|
||||
alias dck='docker-compose kill'
|
||||
unset dccmd
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ __docker-machine_filters() {
|
|||
}
|
||||
|
||||
__get_swarm_discovery() {
|
||||
declare -a masters serivces
|
||||
declare -a masters services
|
||||
local service
|
||||
services=()
|
||||
masters=($(docker-machine ls -f {{.Swarm}} |grep '(master)' |awk '{print $1}'))
|
||||
|
|
@ -169,7 +169,7 @@ __get_create_argument() {
|
|||
__docker-machine_subcommand() {
|
||||
local -a opts_help
|
||||
opts_help=("(- :)--help[Print usage]")
|
||||
local -a opts_only_host opts_driver opts_storage_driver opts_stragery
|
||||
local -a opts_only_host opts_driver opts_storage_driver opts_state
|
||||
opts_only_host=(
|
||||
"$opts_help"
|
||||
"*:host:__docker-machine_hosts_all"
|
||||
|
|
@ -330,7 +330,7 @@ _docker-machine() {
|
|||
_arguments -C \
|
||||
"(- :)"{-h,--help}"[Show help]" \
|
||||
"(-D --debug)"{-D,--debug}"[Enable debug mode]" \
|
||||
'(-s --stroage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
|
||||
'(-s --storage-path)'{-s,--storage-path}'[Configures storage path]:file:_files' \
|
||||
'--tls-ca-cert[CA to verify remotes against]:file:_files' \
|
||||
'--tls-ca-key[Private key to generate certificates]:file:_files' \
|
||||
'--tls-client-cert[Client cert to use for TLS]:file:_files' \
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
# Docker plugin
|
||||
|
||||
This plugin adds auto-completion for [docker](https://www.docker.com/).
|
||||
This plugin adds auto-completion and aliases for [docker](https://www.docker.com/).
|
||||
|
||||
To use it add `docker` to the plugins array in your zshrc file.
|
||||
|
||||
|
|
@ -28,7 +28,46 @@ the lines below to your zshrc file**, but be aware of the side effects:
|
|||
>
|
||||
> Therefore, this behavior is disabled by default. To enable it:
|
||||
>
|
||||
> ```
|
||||
> ```sh
|
||||
> zstyle ':completion:*:*:docker:*' option-stacking yes
|
||||
> zstyle ':completion:*:*:docker-*:*' option-stacking yes
|
||||
> ```
|
||||
|
||||
## Aliases
|
||||
|
||||
| Alias | Command | Description |
|
||||
| :------ | :-------------------------- | :--------------------------------------------------------------------------------------- |
|
||||
| dbl | `docker build` | Build an image from a Dockerfile |
|
||||
| dcin | `docker container inspect` | Display detailed information on one or more containers |
|
||||
| dlo | `docker container logs` | Fetch the logs of a docker container |
|
||||
| dcls | `docker container ls` | List all the running docker containers |
|
||||
| dclsa | `docker container ls -a` | List all running and stopped containers |
|
||||
| dpo | `docker container port` | List port mappings or a specific mapping for the container |
|
||||
| dpu | `docker pull` | Pull an image or a repository from a registry |
|
||||
| dr | `docker container run` | Create a new container and start it using the specified command |
|
||||
| drit | `docker container run -it` | Create a new container and start it in an interactive shell |
|
||||
| drm | `docker container rm` | Remove the specified container(s) |
|
||||
| drm! | `docker container rm -f` | Force the removal of a running container (uses SIGKILL) |
|
||||
| dst | `docker container start` | Start one or more stopped containers |
|
||||
| dstp | `docker container stop` | Stop one or more running containers |
|
||||
| dtop | `docker top` | Display the running processes of a container |
|
||||
| dxc | `docker container exec` | Run a new command in a running container |
|
||||
| dxcit | `docker container exec -it` | Run a new command in a running container in an interactive shell |
|
||||
| | | **Docker Images** |
|
||||
| dib | `docker image build` | Build an image from a Dockerfile (same as docker build) |
|
||||
| dii | `docker image inspect` | Display detailed information on one or more images |
|
||||
| dils | `docker image ls` | List docker images |
|
||||
| dip | `docker image push` | Push an image or repository to a remote registry |
|
||||
| dirm | `docker image rm` | Remove one or more images |
|
||||
| dit | `docker image tag` | Add a name and tag to a particular image |
|
||||
| | | **Docker Network** |
|
||||
| dnc | `docker network create` | Create a new network |
|
||||
| dncn | `docker network connect` | Connect a container to a network |
|
||||
| dndcn | `docker network disconnect` | Disconnect a container from a network |
|
||||
| dni | `docker network inspect` | Return information about one or more networks |
|
||||
| dnls | `docker network ls` | List all networks the engine daemon knows about, including those spanning multiple hosts |
|
||||
| dnrm | `docker network rm` | Remove one or more networks |
|
||||
| | | **Docker Volume** |
|
||||
| dvi | `docker volume inspect` | Display detailed information about one or more volumes |
|
||||
| dvls | `docker volume ls` | List all the volumes known to docker |
|
||||
| dvprune | `docker volume prune` | Cleanup dangling volumes |
|
||||
|
|
|
|||
|
|
@ -1343,7 +1343,7 @@ __docker_node_complete_ls_filters() {
|
|||
;;
|
||||
esac
|
||||
else
|
||||
opts=('id' 'label' 'membership' 'name' 'role')
|
||||
opts=('id' 'label' 'membership' 'name' 'node.label' 'role')
|
||||
_describe -t filter-opts "filter options" opts -qS "=" && ret=0
|
||||
fi
|
||||
|
||||
|
|
@ -2544,6 +2544,82 @@ __docker_volume_subcommand() {
|
|||
|
||||
# EO volume
|
||||
|
||||
# BO context
|
||||
|
||||
__docker_complete_contexts() {
|
||||
[[ $PREFIX = -* ]] && return 1
|
||||
integer ret=1
|
||||
declare -a contexts
|
||||
|
||||
contexts=(${(f)${:-"$(_call_program commands docker $docker_options context ls -q)"$'\n'}})
|
||||
|
||||
_describe -t context-list "context" contexts && ret=0
|
||||
return ret
|
||||
}
|
||||
|
||||
__docker_context_commands() {
|
||||
local -a _docker_context_subcommands
|
||||
_docker_context_subcommands=(
|
||||
"create:Create new context"
|
||||
"inspect:Display detailed information on one or more contexts"
|
||||
"list:List available contexts"
|
||||
"rm:Remove one or more contexts"
|
||||
"show:Print the current context"
|
||||
"update:Update a context"
|
||||
"use:Set the default context"
|
||||
)
|
||||
_describe -t docker-context-commands "docker context command" _docker_context_subcommands
|
||||
}
|
||||
|
||||
__docker_context_subcommand() {
|
||||
local -a _command_args opts_help
|
||||
local expl help="--help"
|
||||
integer ret=1
|
||||
|
||||
opts_help=("(: -)--help[Print usage]")
|
||||
|
||||
case "$words[1]" in
|
||||
(create)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help)--from=[Create context from a named context]:from:__docker_complete_contexts" \
|
||||
"($help -):name: " && ret=0
|
||||
;;
|
||||
(use)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(inspect)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(rm)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -)1:context:__docker_complete_contexts" && ret=0
|
||||
;;
|
||||
(update)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help)--default-stack-orchestrator=[Default orchestrator for stack operations to use with this context]:default-stack-orchestrator:(swarm kubernetes all)" \
|
||||
"($help)--description=[Description of the context]:description:" \
|
||||
"($help)--docker=[Set the docker endpoint]:docker:" \
|
||||
"($help)--kubernetes=[Set the kubernetes endpoint]:kubernetes:" \
|
||||
"($help -):name:" && ret=0
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
# EO context
|
||||
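The context completions above target the standard `docker context` subcommands; illustrative invocations (the context name and SSH endpoint are hypothetical):

```zsh
docker context create remote --docker "host=ssh://user@build-host"
docker context use remote
docker context ls
```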
|
||||
__docker_caching_policy() {
|
||||
oldp=( "$1"(Nmh+1) ) # 1 hour
|
||||
(( $#oldp ))
|
||||
|
|
@ -2576,7 +2652,7 @@ __docker_commands() {
|
|||
then
|
||||
local -a lines
|
||||
lines=(${(f)"$(_call_program commands docker 2>&1)"})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/ ##/:})
|
||||
_docker_subcommands=(${${${(M)${lines[$((${lines[(i)*Commands:]} + 1)),-1]}:# *}## #}/\*# ##/:})
|
||||
_docker_subcommands=($_docker_subcommands 'daemon:Enable daemon mode' 'help:Show help for a command')
|
||||
(( $#_docker_subcommands > 2 )) && _store_cache docker_subcommands _docker_subcommands
|
||||
fi
|
||||
|
|
@ -2631,6 +2707,23 @@ __docker_subcommand() {
|
|||
;;
|
||||
esac
|
||||
;;
|
||||
(context)
|
||||
local curcontext="$curcontext" state
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
"($help -): :->command" \
|
||||
"($help -)*:: :->option-or-argument" && ret=0
|
||||
|
||||
case $state in
|
||||
(command)
|
||||
__docker_context_commands && ret=0
|
||||
;;
|
||||
(option-or-argument)
|
||||
curcontext=${curcontext%:*:*}:docker-${words[-1]}:
|
||||
__docker_context_subcommand && ret=0
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
(daemon)
|
||||
_arguments $(__docker_arguments) \
|
||||
$opts_help \
|
||||
|
|
@ -2698,7 +2791,8 @@ __docker_subcommand() {
|
|||
"($help)--tlsverify[Use TLS and verify the remote]" \
|
||||
"($help)--userns-remap=[User/Group setting for user namespaces]:user\:group:->users-groups" \
|
||||
"($help)--userland-proxy[Use userland proxy for loopback traffic]" \
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" && ret=0
|
||||
"($help)--userland-proxy-path=[Path to the userland proxy binary]:binary:_files" \
|
||||
"($help)--validate[Validate daemon configuration and exit]" && ret=0
|
||||
|
||||
case $state in
|
||||
(cluster-store)
|
||||
|
|
|
|||
39
plugins/docker/docker.plugin.zsh
Normal file

@ -0,0 +1,39 @@
alias dbl='docker build'
alias dpu='docker pull'
alias dtop='docker top'

# docker containers
alias dcin='docker container inspect'
alias dlo='docker container logs'
alias dcls='docker container ls'
alias dclsa='docker container ls -a'
alias dpo='docker container port'
alias dr='docker container run'
alias drit='docker container run -it'
alias drm='docker container rm'
alias 'drm!'='docker container rm -f'
alias dst='docker container start'
alias dstp='docker container stop'
alias dxc='docker container exec'
alias dxcit='docker container exec -it'

# docker images
alias dib='docker image build'
alias dii='docker image inspect'
alias dils='docker image ls'
alias dip='docker image push'
alias dirm='docker image rm'
alias dit='docker image tag'

# docker network
alias dnc='docker network create'
alias dncn='docker network connect'
alias dndcn='docker network disconnect'
alias dni='docker network inspect'
alias dnls='docker network ls'
alias dnrm='docker network rm'

# docker volume
alias dvi='docker volume inspect'
alias dvls='docker volume ls'
alias dvprune='docker volume prune'
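A few illustrative invocations of the aliases above (they require a running Docker daemon; the image name is an example):

```zsh
dils                  # docker image ls
drit alpine:3.19 sh   # docker container run -it alpine:3.19 sh
dclsa                 # docker container ls -a
```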
|
|
@ -23,12 +23,12 @@ source_env() {
|
|||
touch "$ZSH_DOTENV_DISALLOWED_LIST"
|
||||
|
||||
# early return if disallowed
|
||||
if command grep -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
if command grep -Fx -q "$dirpath" "$ZSH_DOTENV_DISALLOWED_LIST" &>/dev/null; then
|
||||
return
|
||||
fi
|
||||
|
||||
# check if current directory's .env file is allowed or ask for confirmation
|
||||
if ! command grep -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
if ! command grep -Fx -q "$dirpath" "$ZSH_DOTENV_ALLOWED_LIST" &>/dev/null; then
|
||||
# get cursor column and print new line before prompt if not at line beginning
|
||||
local column
|
||||
echo -ne "\e[6n" > /dev/tty
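The `-Fx` flags added to the two `grep` calls earlier in this hunk matter because `$dirpath` is a literal directory path, not a regular expression: `-F` turns off pattern interpretation and `-x` requires the whole line to match, so an entry can no longer be matched by a path that merely contains it. A rough sketch of the difference, with made-up paths:

```zsh
echo /home/user/project-backup > /tmp/allowed    # hypothetical allow-list with one entry

dirpath=/home/user/project
command grep -q   "$dirpath" /tmp/allowed   # exit 0: substring match, wrongly treated as allowed
command grep -Fxq "$dirpath" /tmp/allowed   # exit 1: only an exact, whole-line match counts
```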
|
||||
|
|
@ -52,7 +52,10 @@ source_env() {
|
|||
fi
|
||||
|
||||
# test .env syntax
|
||||
zsh -fn $ZSH_DOTENV_FILE || echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
zsh -fn $ZSH_DOTENV_FILE || {
|
||||
echo "dotenv: error when sourcing '$ZSH_DOTENV_FILE' file" >&2
|
||||
return 1
|
||||
}
|
||||
|
||||
setopt localoptions allexport
|
||||
source $ZSH_DOTENV_FILE
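The `zsh -fn` guard above now aborts `source_env` instead of only printing a warning: `-f` skips startup files and `-n` parses the file without executing it, so a malformed `.env` is rejected before `source` ever runs. A quick way to see the check in action (file path made up):

```zsh
print 'FOO=(unclosed' > /tmp/bad.env   # deliberately broken syntax
zsh -fn /tmp/bad.env || echo "dotenv would refuse to source this file"
```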
|
||||
|
|
|
|||
|
|
@ -21,3 +21,4 @@ plugins=(... dotnet)
|
|||
| da | dotnet add | Add a package or reference to a .NET project. |
|
||||
| dp | dotnet pack | Create a NuGet package. |
|
||||
| dng | dotnet nuget | Provides additional NuGet commands. |
|
||||
| db | dotnet build | Build a .NET project |
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ _dotnet_zsh_complete()
|
|||
return
|
||||
fi
|
||||
|
||||
# This is not a variable assigment, don't remove spaces!
|
||||
# This is not a variable assignment, don't remove spaces!
|
||||
_values = "${(ps:\n:)completions}"
|
||||
}
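The completion above leans on the `${(ps:\n:)...}` expansion: the `s:\n:` flag splits the string on a separator and `p` makes `\n` mean a real newline rather than the two characters backslash-n, turning the one-suggestion-per-line output of `dotnet complete` into separate words for `_values`. Roughly:

```zsh
completions=$'build\nrun\ntest'      # the kind of output `dotnet complete` might print
print -l ${(ps:\n:)completions}      # build / run / test as three separate words
```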
|
||||
|
||||
|
|
@ -30,3 +30,4 @@ alias ds='dotnet sln'
|
|||
alias da='dotnet add'
|
||||
alias dp='dotnet pack'
|
||||
alias dng='dotnet nuget'
|
||||
alias db='dotnet build'
|
||||
|
|
|
|||
|
|
@ -1,83 +1,66 @@
|
|||
# Drush
|
||||
|
||||
## Description
|
||||
This plugin offers aliases and functions to make the work with drush easier and more productive.
|
||||
This plugin adds aliases and functions for [Drush](https://www.drush.org), a command-line shell
|
||||
and Unix scripting interface for Drupal. It also adds completion for the `drush` command.
|
||||
|
||||
To enable it, add the `drush` to your `plugins` array in `~/.zshrc`:
|
||||
To enable it, add `drush` to the plugins array in zshrc file:
|
||||
|
||||
```
|
||||
```zsh
|
||||
plugins=(... drush)
|
||||
```
|
||||
|
||||
## Aliases
|
||||
| Alias | Description | Command |
|
||||
|-------|-----------------------------------------------------------------------|-----------------------------|
|
||||
| dr | Display drush help | drush |
|
||||
| drca | Clear all drupal caches. | drush cc all |
|
||||
| drcb | Clear block cache. | drush cc block |
|
||||
| drcg | Clear registry cache. | drush cc registry |
|
||||
| drcj | Clear css-js cache. | drush cc css-js |
|
||||
| drcm | Clear menu cache. | drush cc menu |
|
||||
| drcml | Clear module-list cache. | drush cc module-list |
|
||||
| drcr | Run all cron hooks in all active modules for specified site. | drush core-cron |
|
||||
| drct | Clear theme-registry cache. | drush cc theme-registry |
|
||||
| drcv | Clear views cache. (Make sure that the views module is enabled) | drush cc views |
|
||||
| drdmp | Backup database in a new dump.sql file | drush drush sql-dump --ordered-dump --result-file=dump.sql|
|
||||
| drf | Display features status | drush features |
|
||||
| drfr | Revert a feature module on your site. | drush features-revert -y |
|
||||
| drfu | Update a feature module on your site. | drush features-update -y |
|
||||
| drfra | Revert all enabled feature module on your site. | drush features-revert-all |
|
||||
| drif | Flush all derived images. | drush image-flush --all |
|
||||
| drpm | Show a list of available modules. | drush pm-list --type=module |
|
||||
| drst | Provides a birds-eye view of the current Drupal installation, if any. | drush core-status |
|
||||
| drup | Apply any database updates required (as with running update.php). | drush updatedb |
|
||||
| drups | List any pending database updates. | drush updatedb-status |
|
||||
| drv | Show drush version. | drush version |
|
||||
| drvd | Delete a variable. | drush variable-del |
|
||||
| drvg | Get a list of some or all site variables and values. | drush variable-get |
|
||||
| drvs | Set a variable. | drush variable-set |
|
||||
|
||||
| Alias | Command | Description |
|
||||
| ------- | ----------------------------------------------------------- | -------------------------------------------------------------------- |
|
||||
| `dr` | `drush` | Display drush help |
|
||||
| `drca` | `drush cc all` | _(Deprecated in Drush 8)_ Clear all drupal caches |
|
||||
| `drcb` | `drush cc block` | _(Deprecated in Drush 8)_ Clear block cache |
|
||||
| `drcex` | `drush config:export -y` | Export Drupal configuration to a directory |
|
||||
| `drcg` | `drush cc registry` | _(Deprecated in Drush 8)_ Clear registry cache |
|
||||
| `drcim` | `drush config:import -y` | Import config from a config directory |
|
||||
| `drcj` | `drush cc css-js` | Clear css-js cache |
|
||||
| `drcm` | `drush cc menu` | Clear menu cache |
|
||||
| `drcml` | `drush cc module-list` | Clear module-list cache |
|
||||
| `drcr` | `drush core-cron` | Run all cron hooks in all active modules for specified site |
|
||||
| `drct` | `drush cc theme-registry` | Clear theme-registry cache |
|
||||
| `drcv` | `drush cc views` | Clear views cache _(make sure that the views module is enabled)_ |
|
||||
| `drdmp` | `drush sql-dump --ordered-dump --result-file=dump.sql` | Backup database in a new dump.sql file |
|
||||
| `drf` | `drush features` | Display features status |
|
||||
| `drfr` | `drush features-revert -y` | Revert a feature module on your site |
|
||||
| `drfra` | `drush features-revert-all` | Revert all enabled feature module on your site |
|
||||
| `drfu` | `drush features-update -y` | Update a feature module on your site |
|
||||
| `drif` | `drush image-flush --all` | Flush all derived images |
|
||||
| `drpm` | `drush pm-list --type=module` | Show a list of available modules |
|
||||
| `drst` | `drush core-status` | Provides a birds-eye view of the current Drupal installation, if any |
|
||||
| `druli` | `drush user:login` | Display a one time login link for user ID 1, or another user |
|
||||
| `drup` | `drush updatedb` | Apply any database updates required (as with running update.php) |
|
||||
| `drups` | `drush updatedb-status` | List any pending database updates |
|
||||
| `drv` | `drush version` | Show drush version |
|
||||
| `drvd` | `drush variable-del` | Delete a variable |
|
||||
| `drvg` | `drush variable-get` | Get a list of some or all site variables and values |
|
||||
| `drvs` | `drush variable-set` | Set a variable |
|
||||
| `drws` | `drush watchdog:show` | Show watchdog messages |
|
||||
| `drwse` | `drush watchdog:show --extended` | Show watchdog messages with extended information |
|
||||
| `drwst` | `drush watchdog:tail` | Tail watchdog messages |
|
||||
|
||||
## Functions
|
||||
|
||||
### dren
|
||||
Download and enable one or more extensions (modules or themes).
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`dren devel` or `dren devel module_filter views`
|
||||
- `dren`: download and enable one or more extensions (modules or themes). Must be
|
||||
invoked with one or more parameters, e.g.: `dren devel` or `dren devel module_filter views`.
|
||||
|
||||
### drf
|
||||
Edit drushrc, site alias, and Drupal settings.php files.
|
||||
Can be invoked with one or without parameters. e.g.:
|
||||
`drf 1`
|
||||
- `drf`: edit drushrc, site alias, and Drupal settings.php files.
|
||||
Can be invoked with one or without parameters, e.g.: `drf 1`.
|
||||
|
||||
### dris
|
||||
Disable one or more extensions (modules or themes)
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`dris devel` or `dris devel module_filter views`
|
||||
- `dris`: disable one or more extensions (modules or themes). Must be invoked with
|
||||
one or more parameters, e.g.: `dris devel` or `dris devel module_filter views`.
|
||||
|
||||
### drpu
|
||||
Uninstall one or more modules.
|
||||
Must be invoked with one or more parameters. e.g.:
|
||||
`drpu devel` or `drpu devel module_filter views`
|
||||
- `drpu`: uninstall one or more modules. Must be invoked with one or more
|
||||
parameters, e.g.: `drpu devel` or `drpu devel module_filter views`.
|
||||
|
||||
### drnew
|
||||
Creates a brand new drupal website.
|
||||
Note: As soon as the installation is complete, drush will print a username and a random password into the terminal:
|
||||
```
|
||||
Installation complete. User name: admin User password: cf7t8yqNEm
|
||||
```
|
||||
- `drnew`: creates a brand new drupal website. Note: as soon as the installation
|
||||
is complete, `drush` will print a username and a random password into the terminal:
|
||||
|
||||
## Additional features
|
||||
|
||||
### Autocomplete
|
||||
The [completion script for drush](https://github.com/drush-ops/drush/blob/8.0.1/drush.complete.sh) comes enabled with this plugin.
|
||||
So, it is possible to type a command:
|
||||
```
|
||||
drush sql
|
||||
```
|
||||
|
||||
And as soon as the tab key is pressed, the script will display the available commands:
|
||||
```
|
||||
drush sql
|
||||
sqlc sql-conf sql-create sql-dump sql-query sql-sanitize
|
||||
sql-cli sql-connect sql-drop sqlq sqlsan sql-sync
|
||||
```
|
||||
```text
|
||||
Installation complete. User name: admin User password: cf7t8yqNEm
|
||||
```
|
||||
|
|
|
|||
|
|
@ -1,19 +1,18 @@
|
|||
# Drush support.
|
||||
|
||||
# Functions
|
||||
function dren() {
|
||||
drush en $@ -y
|
||||
drush en "$@" -y
|
||||
}
|
||||
|
||||
function dris() {
|
||||
drush pm-disable $@ -y
|
||||
drush pm-disable "$@" -y
|
||||
}
|
||||
|
||||
function drpu() {
|
||||
drush pm-uninstall $@ -y
|
||||
drush pm-uninstall "$@" -y
|
||||
}
|
||||
|
||||
function drf() {
|
||||
if [[ $1 == "" ]] then
|
||||
if [[ -z "$1" ]] then
|
||||
drush core-config
|
||||
else
|
||||
drush core-config --choice=$1
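One detail worth calling out in this hunk: quoting `$@` as `"$@"` makes `dren`, `dris` and `drpu` pass every argument through to drush exactly as given. Under zsh's default options an unquoted `$@` is not word-split, but it does drop empty arguments, so the quoted form is the robust one. A small check (the function names here are throwaway examples):

```zsh
show-unquoted() { print -rl -- $@   | wc -l }
show-quoted()   { print -rl -- "$@" | wc -l }

show-unquoted one "" three   # 2 -> the empty argument disappears
show-quoted   one "" three   # 3 -> all three arguments survive
```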
|
||||
|
|
@ -21,62 +20,62 @@ function drf() {
|
|||
}
|
||||
|
||||
function drfi() {
|
||||
if [[ $1 == "fields" ]]; then
|
||||
drush field-info fields
|
||||
elif [[ $1 == "types" ]]; then
|
||||
drush field-info types
|
||||
else
|
||||
drush field-info
|
||||
fi
|
||||
case "$1" in
|
||||
fields) drush field-info fields ;;
|
||||
types) drush field-info types ;;
|
||||
*) drush field-info ;;
|
||||
esac
|
||||
}
|
||||
|
||||
function drnew() {
|
||||
(
|
||||
cd
|
||||
echo "Website's name: "
|
||||
read WEBSITE_NAME
|
||||
|
||||
cd ~
|
||||
echo "Website's name: "
|
||||
read WEBSITE_NAME
|
||||
HOST=http://$(hostname -i)/
|
||||
|
||||
HOST=http://$(hostname -i)/
|
||||
if [[ $WEBSITE_NAME == "" ]] then
|
||||
MINUTES=$(date +%M:%S)
|
||||
WEBSITE_NAME="Drupal-$MINUTES"
|
||||
echo "Your website will be named: $WEBSITE_NAME"
|
||||
fi
|
||||
|
||||
if [[ $WEBSITE_NAME == "" ]] then
|
||||
MINUTES=$(date +%M:%S)
|
||||
WEBSITE_NAME="Drupal-$MINUTES"
|
||||
echo "Your website will be named: $WEBSITE_NAME"
|
||||
fi
|
||||
drush dl drupal --drupal-project-rename=$WEBSITE_NAME
|
||||
|
||||
drush dl drupal --drupal-project-rename=$WEBSITE_NAME
|
||||
echo "Type your localhost directory: (Leave empty for /var/www/html/)"
|
||||
read DIRECTORY
|
||||
|
||||
echo "Type your localhost directory: (Leave empty for /var/www/html/)"
|
||||
read DIRECTORY
|
||||
if [[ $DIRECTORY == "" ]] then
|
||||
DIRECTORY="/var/www/html/"
|
||||
fi
|
||||
|
||||
if [[ $DIRECTORY == "" ]] then
|
||||
DIRECTORY="/var/www/html/"
|
||||
fi
|
||||
echo "Moving to $DIRECTORY$WEBSITE_NAME"
|
||||
sudo mv $WEBSITE_NAME $DIRECTORY
|
||||
cd $DIRECTORY$WEBSITE_NAME
|
||||
|
||||
echo "Moving to $DIRECTORY$WEBSITE_NAME"
|
||||
sudo mv $WEBSITE_NAME $DIRECTORY
|
||||
cd $DIRECTORY$WEBSITE_NAME
|
||||
echo "Database's user: "
|
||||
read DATABASE_USR
|
||||
echo "Database's password: "
|
||||
read -s DATABASE_PWD
|
||||
echo "Database's name for your project: "
|
||||
read DATABASE
|
||||
|
||||
echo "Database's user: "
|
||||
read DATABASE_USR
|
||||
echo "Database's password: "
|
||||
read -s DATABASE_PWD
|
||||
echo "Database's name for your project: "
|
||||
read DATABASE
|
||||
|
||||
DB_URL="mysql://$DATABASE_USR:$DATABASE_PWD@localhost/$DATABASE"
|
||||
drush site-install standard --db-url=$DB_URL --site-name=$WEBSITE_NAME
|
||||
|
||||
open_command $HOST$WEBSITE_NAME
|
||||
echo "Done"
|
||||
DB_URL="mysql://$DATABASE_USR:$DATABASE_PWD@localhost/$DATABASE"
|
||||
drush site-install standard --db-url=$DB_URL --site-name=$WEBSITE_NAME
|
||||
|
||||
open_command $HOST$WEBSITE_NAME
|
||||
echo "Done"
|
||||
)
|
||||
}
|
||||
|
||||
# Aliases, sorted alphabetically.
|
||||
# Aliases
|
||||
alias dr="drush"
|
||||
alias drca="drush cc all" # Deprecated for Drush 8
|
||||
alias drcb="drush cc block" # Deprecated for Drush 8
|
||||
alias drcex="drush config:export -y"
|
||||
alias drcg="drush cc registry" # Deprecated for Drush 8
|
||||
alias drcim="drush config:import -y"
|
||||
alias drcj="drush cc css-js"
|
||||
alias drcm="drush cc menu"
|
||||
alias drcml="drush cc module-list"
|
||||
|
|
@ -86,17 +85,21 @@ alias drcv="drush cc views"
|
|||
alias drdmp="drush sql-dump --ordered-dump --result-file=dump.sql"
|
||||
alias drf="drush features"
|
||||
alias drfr="drush features-revert -y"
|
||||
alias drfu="drush features-update -y"
|
||||
alias drfra="drush features-revert-all"
|
||||
alias drfu="drush features-update -y"
|
||||
alias drif="drush image-flush --all"
|
||||
alias drpm="drush pm-list --type=module"
|
||||
alias drst="drush core-status"
|
||||
alias druli="drush user:login"
|
||||
alias drup="drush updatedb"
|
||||
alias drups="drush updatedb-status"
|
||||
alias drv="drush version"
|
||||
alias drvd="drush variable-del"
|
||||
alias drvg="drush variable-get"
|
||||
alias drvs="drush variable-set"
|
||||
alias drws="drush watchdog:show"
|
||||
alias drwse="drush watchdog:show --extended"
|
||||
alias drwst="drush watchdog:tail"
|
||||
|
||||
# Enable drush autocomplete support
|
||||
autoload bashcompinit
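`bashcompinit` is what lets the plugin reuse drush's bash completion script: once it has run, bash-style `complete` definitions work inside zsh, and the plugin can then source drush's own completion file. A bare-bones illustration (the word list here is invented, purely to show the mechanism):

```zsh
autoload -U bashcompinit && bashcompinit
complete -W "core-status updatedb cache-rebuild" drush   # bash-style spec, now honoured by zsh
```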
|
||||
|
|
|
|||
|
|
@ -9,57 +9,60 @@
|
|||
# - You can share opened buffered across opened frames.
|
||||
# - Configuration changes made at runtime are applied to all frames.
|
||||
|
||||
# Require emacs version to be minimum 24
|
||||
autoload -Uz is-at-least
|
||||
is-at-least 24 "${${(Az)"$(emacsclient --version 2>/dev/null)"}[2]}" || return 0
|
||||
|
||||
if "$ZSH/tools/require_tool.sh" emacsclient 24 2>/dev/null ; then
|
||||
export EMACS_PLUGIN_LAUNCHER="$ZSH/plugins/emacs/emacsclient.sh"
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
# set EDITOR if not already defined.
|
||||
export EDITOR="${EDITOR:-${EMACS_PLUGIN_LAUNCHER}}"
|
||||
# Path to custom emacsclient launcher
|
||||
export EMACS_PLUGIN_LAUNCHER="${0:A:h}/emacsclient.sh"
|
||||
|
||||
alias emacs="$EMACS_PLUGIN_LAUNCHER --no-wait"
|
||||
alias e=emacs
|
||||
# open terminal emacsclient
|
||||
alias te="$EMACS_PLUGIN_LAUNCHER -nw"
|
||||
# set EDITOR if not already defined.
|
||||
export EDITOR="${EDITOR:-${EMACS_PLUGIN_LAUNCHER}}"
|
||||
|
||||
# same as M-x eval but from outside Emacs.
|
||||
alias eeval="$EMACS_PLUGIN_LAUNCHER --eval"
|
||||
# create a new X frame
|
||||
alias eframe='emacsclient --alternate-editor "" --create-frame'
|
||||
alias emacs="$EMACS_PLUGIN_LAUNCHER --no-wait"
|
||||
alias e=emacs
|
||||
# open terminal emacsclient
|
||||
alias te="$EMACS_PLUGIN_LAUNCHER -nw"
|
||||
|
||||
# Emacs ANSI Term tracking
|
||||
if [[ -n "$INSIDE_EMACS" ]]; then
|
||||
chpwd_emacs() { print -P "\033AnSiTc %d"; }
|
||||
print -P "\033AnSiTc %d" # Track current working directory
|
||||
print -P "\033AnSiTu %n" # Track username
|
||||
# same as M-x eval but from outside Emacs.
|
||||
alias eeval="$EMACS_PLUGIN_LAUNCHER --eval"
|
||||
# create a new X frame
|
||||
alias eframe='emacsclient --alternate-editor "" --create-frame'
|
||||
|
||||
# add chpwd hook
|
||||
autoload -Uz add-zsh-hook
|
||||
add-zsh-hook chpwd chpwd_emacs
|
||||
fi
|
||||
# Emacs ANSI Term tracking
|
||||
if [[ -n "$INSIDE_EMACS" ]]; then
|
||||
chpwd_emacs() { print -P "\033AnSiTc %d"; }
|
||||
print -P "\033AnSiTc %d" # Track current working directory
|
||||
print -P "\033AnSiTu %n" # Track username
|
||||
|
||||
# Write to standard output the path to the file
|
||||
# opened in the current buffer.
|
||||
function efile {
|
||||
local cmd="(buffer-file-name (window-buffer))"
|
||||
"$EMACS_PLUGIN_LAUNCHER" --eval "$cmd" | tr -d \"
|
||||
}
|
||||
|
||||
# Write to standard output the directory of the file
|
||||
# opened in the the current buffer
|
||||
function ecd {
|
||||
local cmd="(let ((buf-name (buffer-file-name (window-buffer))))
|
||||
(if buf-name (file-name-directory buf-name)))"
|
||||
|
||||
local dir="$($EMACS_PLUGIN_LAUNCHER --eval $cmd | tr -d \")"
|
||||
if [ -n "$dir" ] ;then
|
||||
echo "$dir"
|
||||
else
|
||||
echo "can not deduce current buffer filename." >/dev/stderr
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
# add chpwd hook
|
||||
autoload -Uz add-zsh-hook
|
||||
add-zsh-hook chpwd chpwd_emacs
|
||||
fi
|
||||
|
||||
## Local Variables:
|
||||
## mode: sh
|
||||
## End:
|
||||
# Write to standard output the path to the file
|
||||
# opened in the current buffer.
|
||||
function efile {
|
||||
local cmd="(buffer-file-name (window-buffer))"
|
||||
local file="$("$EMACS_PLUGIN_LAUNCHER" --eval "$cmd" | tr -d \")"
|
||||
|
||||
if [[ -z "$file" ]]; then
|
||||
echo "Can't deduce current buffer filename." >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "$file"
|
||||
}
|
||||
|
||||
# Write to standard output the directory of the file
|
||||
# opened in the current buffer
|
||||
function ecd {
|
||||
local file
|
||||
file="$(efile)" || return $?
|
||||
echo "${file:h}"
|
||||
}
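With the rewrite above, `ecd` is just `efile` plus zsh's `:h` (head, i.e. dirname) modifier, and both report a clear error when no file-visiting buffer is current. Typical interactive use looks something like:

```zsh
cd "$(ecd)"        # jump to the directory of the buffer currently shown in Emacs
wc -l "$(efile)"   # run any other tool on the file behind that buffer
```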
|
||||
|
|
|
|||
|
|
@ -1,29 +1,38 @@
|
|||
#!/bin/sh
|
||||
|
||||
_emacsfun()
|
||||
{
|
||||
# get list of emacs frames.
|
||||
frameslist=`emacsclient --alternate-editor '' --eval '(frame-list)' 2>/dev/null | egrep -o '(frame)+'`
|
||||
emacsfun() {
|
||||
local cmd frames
|
||||
|
||||
if [ "$(echo "$frameslist" | sed -n '$=')" -ge 2 ] ;then
|
||||
# prevent creating another X frame if there is at least one present.
|
||||
emacsclient --alternate-editor "" "$@"
|
||||
else
|
||||
# Create one if there is no X window yet.
|
||||
emacsclient --alternate-editor "" --create-frame "$@"
|
||||
fi
|
||||
# Build the Emacs Lisp command to check for suitable frames
|
||||
# See https://www.gnu.org/software/emacs/manual/html_node/elisp/Frames.html#index-framep
|
||||
case "$*" in
|
||||
*-t*|*--tty*|*-nw*) cmd="(memq 't (mapcar 'framep (frame-list)))" ;; # if != nil, there are tty frames
|
||||
*) cmd="(delete 't (mapcar 'framep (frame-list)))" ;; # if != nil, there are graphical terminals (x, w32, ns)
|
||||
esac
|
||||
|
||||
# Check if there are suitable frames
|
||||
frames="$(emacsclient -a '' -n -e "$cmd" 2>/dev/null)"
|
||||
|
||||
# Only create another X frame if there isn't one present
|
||||
if [ -z "$frames" -o "$frames" = nil ]; then
|
||||
emacsclient --alternate-editor "" --create-frame "$@"
|
||||
return $?
|
||||
fi
|
||||
|
||||
emacsclient --alternate-editor "" "$@"
|
||||
}
|
||||
|
||||
|
||||
# adopted from https://github.com/davidshepherd7/emacs-read-stdin/blob/master/emacs-read-stdin.sh
|
||||
# Adapted from https://github.com/davidshepherd7/emacs-read-stdin/blob/master/emacs-read-stdin.sh
|
||||
# If the second argument is - then write stdin to a tempfile and open the
|
||||
# tempfile. (first argument will be `--no-wait` passed in by the plugin.zsh)
|
||||
if [ "$#" -ge "2" -a "$2" = "-" ]
|
||||
then
|
||||
tempfile="$(mktemp --tmpdir emacs-stdin-$USERNAME.XXXXXXX 2>/dev/null \
|
||||
|| mktemp -t emacs-stdin-$USERNAME)" # support BSD mktemp
|
||||
cat - > "$tempfile"
|
||||
_emacsfun --no-wait $tempfile
|
||||
else
|
||||
_emacsfun "$@"
|
||||
if [ $# -ge 2 -a "$2" = "-" ]; then
|
||||
# Create a tempfile to hold stdin
|
||||
tempfile="$(mktemp --tmpdir emacs-stdin-$USERNAME.XXXXXXX 2>/dev/null \
|
||||
|| mktemp -t emacs-stdin-$USERNAME)" # support BSD mktemp
|
||||
# Redirect stdin to the tempfile
|
||||
cat - > "$tempfile"
|
||||
# Reset $2 to the tempfile so that "$@" works as expected
|
||||
set -- "$1" "$tempfile" "${@:3}"
|
||||
fi
|
||||
|
||||
emacsfun "$@"
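The frame check above asks Emacs itself what kinds of frames exist: `framep` returns `t` for a text-terminal frame and a symbol such as `x`, `w32` or `ns` for a graphical one, so removing (or keeping only) the `t` entries tells the script whether a suitable frame is already open. With one X frame and one tty frame, the probe might return something like this (output shape is illustrative):

```zsh
emacsclient -a '' -n -e "(mapcar 'framep (frame-list))"
# => (x t)   -- a graphical frame exists, so no --create-frame is needed for a GUI request
```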
|
||||
|
|
|
|||
|
|
@ -1,22 +1,33 @@
|
|||
# Ember CLI
|
||||
|
||||
**Maintainers:** [BilalBudhani](https://github.com/BilalBudhani), [eubenesa](https://github.com/eubenesa), [scottkidder](https://github.com/scottkidder]
|
||||
This plugin adds completion and aliases for using [`ember-cli`](https://cli.emberjs.com/).
|
||||
|
||||
Ember CLI (https://www.ember-cli.com/)
|
||||
To use it, add `ember-cli` to the plugins array in your zshrc file:
|
||||
|
||||
### List of Aliases
|
||||
```zsh
|
||||
plugins=(... ember-cli)
|
||||
```
|
||||
|
||||
Alias | Ember-CLI command
|
||||
----- | -----------------
|
||||
**es** | *ember serve*
|
||||
**ea** | *ember addon*
|
||||
**eb** | *ember build*
|
||||
**ed** | *ember destroy*
|
||||
**eg** | *ember generate*
|
||||
**eh** | *ember help*
|
||||
**ein** | *ember init*
|
||||
**ei** | *ember install*
|
||||
**et** | *ember test*
|
||||
**ets** | *ember test --serve*
|
||||
**eu** | *ember update*
|
||||
**ev** | *ember version*
|
||||
## Aliases
|
||||
|
||||
| Alias | Command |
|
||||
| ----- | -------------------- |
|
||||
| `ea` | `ember addon` |
|
||||
| `eb` | `ember build` |
|
||||
| `ed` | `ember destroy` |
|
||||
| `eg` | `ember generate` |
|
||||
| `eh` | `ember help` |
|
||||
| `ei` | `ember install` |
|
||||
| `ein` | `ember init` |
|
||||
| `es` | `ember serve` |
|
||||
| `et` | `ember test` |
|
||||
| `ets` | `ember test --serve` |
|
||||
| `eu` | `ember update` |
|
||||
| `ev` | `ember version` |
|
||||
|
||||
## Maintainers
|
||||
|
||||
- [BilalBudhani](https://github.com/BilalBudhani)
|
||||
- [eubenesa](https://github.com/eubenesa)
|
||||
- [scottkidder](https://github.com/scottkidder)
|
||||
- [t-sauer](https://www.github.com/t-sauer)
|
||||
|
|
|
|||
189
plugins/ember-cli/_ember-cli
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
#compdef ember
|
||||
|
||||
local curcontext="$curcontext" state line ret=1
|
||||
|
||||
_arguments -C -A "--version" -A "--help" \
|
||||
'(- 1 *)--help' \
|
||||
'(- 1 *)--version' \
|
||||
'1: :->cmds' \
|
||||
'*:: :->args' && ret=0
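For orientation, the `'1: :->cmds'` / `'*:: :->args'` specs drive a small state machine: `_arguments` sets `$state` to `cmds` while the first word is being completed and to `args` once a subcommand is present, and the `case $state` below dispatches on that. The same pattern in miniature, for a hypothetical `frob` command with two subcommands:

```zsh
#compdef frob
local state ret=1
_arguments -C '1: :->cmds' '*:: :->args' && ret=0
case $state in
  cmds) _values 'frob command' 'build[compile]' 'clean[remove artifacts]' && ret=0 ;;
  args) _arguments '--verbose[chatty output]' && ret=0 ;;
esac
return ret
```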
|
||||
|
||||
case $state in
|
||||
cmds)
|
||||
_values "ember command" \
|
||||
"addon[Generates a new folder structure for building an addon, complete with test harness]" \
|
||||
"asset-sizes[Shows the sizes of your asset files]" \
|
||||
"build[Builds your app and places it into the output path (dist/ by default)]" \
|
||||
"destroy[Destroys code generated by generate command]" \
|
||||
"generate[Generates new code from blueprints]" \
|
||||
"help[Outputs the usage instructions for all commands or the provided command]" \
|
||||
"init[Creates a new ember-cli project in the current folder]" \
|
||||
"install[Installs an ember-cli addon from npm]" \
|
||||
"new[Creates a new directory and runs ember init in it]" \
|
||||
"serve[Builds and serves your app, rebuilding on file changes]" \
|
||||
"test[Runs your app's test suite]" \
|
||||
"version[outputs ember-cli version]"
|
||||
ret=0
|
||||
;;
|
||||
args)
|
||||
case $line[1] in
|
||||
help)
|
||||
_values 'commands' \
|
||||
'addon' \
|
||||
'asset-sizes' \
|
||||
'build' \
|
||||
'destroy' \
|
||||
'generate' \
|
||||
'help' \
|
||||
'init' \
|
||||
'install' \
|
||||
'new' \
|
||||
'serve' \
|
||||
'test' \
|
||||
'version' && ret=0
|
||||
;;
|
||||
addon)
|
||||
_arguments \
|
||||
'(--blueprint)--blueprint=-' \
|
||||
'(--directory)--directory=-' \
|
||||
'(--dry-run)--dry-run' \
|
||||
'(--skip-bower)--skip-bower' \
|
||||
'(--skip-git)--skip-git' \
|
||||
'(--skip-npm)--skip-npm' \
|
||||
'(--verbose)--verbose'
|
||||
;;
|
||||
asset-sizes)
|
||||
_arguments \
|
||||
'(--output-path)--output-path=-'
|
||||
;;
|
||||
build)
|
||||
_arguments \
|
||||
'(--environment)--environment=-' \
|
||||
'(--output-path)--output-path=-' \
|
||||
'(--suppress-sizes)--suppress-sizes' \
|
||||
'(--watch)--watch' \
|
||||
'(--watcher)--watcher=-' \
|
||||
'(-dev)-dev' \
|
||||
'(-prod)-prod'
|
||||
;;
|
||||
destroy|generate)
|
||||
_values 'arguments' \
|
||||
'(--classic)--classic' \
|
||||
'(--dry-run)--dry-run' \
|
||||
'(--dummy)--dummy' \
|
||||
'(--in-repo-addon)--in-repo-addon=-' \
|
||||
'(--pod)--pod' \
|
||||
'(--verbose)--verbose' && ret=0
|
||||
_values 'blueprints' \
|
||||
'acceptance-test' \
|
||||
'adapter' \
|
||||
'adapter-test' \
|
||||
'component' \
|
||||
'component-addon' \
|
||||
'component-test' \
|
||||
'controller' \
|
||||
'controller-test' \
|
||||
'helper' \
|
||||
'helper-addon' \
|
||||
'helper-test' \
|
||||
'initializer' \
|
||||
'initializer-addon' \
|
||||
'initializer-test' \
|
||||
'instance-initializer' \
|
||||
'instance-initializer-addon' \
|
||||
'instance-initializer-test' \
|
||||
'mixin' \
|
||||
'mixin-test' \
|
||||
'model' \
|
||||
'model-test' \
|
||||
'resource' \
|
||||
'route' \
|
||||
'route-addon' \
|
||||
'route-test' \
|
||||
'serializer' \
|
||||
'serializer-test' \
|
||||
'service' \
|
||||
'service-test' \
|
||||
'template' \
|
||||
'test-helper' \
|
||||
'transform' \
|
||||
'transform-test' \
|
||||
'util' \
|
||||
'util-test' \
|
||||
'view' \
|
||||
'view-test' \
|
||||
'addon' \
|
||||
'addon-import' \
|
||||
'app' \
|
||||
'blueprint' \
|
||||
'http-mock' \
|
||||
'http-proxy' \
|
||||
'in-repo-addon' \
|
||||
'lib' \
|
||||
'server' \
|
||||
'vendor-shim' && ret=0
|
||||
;;
|
||||
init)
|
||||
_arguments \
|
||||
'(--blueprint)--blueprint=-' \
|
||||
'(--name)--name=-' \
|
||||
'(--dry-run)--dry-run' \
|
||||
'(--skip-bower)--skip-bower' \
|
||||
'(--skip-npm)--skip-npm' \
|
||||
'(--verbose)--verbose'
|
||||
;;
|
||||
install)
|
||||
_arguments \
|
||||
'(--save-dev)--save-dev' \
|
||||
'(--save)--save'
|
||||
;;
|
||||
new)
|
||||
_arguments \
|
||||
'(--blueprint)--blueprint=-' \
|
||||
'(--directory)--directory=-' \
|
||||
'(--dry-run)--dry-run' \
|
||||
'(--skip-bower)--skip-bower' \
|
||||
'(--skip-git)--skip-git' \
|
||||
'(--skip-npm)--skip-npm' \
|
||||
'(--verbose)--verbose'
|
||||
;;
|
||||
serve)
|
||||
_arguments \
|
||||
'(--port)--port=-[To use a port different than 4200. Pass 0 to automatically pick an available port.]' \
|
||||
'(--host)--host=-[Listens on all interfaces by default]' \
|
||||
'(--proxy)--proxy=-' \
|
||||
'(--secure-proxy)--secure-proxy[Set to false to proxy self-signed SSL certificates]' \
|
||||
'(--transparent-proxy)--transparent-proxy[Set to false to omit x-forwarded-* headers when proxying]' \
|
||||
'(--watcher)--watcher=-' \
|
||||
'(--live-reload)--live-reload' \
|
||||
'(--live-reload-host)--live-reload-host=-[Defaults to host]' \
|
||||
'(--live-reload-base-url)--live-reload-base-url=-[Defaults to baseURL]' \
|
||||
'(--live-reload-port)--live-reload-port=-[Defaults to port number within \[49152...65535\]]' \
|
||||
'(--environment)--environment=-' \
|
||||
'(--output-path)--output-path=-' \
|
||||
'(--ssl)--ssl' \
|
||||
'(--ssl-key)--ssl-key=-' \
|
||||
'(--ssl-cert)--ssl-cert=-'
|
||||
;;
|
||||
test)
|
||||
_arguments \
|
||||
'(--environment)--environment=-' \
|
||||
'(--config-file)--config-file=-' \
|
||||
'(--server)--server' \
|
||||
'(--host)--host=-' \
|
||||
'(--test-port)--test-port=-[The test port to use when running with --server.]' \
|
||||
'(--filter)--filter=-[A string to filter tests to run]' \
|
||||
'(--module)--module=-[The name of a test module to run]' \
|
||||
'(--watcher)--watcher=-' \
|
||||
'(--launch)--launch=-[A comma separated list of browsers to launch for tests.]' \
|
||||
'(--reporter)--reporter=-[Test reporter to use \[tap|dot|xunit\] (default: tap)]' \
|
||||
'(--silent)--silent[Suppress any output except for the test report]' \
|
||||
'(--test-page)--test-page=-[Test page to invoke]' \
|
||||
'(--path)--path=-[Reuse an existing build at given path.]' \
|
||||
'(--query)--query=-[A query string to append to the test page URL.]'
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
|
|
@ -1,17 +1,12 @@
|
|||
# Ember CLI
|
||||
# Visit https://www.ember-cli.com/ to view user guide
|
||||
|
||||
alias es='ember serve'
|
||||
alias ea='ember addon'
|
||||
alias eb='ember build'
|
||||
alias ed='ember destroy'
|
||||
alias eg='ember generate'
|
||||
alias eh='ember help'
|
||||
alias ein='ember init'
|
||||
alias ei='ember install'
|
||||
alias ein='ember init'
|
||||
alias es='ember serve'
|
||||
alias et='ember test'
|
||||
alias ets='ember test --serve'
|
||||
alias eu='ember update'
|
||||
|
||||
# version
|
||||
alias ev='ember version'
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ This plugin provides support for working with Unicode emoji characters in `zsh`
|
|||
|
||||
Variable | Description
|
||||
----------------- | --------------------------------
|
||||
$emoji | Maps emoji names to characters
|
||||
$emoji | Maps emoji names to characters (except flags)
|
||||
$emoji_flags | Maps country names to flag characters (using region indicators)
|
||||
$emoji_groups | Named groups of emoji. Keys are group names; values are whitespace-separated lists of character names
|
||||
|
||||
|
|
@ -55,10 +55,8 @@ The defined group names can be found with `echo ${(k)emoji_groups}`.
|
|||
To list all available emoji with their names, use:
|
||||
```
|
||||
$> display_emoji
|
||||
$> display_emoji fruits
|
||||
$> display_emoji animals
|
||||
$> display_emoji vehicles
|
||||
$> display_emoji faces
|
||||
$> display_emoji people
|
||||
```
|
||||
|
||||
To use emoji in a prompt:
|
||||
|
|
@ -73,13 +71,13 @@ PROMPT="$surfer > "
|
|||
|
||||
The emoji names and codes are sourced from Unicode Technical Report \#51, which provides information on emoji support in Unicode. It can be found at https://www.unicode.org/reports/tr51/index.html.
|
||||
|
||||
The group definitions are added by this OMZ plugin. They are not based on external definitions. (As far as I can tell. -apjanke)
|
||||
The group definitions are added by this OMZ plugin. They are not based on external definitions.
|
||||
|
||||
The values in the `$emoji*` maps are the emoji characters themselves, not escape sequences or other forms that require interpretation. They can be used in any context and do not require escape sequence support from commands like `echo` or `print`.
|
||||
|
||||
The emoji in the main `$emoji` map are standalone character sequences which can all be output on their own, without worrying about combining characters. The values may actually be multi-code-point sequences, instead of a single code point, and may include combining characters in those sequences. But they're arranged so their effects do not extend beyond that sequence.
|
||||
|
||||
The exception to this is the skin tone variation selectors. These are included in the main `$emoji` map because they can be displayed on their own, as well as used as combining characters. (If they follow a character that is not one of the emoji characters they combine with, they are displayed as color swatches.)
|
||||
The exception to this is the skin tone / hair style variation selectors. These are included in the main `$emoji` map because they can be displayed on their own, as well as used as combining characters. (If they follow a character that is not one of the emoji characters they combine with, they are displayed as color swatches.)
|
||||
|
||||
|
||||
## Experimental Features
|
||||
|
|
@ -90,7 +88,6 @@ Variables:
|
|||
|
||||
Variable | Description
|
||||
----------------- | --------------------------------
|
||||
$emoji2 | Auxiliary and combining characters
|
||||
$emoji_skintone | Skin tone modifiers (from Unicode 8.0)
|
||||
|
||||
|
||||
|
|
@ -105,31 +102,26 @@ The "variation selectors" are combining characters which change the appearance o
|
|||
The `$emoji_skintone` associative array maps skin tone IDs to the variation selector characters. To use one, output it immediately following a smiley or other human emoji.
|
||||
|
||||
```
|
||||
echo "$emoji[smiling_face_with_open_mouth]$emoji_skintone[4]"
|
||||
echo $emoji[waving_hand]$emoji_skintone[5]
|
||||
```
|
||||
|
||||
Note that `$emoji_skintone` is an associative array, and its keys are the *names* of "Fitzpatrick Skin Type" groups, not linear indexes into a normal array. The names are `1_2`, `3`, `4`, `5`, and `6`. (Types 1 and 2 are combined into a single color.) See the [Diversity section in Unicode TR 51](https://www.unicode.org/reports/tr51/index.html#Diversity) for details.
|
||||
|
||||
#### Gemoji support
|
||||
|
||||
The [gemoji project](https://github.com/github/gemoji) seems to be the de facto main source for short names and other emoji-related metadata that isn't included in the official Unicode reports. So, our list of emojis incorporates some of their aliases to make your life more convenient:
|
||||
|
||||
```
|
||||
echo $emoji[grinning_face_with_smiling_eyes]
|
||||
echo $emoji[smile]
|
||||
```
|
||||
|
||||
These two commands yield the same emoji (😄). The first name is the official one, in the Unicode reference, and the second one is the alias that was in Gemoji's database.
|
||||
|
||||
## TODO
|
||||
|
||||
These are things that could be enhanced in future revisions of the plugin.
|
||||
|
||||
* Incorporate CLDR data for ordering and groupings
|
||||
* Short :bracket: style names (from gemoji)
|
||||
* Incorporate `gemoji` data
|
||||
* Country codes for flags
|
||||
* ZWJ combining function?
|
||||
|
||||
#### Gemoji support
|
||||
|
||||
The [gemoji project](https://github.com/github/gemoji) seems to be the de facto main source for short names and other emoji-related metadata that isn't included in the official Unicode reports. (I'm saying this just from looking at the google results for "emoji short names" and related searches. -apjanke)
|
||||
|
||||
If this plugin is updated to provide short names, CLDR sorting data, and similar stuff, it should probably be changed to use the Gemoji project, and the `update_emoji.pl` script be rewritten in Ruby so it can use the Gemoji library directly instead of parsing its data files.
|
||||
|
||||
This does *not* mean that it should use Gemoji at run time. None of the `zsh` plugin stuff should call Gemoji or Ruby code. Rather, the "build time" `update_emoji.pl` script should be rewritten to use Gemoji to generate a pure-native-`zsh` character definition file which would be checked in to the repo and can be called by OMZ users without having Gemoji installed.
|
||||
|
||||
#### ZWJ combining function
|
||||
|
||||
One of the newer features of Unicode emoji is the ability to use the "Zero-Width Joiner" character to compose multiple emoji characters in to a single "emoji ligature" glyph. For example, this is [how Apple supports "family" emoji with various genders and skin tones](https://www.unicode.org/reports/tr51/index.html#ZWJ_Sequences).
|
||||
|
||||
These are a pain to write out (and probably worse to read), and it might be convenient to have a couple functions for concisely composing them, if wider support for them appears.
|
||||
* ZWJ combining function?
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -4,15 +4,17 @@
|
|||
#
|
||||
# See the README for documentation.
|
||||
|
||||
# Handle $0 according to the standard:
|
||||
# https://zdharma-continuum.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html
|
||||
0="${${ZERO:-${0:#$ZSH_ARGZERO}}:-${(%):-%N}}"
|
||||
0="${${(M)0:#/*}:-$PWD/$0}"
|
||||
|
||||
_omz_emoji_plugin_dir="${0:h}"
|
||||
|
||||
() {
|
||||
|
||||
local LC_ALL=en_US.UTF-8
|
||||
|
||||
typeset -gAH emoji_groups
|
||||
typeset -gAH emoji_con
|
||||
typeset -gAH emoji2
|
||||
typeset -gAH emoji_skintone
|
||||
|
||||
source "$_omz_emoji_plugin_dir/emoji-char-definitions.zsh"
|
||||
|
|
@ -20,12 +22,11 @@ unset _omz_emoji_plugin_dir
|
|||
|
||||
# These additional emoji are not in the definition file, but are useful in conjunction with it
|
||||
|
||||
# This is a combinin character that can be placed after any other character to surround
|
||||
# This is a combining character that can be placed after any other character to surround
|
||||
# it in a "keycap" symbol.
|
||||
# The digits 0-9 are already in the emoji table as keycap_digit_<N>, keycap_ten, etc.
|
||||
# It's unclear whether this should be in the $emoji array, because those characters are all ones
|
||||
# which can be displayed on their own.
|
||||
#emoji[combining_enclosing_keycap]="\U20E3"
|
||||
|
||||
emoji[regional_indicator_symbol_letter_d_regional_indicator_symbol_letter_e]=$'\xF0\x9F\x87\xA9\xF0\x9F\x87\xAA'
|
||||
emoji[regional_indicator_symbol_letter_g_regional_indicator_symbol_letter_b]=$'\xF0\x9F\x87\xAC\xF0\x9F\x87\xA7'
|
||||
|
|
@ -38,209 +39,12 @@ emoji[regional_indicator_symbol_letter_i_regional_indicator_symbol_letter_t]=$'\
|
|||
emoji[regional_indicator_symbol_letter_u_regional_indicator_symbol_letter_s]=$'\xF0\x9F\x87\xBA\xF0\x9F\x87\xB8'
|
||||
emoji[regional_indicator_symbol_letter_r_regional_indicator_symbol_letter_u]=$'\xF0\x9F\x87\xB7\xF0\x9F\x87\xBA'
|
||||
|
||||
# Nonstandard alias names
|
||||
emoji[vulcan_salute]=$'\U1F596'
|
||||
|
||||
|
||||
# Emoji combining and auxiliary characters
|
||||
|
||||
# "Variation Selectors" for controlling text vs emoji style presentation
|
||||
# These apply to the immediately preceding character
|
||||
emoji2[text_style]=$'\UFE0E'
|
||||
emoji2[emoji_style]=$'\UFE0F'
|
||||
# Joiner that indicates a single combined-form glyph (ligature) should be used
|
||||
emoji2[zero_width_joiner]=$'\U200D'
|
||||
# Skin tone modifiers
|
||||
emoji2[emoji_modifier_fitzpatrick_type_1_2]=$'\U1F3FB'
|
||||
emoji2[emoji_modifier_fitzpatrick_type_3]=$'\U1F3FC'
|
||||
emoji2[emoji_modifier_fitzpatrick_type_4]=$'\U1F3FD'
|
||||
emoji2[emoji_modifier_fitzpatrick_type_5]=$'\U1F3FE'
|
||||
emoji2[emoji_modifier_fitzpatrick_type_6]=$'\U1F3FF'
|
||||
# Various other combining characters. (Incomplete list; I selected ones that sound useful)
|
||||
emoji2[combining_enclosing_circle]=$'\U20DD'
|
||||
emoji2[combining_enclosing_square]=$'\U20DE'
|
||||
emoji2[combining_enclosing_diamond]=$'\U20DF'
|
||||
emoji2[combining_enclosing_circle_backslash]=$'\U20E0'
|
||||
emoji2[combining_enclosing_screen]=$'\U20E2'
|
||||
emoji2[combining_enclosing_keycap]=$'\U20E3'
|
||||
emoji2[combining_enclosing_upward_pointing_triangle]=$'\U20E4'
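These combining characters act on whatever precedes them. For instance, `combining_enclosing_keycap` defined just above wraps the previous character in a keycap symbol on terminals and fonts that support it (rendering varies by terminal):

```zsh
echo "3${emoji2[combining_enclosing_keycap]}"   # digit three inside a keycap, where supported
```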
|
||||
|
||||
# Easier access to skin tone modifiers
|
||||
emoji_skintone[1_2]=$'\U1F3FB'
|
||||
emoji_skintone[3]=$'\U1F3FC'
|
||||
emoji_skintone[4]=$'\U1F3FD'
|
||||
emoji_skintone[5]=$'\U1F3FE'
|
||||
emoji_skintone[6]=$'\U1F3FF'
|
||||
|
||||
# Emoji groups
|
||||
# These are stored in a single associative array, $emoji_groups, to avoid cluttering up the global
|
||||
# namespace, and to allow adding additional group definitions at run time.
|
||||
# The keys are the group names, and the values are whitespace-separated lists of emoji character names.
|
||||
|
||||
emoji_groups[fruits]="
|
||||
tomato
|
||||
aubergine
|
||||
grapes
|
||||
melon
|
||||
watermelon
|
||||
tangerine
|
||||
banana
|
||||
pineapple
|
||||
red_apple
|
||||
green_apple
|
||||
peach
|
||||
cherries
|
||||
strawberry
|
||||
lemon
|
||||
pear
|
||||
"
|
||||
|
||||
emoji_groups[vehicles]="
|
||||
airplane
|
||||
rocket
|
||||
railway_car
|
||||
high_speed_train
|
||||
high_speed_train_with_bullet_nose
|
||||
bus
|
||||
ambulance
|
||||
fire_engine
|
||||
police_car
|
||||
taxi
|
||||
automobile
|
||||
recreational_vehicle
|
||||
delivery_truck
|
||||
ship
|
||||
speedboat
|
||||
bicycle
|
||||
helicopter
|
||||
steam_locomotive
|
||||
train
|
||||
light_rail
|
||||
tram
|
||||
oncoming_bus
|
||||
trolleybus
|
||||
minibus
|
||||
oncoming_police_car
|
||||
oncoming_taxi
|
||||
oncoming_automobile
|
||||
articulated_lorry
|
||||
tractor
|
||||
monorail
|
||||
mountain_railway
|
||||
suspension_railway
|
||||
mountain_cableway
|
||||
aerial_tramway
|
||||
rowboat
|
||||
bicyclist
|
||||
mountain_bicyclist
|
||||
sailboat
|
||||
"
|
||||
|
||||
emoji_groups[animals]="
|
||||
snail
|
||||
snake
|
||||
horse
|
||||
sheep
|
||||
monkey
|
||||
chicken
|
||||
boar
|
||||
elephant
|
||||
octopus
|
||||
spiral_shell
|
||||
bug
|
||||
ant
|
||||
honeybee
|
||||
lady_beetle
|
||||
fish
|
||||
tropical_fish
|
||||
blowfish
|
||||
turtle
|
||||
hatching_chick
|
||||
baby_chick
|
||||
front_facing_baby_chick
|
||||
bird
|
||||
penguin
|
||||
koala
|
||||
poodle
|
||||
bactrian_camel
|
||||
dolphin
|
||||
mouse_face
|
||||
cow_face
|
||||
tiger_face
|
||||
rabbit_face
|
||||
cat_face
|
||||
dragon_face
|
||||
spouting_whale
|
||||
horse_face
|
||||
monkey_face
|
||||
dog_face
|
||||
pig_face
|
||||
frog_face
|
||||
hamster_face
|
||||
wolf_face
|
||||
bear_face
|
||||
panda_face
|
||||
rat
|
||||
mouse
|
||||
ox
|
||||
water_buffalo
|
||||
cow
|
||||
tiger
|
||||
leopard
|
||||
rabbit
|
||||
cat
|
||||
dragon
|
||||
crocodile
|
||||
whale
|
||||
ram
|
||||
goat
|
||||
rooster
|
||||
dog
|
||||
pig
|
||||
dromedary_camel
|
||||
"
|
||||
|
||||
emoji_groups[faces]="
|
||||
grinning_face_with_smiling_eyes
|
||||
face_with_tears_of_joy
|
||||
smiling_face_with_open_mouth
|
||||
smiling_face_with_open_mouth_and_smiling_eyes
|
||||
smiling_face_with_open_mouth_and_cold_sweat
|
||||
smiling_face_with_open_mouth_and_tightly_closed_eyes
|
||||
winking_face
|
||||
smiling_face_with_smiling_eyes
|
||||
face_savouring_delicious_food
|
||||
relieved_face
|
||||
smiling_face_with_heart_shaped_eyes
|
||||
smirking_face
|
||||
unamused_face
|
||||
face_with_cold_sweat
|
||||
pensive_face
|
||||
confounded_face
|
||||
face_throwing_a_kiss
|
||||
kissing_face_with_closed_eyes
|
||||
face_with_stuck_out_tongue_and_winking_eye
|
||||
face_with_stuck_out_tongue_and_tightly_closed_eyes
|
||||
disappointed_face
|
||||
angry_face
|
||||
pouting_face
|
||||
crying_face
|
||||
persevering_face
|
||||
face_with_look_of_triumph
|
||||
disappointed_but_relieved_face
|
||||
fearful_face
|
||||
weary_face
|
||||
sleepy_face
|
||||
tired_face
|
||||
loudly_crying_face
|
||||
face_with_open_mouth_and_cold_sweat
|
||||
face_screaming_in_fear
|
||||
astonished_face
|
||||
flushed_face
|
||||
dizzy_face
|
||||
face_with_medical_mask
|
||||
"
|
||||
|
||||
}
|
||||
|
||||
# Prints a random emoji character
|
||||
|
|
@ -259,7 +63,11 @@ function random_emoji() {
|
|||
[[ $list_size -eq 0 ]] && return 1
|
||||
local random_index=$(( ( RANDOM % $list_size ) + 1 ))
|
||||
local name=${names[$random_index]}
|
||||
echo ${emoji[$name]}
|
||||
if [[ "$group" == "flags" ]]; then
|
||||
echo ${emoji_flags[$name]}
|
||||
else
|
||||
echo ${emoji[$name]}
|
||||
fi
|
||||
}
|
||||
|
||||
# Displays a listing of emoji with their names
|
||||
|
|
@ -276,12 +84,26 @@ function display_emoji() {
|
|||
fi
|
||||
# The extra spaces in output here are a hack for readability, since some
|
||||
# terminals treat these emoji chars as single-width.
|
||||
local counter=1
|
||||
for i in $names; do
|
||||
printf '%s ' "$emoji[$i]"
|
||||
if [[ "$group" == "flags" ]]; then
|
||||
printf '%s ' "$emoji_flags[$i]"
|
||||
else
|
||||
printf '%s ' "$emoji[$i]"
|
||||
fi
|
||||
# New line every 20 emoji, to avoid weirdnesses
|
||||
if (($counter % 20 == 0)); then
|
||||
printf "\n"
|
||||
fi
|
||||
let counter=$counter+1
|
||||
done
|
||||
print
|
||||
for i in $names; do
|
||||
echo "${emoji[$i]} = $i"
|
||||
if [[ "$group" == "flags" ]]; then
|
||||
echo "${emoji_flags[$i]} = $i"
|
||||
else
|
||||
echo "${emoji[$i]} = $i"
|
||||
fi
|
||||
done
|
||||
}
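With the flag handling added above, both helpers can be pointed at the `flags` group, whose characters live in `$emoji_flags` rather than `$emoji`; other group names come from `$emoji_groups`, and the group is passed as the first argument as in the README's `display_emoji` examples. Typical calls:

```zsh
random_emoji            # any emoji from the main map
random_emoji flags      # a random country flag drawn from $emoji_flags
display_emoji faces     # print the "faces" group with its names
display_emoji flags     # print every flag with its country name
```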
|
||||
|
||||
|
|
|
|||
21538
plugins/emoji/gemoji_db.json
Normal file
File diff suppressed because it is too large
|
|
@ -1,113 +0,0 @@
|
|||
#!/usr/bin/perl -w
|
||||
#
|
||||
# update_emoji.pl
|
||||
#
|
||||
# This script generates the emoji.plugin.zsh emoji definitions from the Unicode
|
||||
# character data for the emoji characters.
|
||||
#
|
||||
# The data file can be found at https://unicode.org/Public/emoji/latest/emoji-data.txt
|
||||
# as referenced in Unicode TR51 (https://www.unicode.org/reports/tr51/index.html).
|
||||
#
|
||||
# This is known to work with the data file from version 1.0. It may not work with later
|
||||
# versions if the format changes. In particular, this reads line comments to get the
|
||||
# emoji character name and unicode version.
|
||||
#
|
||||
# Country names have punctuation and other non-letter characters removed from their name,
|
||||
# to avoid possible complications with having to escape the strings when using them as
|
||||
# array subscripts. The definition file seems to use some combining characters like accents
|
||||
# that get stripped during this process.
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
use 5.010;
|
||||
use autodie;
|
||||
|
||||
use Path::Class;
|
||||
use File::Copy;
|
||||
|
||||
# Parse definitions out of the data file and convert
|
||||
sub process_emoji_data_file {
|
||||
my ( $infile, $outfilename ) = @_;
|
||||
my $file = file($infile);
|
||||
my $outfile = file($outfilename);
|
||||
my $outfilebase = $outfile->basename();
|
||||
my $tempfilename = "$outfilename.tmp";
|
||||
my $tempfile = file($tempfilename);
|
||||
my $outfh = $tempfile->openw();
|
||||
$outfh->print("
|
||||
# $outfilebase - Emoji character definitions for oh-my-zsh emoji plugin
|
||||
#
|
||||
# This file is auto-generated by update_emoji.pl. Do not edit it manually.
|
||||
#
|
||||
# This contains the definition for:
|
||||
# \$emoji - which maps character names to Unicode characters
|
||||
# \$emoji_flags - maps country names to Unicode flag characters using region indicators
|
||||
|
||||
# Main emoji
|
||||
typeset -gAH emoji
|
||||
# National flags
|
||||
typeset -gAH emoji_flags
|
||||
# Combining modifiers
|
||||
typeset -gAH emoji_mod
|
||||
|
||||
");
|
||||
|
||||
my $fh = $file->openr();
|
||||
my $line_num = 0;
|
||||
while ( my $line = $fh->getline() ) {
|
||||
$line_num++;
|
||||
$_ = $line;
|
||||
# Skip all-comment lines (from the header) and blank lines
|
||||
# (But don't strip comments on normal lines; we need to parse those for
|
||||
# the emoji names.)
|
||||
next if /^\s*#/ or /^\s*$/;
|
||||
|
||||
if (/^(\S.*?\S)\s*;\s*(\w+)\s*;\s*(\w+)\s*;\s*(\w+)\s*;\s*(\w.*?)\s*#\s*V(\S+)\s\(.*?\)\s*(\w.*\S)\s*$/) {
|
||||
my ($code, $style, $level, $modifier_status, $sources, $version, $keycap_name)
|
||||
= ($1, $2, $3, $4, $5, $6, $7);
|
||||
#print "code=$code style=$style level=$level modifier_status=$modifier_status sources=$sources version=$version name=$keycap_name\n";
|
||||
my @code_points = split /\s+/, $code;
|
||||
my @sources = split /\s+/, $sources;
|
||||
|
||||
my $flag_country = "";
|
||||
if ( $keycap_name =~ /^flag for (\S.*?)\s*$/) {
|
||||
$flag_country = $1;
|
||||
}
|
||||
|
||||
my $zsh_code = join '', map { "\\U$_" } @code_points;
|
||||
# Convert keycap names to valid associative array names that do not require any
|
||||
# quoting. Works fine for most stuff, but is clumsy for flags.
|
||||
my $omz_name = lc($keycap_name);
|
||||
$omz_name =~ s/[^A-Za-z0-9]/_/g;
|
||||
my $zsh_flag_country = $flag_country;
|
||||
$zsh_flag_country =~ s/[^\p{Letter}]/_/g;
|
||||
if ($flag_country) {
|
||||
$outfh->print("emoji_flags[$zsh_flag_country]=\$'$zsh_code'\n");
|
||||
} else {
|
||||
$outfh->print("emoji[$omz_name]=\$'$zsh_code'\n");
|
||||
}
|
||||
# Modifiers are included in both the main set and their separate map,
|
||||
# because they have a standalone representation as a color swatch.
|
||||
if ( $modifier_status eq "modifier" ) {
|
||||
$outfh->print("emoji_mod[$omz_name]=\$'$zsh_code'\n");
|
||||
}
|
||||
} else {
|
||||
die "Failed parsing line $line_num: '$_'";
|
||||
}
|
||||
}
|
||||
$fh->close();
|
||||
$outfh->print("\n");
|
||||
$outfh->close();
|
||||
|
||||
move($tempfilename, $outfilename)
|
||||
or die "Failed moving temp file to $outfilename: $!";
|
||||
}
|
||||
|
||||
my $datafile = "emoji-data.txt";
|
||||
my $zsh_def_file = "emoji-char-definitions.zsh";
|
||||
process_emoji_data_file($datafile, $zsh_def_file);
|
||||
|
||||
print "Updated definition file $zsh_def_file\n";
|
||||
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.