mirror of https://github.com/jakejarvis/dotfiles.git, synced 2025-09-15 23:15:30 -04:00
randomness
zsh/.zshrc
@@ -10,8 +10,8 @@ export ZSH=$HOME/.oh-my-zsh
# Default to nano 'cause I'm a wimp
export EDITOR="nano"
export VISUAL="code"
export BROWSER="/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome"
# export BROWSER="/Applications/Firefox.app/Contents/MacOS/firefox"
# export BROWSER="/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome"
export BROWSER="/Applications/Firefox.app/Contents/MacOS/firefox"

# Adjust history for speed
HISTFILE=~/.zsh_history
@@ -68,14 +68,13 @@ export GATSBY_TELEMETRY_DISABLED=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
export SAM_CLI_TELEMETRY=0
export AZURE_CORE_COLLECT_TELEMETRY=0
export CHECKPOINT_DISABLE=1 # prisma
export CHECKPOINT_DISABLE=1

# TEMPORARY: fixes some breakage with node 17
#export NODE_OPTIONS=--openssl-legacy-provider

# 1Password SSH integration
# 1Password integrations
# https://developer.1password.com/docs/ssh/get-started/#step-4-configure-your-ssh-or-git-client
export SSH_AUTH_SOCK=~/Library/Group\ Containers/2BUA8C4S2C.com.1password/t/agent.sock
# https://developer.1password.com/docs/cli/shell-plugins/
source $HOME/.config/op/plugins.sh
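# a minimal sketch of the matching ~/.ssh/config side of the guide linked above
# (same agent.sock path as SSH_AUTH_SOCK; illustrative, not copied from this repo):
#   Host *
#     IdentityAgent "~/Library/Group Containers/2BUA8C4S2C.com.1password/t/agent.sock"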

# iTerm2
test -e "${HOME}/.iterm2_shell_integration.zsh" && source "${HOME}/.iterm2_shell_integration.zsh"
@@ -1,25 +1,20 @@
# Enable aliases to be sudo'ed
alias sudo="sudo "
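# (the trailing space tells the shell to also check the word after `sudo` for
#  alias expansion, so e.g. `sudo ll` still picks up the `ll` alias below)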

alias reload="source ~/.zshrc"

# restore colors to GNU ls
alias ls="ls -G --color=tty"
alias ll="ls -lah"
alias la="ls -a"
alias l="ls -lh"

# typos
alias cd..="cd .."
alias ..="cd .."
alias ...="cd ../.."
alias ....="cd ../../.."
alias ~="cd ~"

alias hosts="sudo $EDITOR /etc/hosts"
alias digg="dig @1.1.1.1 +nocmd any +multiline +noall +answer"
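# usage sketch: `digg example.com` sends an ANY query for the given name to 1.1.1.1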
# alias speed="wget -O /dev/null http://cachefly.cachefly.net/100mb.test"

#
# Git
#
alias g="git"
alias gc="git commit -m" # + commit message
alias gca="git add . && git commit -m" # + commit message
@@ -40,8 +35,6 @@ alias gundo="git reset --soft HEAD~1"
alias greset="git reset"
alias github="gh repo view --web"
alias gist="gh gist create --web"
alias ghnew="gh repo create"
alias ghfork="gh repo fork"
glall() {
  # pull all remote branches
  # https://stackoverflow.com/a/10312587
@@ -50,9 +43,7 @@ glall() {
  git pull --all
}

#
# Docker
#
alias d="docker"
alias dps="docker ps -a"
# build and run:
@@ -73,16 +64,12 @@ alias dcd="docker-compose down"
alias dcr="docker-compose down && docker-compose up -d"
alias dcl="docker-compose logs -f"

#
# Node/NPM/Yarn
#
alias npr="npm run"
alias fresh_npm="rm -rf node_modules package-lock.json && npm install"
alias fresh_yarn="rm -rf node_modules yarn.lock && yarn install"

#
# Hugo
#
make_hugo() {
  # parentheses let us cd to the Hugo path without changing our current location
  (
@@ -101,15 +88,15 @@ alias hugo_brew="brew upgrade hugo --fetch-HEAD --build-from-source"
# run `hugo config` first to make sure we're in a Hugo directory:
alias hugo_clean="hugo config 1>/dev/null && rm -rf public/ resources/ build/"

# SSH
alias sshalt="ssh -p 2222"
alias moshalt="mosh --ssh=\"ssh -p 2222\""

alias pubkey="more ~/.ssh/id_ed25519.pub | pbcopy | echo '=> Public key copied to clipboard.'"
alias pubkey_rsa="more ~/.ssh/id_rsa.pub | pbcopy | echo '=> Public key copied to clipboard.'"

# youtube-dl
alias ytdl="youtube-dl -f bestvideo+bestaudio"
alias ytmp3="youtube-dl -f bestaudio -x --audio-format mp3 --audio-quality 320K"

alias weather="curl 'https://wttr.in/mht?format=v2'"

alias shellcheckd="docker run --rm -v \"$PWD:/mnt\" koalaman/shellcheck:latest"
# fun
alias weather="curl 'https://wttr.in/?format=v2'"
@@ -35,11 +35,6 @@ extract() {
  fi
}

# Find the real location of a short URL
unshort() {
  curl -sIL $1 | sed -n 's/Location: *//p'
}
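# usage sketch: `unshort <short-url>` prints each Location header in the redirect chain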

# Create a git.io short URL (custom slug optional)
# ex: gitio https://github.com/jakejarvis/dotfiles [jakesdotfiles] => https://git.io/jakesdotfiles
# https://blog.github.com/2011-11-10-git-io-github-url-shortener
@@ -50,18 +45,6 @@ gitio() {
  echo "${RESPONSE//Location: /}"
}

# Transfers a text file as a shareable link.
# See https://transfer.sh/
transfer() {
  if [ $# -eq 0 ]; then echo "No arguments specified. Usage:\necho transfer /tmp/test.md\ncat /tmp/test.md | transfer test.md"; return 1; fi
  tmpfile=$( mktemp -t transferXXX ); if tty -s; then basefile=$(basename "$1" | sed -e 's/[^a-zA-Z0-9._-]/-/g'); curl --progress-bar --upload-file "$1" "https://transfer.sh/$basefile" >> $tmpfile; else curl --progress-bar --upload-file "-" "https://transfer.sh/$1" >> $tmpfile ; fi; cat $tmpfile; rm -f $tmpfile;
}

# List files in an S3 bucket
s3ls() {
  aws s3 ls s3://$1
}
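# usage sketch (hypothetical bucket name): `s3ls my-bucket` lists s3://my-bucket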

# Push a local SSH public key to another machine
# https://github.com/rtomayko/dotfiles/blob/rtomayko/.bashrc
push_ssh_cert() {
@@ -73,3 +56,116 @@ push_ssh_cert() {
    ssh $_host 'cat >> ~/.ssh/authorized_keys' < ~/.ssh/id_ed25519.pub
  done
}
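# usage sketch (hypothetical host): `push_ssh_cert user@host.example.com` appends
# the local ~/.ssh/id_ed25519.pub to that host's ~/.ssh/authorized_keys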

# upload file(s) to https://transfer.sh
# https://github.com/dutchcoders/transfer.sh#bash-and-zsh-with-delete-url-delete-token-output-and-prompt-before-uploading
transfer()
{
    local file
    declare -a file_array
    file_array=("${@}")

    if [[ "${file_array[@]}" == "" || "${1}" == "--help" || "${1}" == "-h" ]]
    then
        echo "${0} - Upload arbitrary files to \"transfer.sh\"."
        echo ""
        echo "Usage: ${0} [options] [<file>]..."
        echo ""
        echo "OPTIONS:"
        echo " -h, --help"
        echo " show this message"
        echo ""
        echo "EXAMPLES:"
        echo " Upload a single file from the current working directory:"
        echo " ${0} \"image.img\""
        echo ""
        echo " Upload multiple files from the current working directory:"
        echo " ${0} \"image.img\" \"image2.img\""
        echo ""
        echo " Upload a file from a different directory:"
        echo " ${0} \"/tmp/some_file\""
        echo ""
        echo " Upload all files from the current working directory. Be aware of the webserver's rate limiting!:"
        echo " ${0} *"
        echo ""
        echo " Upload a single file from the current working directory and filter out the delete token and download link:"
        echo " ${0} \"image.img\" | awk --field-separator=\": \" '/Delete token:/ { print \$2 } /Download link:/ { print \$2 }'"
        echo ""
        echo " Show help text from \"transfer.sh\":"
        echo " curl --request GET \"https://transfer.sh\""
        return 0
    else
        for file in "${file_array[@]}"
        do
            if [[ ! -f "${file}" ]]
            then
                echo -e "\e[01;31m'${file}' could not be found or is not a file.\e[0m" >&2
                return 1
            fi
        done
        unset file
    fi

    local upload_files
    local curl_output
    local awk_output

    du --total --block-size="K" --dereference "${file_array[@]}" >&2
    # be compatible with "bash"
    if [[ "${ZSH_NAME}" == "zsh" ]]
    then
        read $'upload_files?\e[01;31mDo you really want to upload the above files ('"${#file_array[@]}"$') to "transfer.sh"? (Y/n): \e[0m'
    elif [[ "${BASH}" == *"bash"* ]]
    then
        read -p $'\e[01;31mDo you really want to upload the above files ('"${#file_array[@]}"$') to "transfer.sh"? (Y/n): \e[0m' upload_files
    fi

    case "${upload_files:-y}" in
        "y"|"Y")
            # for the sake of the progress bar, execute "curl" for each file.
            # the parameters "--include" and "--form" will suppress the progress bar.
            for file in "${file_array[@]}"
            do
                # show delete link and filter out the delete token from the response header after upload.
                # it is important to save "curl's" "stdout" via a subshell to a variable or redirect it to another command,
                # which just redirects to "stdout" in order to have a sane output afterwards.
                # the progress bar is redirected to "stderr" and is only displayed,
                # if "stdout" is redirected to something; e.g. ">/dev/null", "tee /dev/null" or "| <some_command>".
                # the response header is redirected to "stdout", so redirecting "stdout" to "/dev/null" does not make any sense.
                # redirecting "curl's" "stderr" to "stdout" ("2>&1") will suppress the progress bar.
                curl_output=$(curl --request PUT --progress-bar --dump-header - --upload-file "${file}" "https://transfer.sh/")
                awk_output=$(awk \
                    'gsub("\r", "", $0) && tolower($1) ~ /x-url-delete/ \
                    {
                        delete_link=$2;
                        print "Delete command: curl --request DELETE " "\""delete_link"\"";

                        gsub(".*/", "", delete_link);
                        delete_token=delete_link;
                        print "Delete token: " delete_token;
                    }

                    END{
                        print "Download link: " $0;
                    }' <<< "${curl_output}")

                # return the results via "stdout", "awk" does not do this for some reason.
                echo -e "${awk_output}\n"

                # avoid rate limiting as much as possible; nginx: too many requests.
                if (( ${#file_array[@]} > 4 ))
                then
                    sleep 5
                fi
            done
            ;;

        "n"|"N")
            return 1
            ;;

        *)
            echo -e "\e[01;31mWrong input: '${upload_files}'.\e[0m" >&2
            return 1
    esac
}
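# usage sketch (hypothetical file): `transfer notes.md` prompts for confirmation,
# then prints the download link plus a delete command and token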
@@ -2,22 +2,6 @@
alias intel="arch -x86_64 /bin/zsh"
alias arm="arch -arm64 /opt/homebrew/bin/zsh"

# Remap macOS core utils to GNU
alias grep="ggrep"
alias which="gwhich"
alias awk="gawk"
#alias sed="gsed"
#alias find="gfind"
#alias make="gmake"
#alias tar="gtar"

# macOS has no `md5sum`, so use `md5` as a fallback
command -v md5sum > /dev/null || alias md5sum="md5"

# system python -> homebrew python3
# alias python="python3"
# alias pip="pip3"

# My own creation! See: https://github.com/jakejarvis/simpip
alias ipv4="curl -4 simpip.com --max-time 1 --proto-default https --silent"
alias ipv6="curl -6 simpip.com --max-time 1 --proto-default https --silent"
@@ -33,7 +17,7 @@ alias dns-set-google="dns-set 8.8.8.8 8.8.4.4"
alias flush="sudo killall -HUP mDNSResponder; sudo killall mDNSResponderHelper; sudo dscacheutil -flushcache"

# Update: brew, npm, gems, pip, app store, macos
update() {
system_update() {
  NC="\033[0m"
  YELLOW="\033[0;33m"
@@ -76,7 +60,6 @@ alias unhide="defaults write com.apple.finder AppleShowAllFiles -bool true && ki
alias forcetrash="sudo rm -rf ~/.Trash /Volumes/*/.Trashes"
alias unq="sudo xattr -rd com.apple.quarantine"
alias verify_sign="codesign --verify --deep --verbose"
alias afk="/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend"

alias gpu="system_profiler SPDisplaysDataType"
alias cpu="sysctl -n machdep.cpu.brand_string"
zsh/path.zsh
@@ -3,9 +3,9 @@ if test ! "$(uname)" = "Darwin"; then
fi

# Default paths
export PATH="$HOME/bin:/opt/local/bin:/opt/local/sbin:/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin:$PATH"
export PATH="/opt/local/bin:/opt/local/sbin:/usr/local/bin:/usr/bin:/bin:/usr/local/sbin:/usr/sbin:/sbin${PATH+:$PATH}"

# homebrew
# Homebrew
export HOMEBREW_PREFIX="/opt/homebrew"
export HOMEBREW_CELLAR="/opt/homebrew/Cellar"
export HOMEBREW_REPOSITORY="/opt/homebrew"
@@ -13,18 +13,15 @@ export PATH="/opt/homebrew/bin:/opt/homebrew/sbin${PATH+:$PATH}"
export MANPATH="/opt/homebrew/share/man${MANPATH+:$MANPATH}:"
export INFOPATH="/opt/homebrew/share/info:${INFOPATH:-}"

# Remap macOS core utils to GNU equivalents (from coreutils, findutils, gnu-*, etc.):
# https://gist.github.com/skyzyx/3438280b18e4f7c490db8a2a2ca0b9da?permalink_comment_id=3049694#gistcomment-3049694
for p in "$(brew --prefix)"/opt/*/libexec/gnubin; do export PATH=$p:$PATH; done
# Ensure `man` refers to the new binaries:
for p in "$(brew --prefix)"/opt/*/libexec/gnuman; do export MANPATH=$p:$MANPATH; done
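# e.g. with coreutils installed, these globs typically resolve to paths like
#   /opt/homebrew/opt/coreutils/libexec/gnubin (and the matching gnuman dir)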

# Go
export GOPATH="$HOME/golang"
#export GOROOT="/usr/local/opt/go/libexec"
export PATH="$GOPATH/bin:$PATH"
#export PATH="$GOROOT/bin:$PATH"

# Ruby
# export PATH="$HOME/.gem/ruby/3.1.0/bin:$PATH"
# export RUBY_HOME="$HOMEBREW_PREFIX/opt/ruby/bin"
# export GEM_PATH="$HOMEBREW_PREFIX/lib/ruby/gems/3.1.0/bin"
# export PATH="$RUBY_HOME:$PATH"
# export PATH="$GEM_PATH:$PATH"

# rbenv
export RUBY_CONFIGURE_OPTS="--with-openssl-dir=$(brew --prefix openssl@1.1) --with-readline-dir=$(brew --prefix readline) --with-libyaml-dir=$(brew --prefix libyaml)"
@@ -32,11 +29,8 @@ if command -v rbenv 1>/dev/null 2>&1; then
  eval "$(rbenv init -)"
fi
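# (RUBY_CONFIGURE_OPTS above is picked up by ruby-build, so it takes effect the
#  next time `rbenv install <version>` builds a Ruby)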

# Python
# export PATH="$HOME/Library/Python/3.9/bin:$PATH"

# openjdk
export PATH="$HOMEBREW_PREFIX/opt/openjdk/bin:$PATH"
# OpenJDK
export PATH="$(brew --prefix)/opt/openjdk/bin:$PATH"

# Metasploit
export PATH="/opt/metasploit-framework/bin:$PATH"