wget --spider -S URL # spider mode: check a URL and show response headers without downloading
/System/Library/PrivateFrameworks/Apple80211.framework/Resources/airport -s # scan Wi-Fi networks on macOS
echo | nc 8.8.8.8 53 # test network connectivity w/ nc
du -d 1 -h .git/ # show sizes under .git
du -d 1 -h -I ".git" . # show sizes of the current dir, excluding .git (BSD du -I)
curl -I localhost # fetch headers only w/ curl
cat <<EOF > text.txt # redirect heredoc
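# the heredoc redirect above is only the opening line; a minimal complete sketch (the two content lines are an assumed example):
cat <<EOF > text.txt
first line
second line
EOF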
tcpdump -nn src host 0.0.0.0 and not port 22 and not port 3306 # tcpdump 1
tcpdump -nn src port 80 or src port 443 # tcpdump 2
dig -x 127.0.0.1 # reverse lookup w/ dig (-x takes an IP address)
rg foo -g '!*.min.js' # exclude w/ rg
rg foo -g '*.min.js' # include w/ rg
export WAIT=$(( RANDOM % 5 )) && sleep $WAIT && echo "foo" # jitter sample 1
for i in `seq 1 100`; do WAIT=$(($RANDOM % 1200)); echo $WAIT; sleep 0.1; done # jitter sample 2
perl -i -pe 's/foo/bar/' text.txt # in-place replacement w/ PCRE
perl -npe 's/(?<=-\d{2})T(?=\d{2}:)/ /g ; s/(?<=:\d{2})Z/ /g' # apply multiple patterns inline w/ PCRE
cut -d' ' -f1,3 # cut
history | cut -d " " -f 4-11 # cut numbers of history
cat readme.md | awk '!/^$/' # remove empty lines by awk
[ -f "$(which yarn)" ] && echo hoge # one-line if in bash
if [ "" != "$(which yarn)" ]; then echo 1; elif [ "" != "$(which npm)" ]; then echo 2; else echo 3; fi # if-elif-else in bash
tree -d -L 2 -I "node_modules" # tree w/ ignoring
ls | grep template | awk -F '.' '{print "."$2"."$3"."$4}' | xargs -I % -L 1 mv "template"% "tmpl"% # rename from 'template' to 'tmpl'
exa -T -L 2 -a -I 'node_*|.git|.circleci' # tree view w/ exa using the --level and --ignore-glob options
eval $(aws ecr get-login --no-include-email --region ap-northeast-1) # docker login w/ ecr
perl -lnae 'print $F[1]' log.txt # parse file by default "white space" delimiter
perl -F, -lnae 'print $F[1]' log.txt # parse file by "comma" delimiter
perl -F/,/ -lane 'print @F[1,2]' data.csv # parse csv and print multi columns by "comma" delimiter
docker run -d ubuntu:16.04 tail -f /dev/null # run docker w/ ubuntu as a daemon
for i in `seq 1 10`; do echo $i && sleep 1; done # seq and sleep
IPS=(10.6.1.5 10.6.1.6 10.6.1.7 10.6.1.8); for i in "${IPS[@]}"; do echo $i; ssh $i 'hostname'; done # loop over IPs to echo and ssh
openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 -nodes -subj "/C=GO/ST=AAA/L=AAA/O=AAA/OU=IT Department/CN=AAA.org" # create a self-signed certificate
while true; do { echo -e 'HTTP/1.1 200 OK\r\n'; } | nc -l 8000; done # run a dummy http server on port 8000
ssh BASTION_SERVER -L 8080:DB_SERVER:3306 -N # -f (background flag) # ssh forwarding (local:8080 => bastion:22 => db:3306)
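# a hedged usage sketch for the tunnel above: once it is running, point a client at the forwarded local port (user name and password prompt are placeholders):
mysql -h 127.0.0.1 -P 8080 -u dbuser -p # reaches DB_SERVER:3306 through localhost:8080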
tmux swap-window -t 1 # swap window on tmux
tmux swap-pane -s 7.1 -t 7.2 # swap panes in a window; -s(src) -t(target) as {window number}.{pane number (1-indexed)}
git branch | colrm 1 2 # list branch names
openssl rand -base64 24 # make random password easily
cat /dev/urandom | tr -dc "[:alnum:]" | fold -w 12 | head -n 10 # make random password
cat /dev/urandom | tr -dc "[:graph:]" | tr -d "1Il0O~^" | fold -w 12 | head -n 10 # make random password strongly
fd -0 | xargs -0 -I % echo % # use fd and null separate in xargs
netstat -antp # [GNU] show relationship of ip/port and process
git status -s | sed -e "s/^...\(.*\)$/'\1'/" | xargs rm -f # delete the files listed by git status -s via sed
curl -k -L -o /usr/local/bin/BINARY http://fqdn/BINARY # download binary from url by curl
shasum -a 256 /path/to/file # check sum by sha256
shasum -a 256 /path/to/file | cut -d" " -f1 | grep -c -E "^796d8b936bca76f3f344d105883e45fbfb755af313b66ad7c163ccca7d295a60$" # check sum by sha256 w/ cut and grep
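# a hedged follow-up to the checksum lines above: compare against an expected value in a script (EXPECTED_SUM is a placeholder you supply):
[ "$(shasum -a 256 /path/to/file | cut -d' ' -f1)" = "$EXPECTED_SUM" ] && echo "checksum ok" || echo "checksum mismatch"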
aws iam get-role --role-name dumb-dummy-role --query "Role.RoleId" --output text # get IAM Role ID by awscli
dstat --top-cpu-adv # dstat: spiked cpu w/ PID
dstat --top-io --top-bio # dstat: spiked cpu, blocking io
dstat --top-cpu --top-cputime # dstat: spiked cpu
dstat --top-io-adv --top-bio-adv # dstat: io, blocking io w/ PID
echo {roleA,roleB,roleC} | xargs -L 1 -I % aws iam get-role --role-name % --query "Role.[RoleName,RoleId]" --output text # extract AWS RoleId and RoleName by xargs
echo 'abcde' | awk '{print substr($0, index($0, "b"))}' # substr() starting at the position found by index() in awk
echo 'abcde' | awk '$0 ~ /b.*/' # match shorthand and print by awk
echo 'abcde' | awk 'match($0, /b.*/) {print RSTART, RLENGTH}' # match w/ ReservedVar of RSTART/RLENGTH by awk
echo 'abcde' | awk 'match($0, /b.*/) {print substr($0, RSTART, RLENGTH)}' # substr and match w/ ReservedVar of RSTART/RLENGTH by awk
echo 'abcde' | grep -o 'b.*' # extract matched string by grep
ccrypt -e filename # encrypt w/ ccrypt
ccrypt -c -d filename > decrypted-file # decrypt w/ ccrypt
TZ=UTC date '+%Y%m%dT%H%M%SZ' # Print date as UTC timestamp
ffmpeg -i input.mov -r 30 output.gif # .mov to .gif in macOS by ffmpeg
sed -e 's/\x0/\n/g' /proc/%PID%/environ # print a process's environment variables readably
tcpdump -nn -l -i eth0 port 53 | tee 53.log # tcpdump w/ tee using `-l`
netstat -p udp # [BSD] show udp protocol
netstat -a -n -u # [BSD] show udp sockets with numeric addresses
lsof -i:<PORT> # show the process using a given port
curl -s http://inet-ip.info # check my ip (1)
curl checkip.amazonaws.com # check my ip (2)
ip -r a # ip a (Linux only)
echo $PATH | /usr/bin/sed 's/:/\'$'\n/g' # print PATH entries one per line
patch -p0 < diff.patch # apply patch at same dir
netstat -l # port check 1 / list listening ports
netstat -ltunp4 # port check 1' / list listening ports (ipv4)
ss -ltunp # port check 2 / list listening ports
nmap -n -PN -sT -sU -p- localhost # port check 3 / list listening ports
lsof -i -P # port check 4 / list the network files in use
tar zcvf backup.tar.gz filepath1 filepath2 # tar compress gzip
tar zxvf backup.tar.gz # tar decompress gzip
tar jcf backup.tar.bz2 filepath1 filepath2 # tar compress bzip2
tar jxf backup.tar.bz2 # tar decompress bzip2
openssl s_client -connect www.example.com:443 -tlsextdebug # debug openssl
openssl rand 107324 > test.txt # fill a file with random bytes (openssl, pseudo-random)
dd if=/dev/urandom of=test.bin bs=1m count=1 # fill a file with random bytes (dd, /dev/urandom)
dd if=/dev/zero of=test.bin bs=1m count=1 # fill a file with zeros
fd chrome | fzy | xargs vim # fd and fzy
awk 'END{print NR}' filename # count file lines by awk (wc -l counts newline characters, so it differs when the last line lacks a trailing newline)
date +%s # get unix timestamp
echo -e "\033[32m green \033[00m , \033[31m red \033[00m , \033[36m cyan \033[00m , \033[33m yellow \033[00m" # color code
diff -ybBw # useful diff
diff <(grep -v -E '^\s*#' a.txt | grep -v '^$') <(grep -v -E '^\s*#' b.txt | grep -v '^$') # diff ignoring comment out and white space
(grep -v -E '\s*#' a.txt | grep -v '^$') | (grep -v -E '\s*#' b.txt | grep -v '^$' | diff -b /dev/fd/3 - >./prediff 2>&1) 3<&0 # diff ignoring comment out and white space by POSIX
aws ec2 describe-instances --filter "Name=instance-state-name,Values=running" --query 'Reservations[].Instances[].{InstanceId:InstanceId,PublicIpAddress:PublicIpAddress,Tags:Tags[?Key==`Name`].Value|[0]}' --output text # custom describe-instances
aws ec2 describe-instances | jq -r '.Reservations[] | .Instances[] | select(.State.Name == "running") | [ .InstanceId, (.Tags[] | select(.Key == "Name")).Value ] | @csv' # custom describe-instances w/ jq
aws ec2 describe-instances --filters Name=instance-state-name,Values=running --query 'Reservations[].Instances[].Tags[?Key==`Name`].Value' --output text # custom describe-Instances to output Name
strace -c -e write,open,read,close -T -tt ./binary # strace / -c / -e / -T / -tt
git archive HEAD | docker build - # build from a git archive so .gitignore'd files are excluded (same effect as .dockerignore)
git show $REVISION:$FILENAME # How to get just one file from another branch
nc -u $HOSTNAME $PORT # check udp
nc -s $SRC_IP -p $SRC_PORT $DEST_HOSTNAME $DEST_PORT # check udp by specific ip and port
nc -l 10000 # launch tcp server by port:10000
nc -lu 10000 # launch udp server by port:10000
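# a small sketch pairing with the udp listener above: from another terminal, send one datagram (localhost and port 10000 assumed):
echo hello | nc -u -w1 localhost 10000 # the text should appear on the nc -lu 10000 side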
strace -p $PID; readlink /proc/$PID/fd/$FD; netstat -ane | grep $SOCKET; # analyze network I/O blocking
go build -i # dependency caching at go build
go build -x # debugging at go build
mpstat -P ALL 1 # view multi cpu stat
pidstat 1 # view cpu stat per process
iostat -xz 1 # view I/O performance per block device
curl -s -L https://github.com/mruby/mruby/pull/605.patch | git am - # merge a remote pull request from github by curl
curl -s -L https://github.com/masuidrive/mruby/commit/70b422a6f240a201993e75935b55a8dd497eb098.patch | git am - # cherry-pick a remote commit from github by curl
vim --startuptime /tmp/startuptime.txt # check start up time at vim
ssh-keygen -t ed25519 -f ~/.ssh/filename # generate ssh key
git checkout origin/master -- path/to/file # restore file or dir from a specific branch
rm -P file # overwrite 3 times before deleting the file (BSD rm)
ps eww # show environment variables per process
echo '[{"hoge": 1234}, {"fuga":5678}]' | http POST http://localhost:3000/endpoint # post with pipe json body by httpie
echo '[{"hoge": 1234}, {"fuga":5678}]' | curl -v -H "Content-type: application/json" -d @- http://localhost:3000/endpoint # post with pipe json body by curl
git stash pop --index # restore the stash including the staged (index) state
git stash -k # stash only the unstaged changes (--keep-index)
stty sane # fix broken terminal
rg --files-with-matches SEARCH_TEXT # show only filename
export $(< /etc/environment grep -v "^#" | xargs) # dynamic loading of env
[[ "${hoge}" =~ ^.*$ ]] || exit 1 # pattern matching test by shell
go clean -i -n github.com/motemen/ghq... # print clean up commands for the package installed via `go get`.
aws organizations list-accounts --query 'Accounts[?Name==`account-name`]' # how to use jmespath
git log -1 --format=%H # get the current commit hash, e.g. to use as a Docker image tag
echo "hogefugapiyo" | grep -q -E "fu|po|pi" # check with egrep to see if it exists
/usr/bin/rkhunter --update -q || true # when the exit code is non-zero
find /home -type f -exec grep 'password' '{}' \; -exec cp '{}' . \; # find w/ bulk exec (1)
find . -type f -exec printf "[%s]\n" {} + # find w/ bulk exec (2)
find . -type f -regextype posix-egrep -regex ".*\.(csv|txt)" ! -regex ".*/ruby/2\.0/.*" # find w/ regex
find . -type f | grep -e "\.txt$" # find w/ grep
find /aaa/bbb/ccc | grep -v -E \^\[!-~\]\+\$ | tr '\n' '\0' | xargs -0 -I {} mv {} /xxx/yyy/zzz # find w/ xargs (1)
find -print0 | xargs -0 # find w/ xargs (2)
find -print0 | xargs -0 -P 4 # find w/ xargs (3) multi process version
command < data.in &>> result.out # append log with stdout and stderr
command < data.in | tee -a result.out # append log by tee
tar tvf layer.tar # look at the inside of a tar
cat event.json |grep -oP 'event_type":".*?"' | sort | uniq -c | sort -nr # gnu grep w/ pcre
pgrep -f -l tmux # grep pid and full command line
git tag -d 0.6.3 && git push origin :0.6.3 # delete tag
cat sample.json | grep -P '"key":".*?"' # GNU grep w/ pcre
lsof -iTCP # show TCP connection
lsof -iUDP # show UDP connection
lsof -U # show Unix Domain Socket connection
lsof [email protected] -n # show a specific IP's connections
tar -czh . | docker build - # build with docker following symlinks (tar -h includes their targets)
git clone https://github.com/ktrysmt/dotfiles --recursive # git clone including submodules
git submodule add --depth 1 https://github.com/ktrysmt/dotfiles dir # submodule add
git submodule update # submodule update just only
git submodule update --init # submodule update when did git clone without --recursive
git submodule deinit dir && git rm -rf dir # submodule delete
echo "127.0.0.1 local.example.com" | sudo tee -a /etc/hosts # pipe and sudo
read -s pass ; perl -i -pe "s/TARGET_STRING/$pass/" /path/to/file # Safely replace strings
export AWS_ACCESS_KEY_ID=XXXX; export AWS_SECRET_ACCESS_KEY=YYYY; export AWS_DEFAULT_REGION=ap-northeast-1; export AWS_SESSION_TOKEN=ZZZZ; # use aws envs here
curl -fsSL URL # general one step curl command
openssl s_client -cipher ALL -servername github.com -connect github.com:443 # verify cipher
awk '{$1="";print $0}' file.txt # drop the first field
awk '{$NF="";print $0}' file.txt # drop the last field
sudo echo; curl "https://awscli.amazonaws.com/AWSCLIV2.pkg" -o awscliv2.pkg && sudo installer -pkg ./awscliv2.pkg -target / && rm awscliv2.pkg # one-liner to upgrade awscliv2 on macos
git show b69d8d:.zshrc | bat -l sh # show an old revision of a file
ctags -R --languages=ruby --exclude=.git --exclude=log . $(bundle list --paths) -f .tags # example code of universal ctags
set -a && source .env && set +a # export data of .env on the fly
while [ ! -f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done # wait cloud-init
ruby -rjson -ryaml -e 'print YAML.load(STDIN.read).to_json' | jq . # yaml2json w/ jq (1)
yq r deployment.yaml -j -P | jq . > deployment.json # yaml2json w/ jq (2)
ruby -rjson -ryaml -e 'print JSON.parse(STDIN.read).to_yaml' < sample.json > sample.yaml # json2yaml
echo "/Place/='http://www.google.com'" | xargs -d'\n' echo # skip escape: output: /Place/='http://www.google.com'
pbpaste | jq '.c | fromjson' # jq w/ escaped json { "c" : "{\"id\":\"hoge\",\"parent\":\"abc\"}\n" }
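# a runnable variant of the fromjson line above using printf instead of pbpaste (same assumed payload as in the comment):
printf '%s' '{ "c" : "{\"id\":\"hoge\",\"parent\":\"abc\"}\n" }' | jq '.c | fromjson' # yields {"id":"hoge","parent":"abc"}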
cat log.txt.gz.txt | jq -c ".message | fromjson | {timestamp:.timestamp[:16], event_type:.event_type, flow_id:.flow_id, alert_signature:.alert.signature, alert_category:.alert.category}" | sort | uniq -c | sort -nr # jq w/ substr and put nested data
curl -X POST -H "Content-Type: application/json" -d '{"Name":"sensuikan1973", "Age":"100"}' localhost:8080/api/v1/users # curl with POST method
ls | egrep '.*_test\.(py|cc)' | xargs -d $'\n' -- rm # Remove Python and C++ unit tests
tmux set-window-option synchronize-panes on # synchronize panes on the tmux window
git cherry-pick -n [COMMIT HASH] # prevent auto commit when cherry-picking
tcpdump -tttt -l -i eth0 -A -n -s 0 dst port 3306 | grep -iE "select|update|delete|insert" # https://memo.yuuk.io/entry/2018/01/25/221111
EXITCODE=$(set +e ; command ; echo $?) # capture a command's exit code for later use
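# a hedged usage sketch for the pattern above: branch on the captured code later without aborting a set -e script (the failing command is just an example):
EXITCODE=$(set +e; ls /no/such/path >/dev/null 2>&1; echo $?); [ "$EXITCODE" -ne 0 ] && echo "failed with $EXITCODE"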
echo 'abcde' | awk 'match($0, /b.*/) {print substr($0, RSTART, RLENGTH)}' # match and substr with awk
echo 'abcde' | grep -o 'b.*' # match and substr with grep
scp -o ProxyCommand='ssh -i ~/.ssh/bastion.pem -W %h:%p username@global-ip' username@private-ip:~/path/to/file ./ # scp via bastion
echo '甲は乙に' | tr 甲乙 乙甲 # replace by tr
for i in `seq 1 50000`; do logger -t local7 `date +"%T.%3N"` ;sleep 0.01; done # benchmark syslog
wget https://ip-ranges.amazonaws.com/ip-ranges.json -O - | jq -r '.prefixes[] | select(.region == "ap-northeast-1") | select(.service == "CLOUDFRONT")' # jq example (cloudfront)
aws ec2 describe-instances --filters Name=instance-state-name,Values=running --query 'Reservations[].Instances[].InstanceId' --output text # filter and jmespath w/ instance id
aws ec2 describe-instances --query 'Reservations[].Instances[?((Tags[?Key==`Name`].Value|[0]==`app`) || (Tags[?Key==`Env`].Value|[0]==`production`))][] | [][InstanceId]' --output text # multi expression by jmespath
ssh-add ~/.ssh/target.pem; ssh -A -o ProxyCommand='ssh -i ~/.ssh/bastion.pem -W %h:%p ec2-user@<bastion-global-ip>' ec2-user@<target-private-ip> w # go straight through the bastion (runs 'w' on the target)
echo 1 2 3 4 5 | xargs -n2 # xargs with multi arg
TOKEN=`curl -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 21600"` && curl -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/ # higher level metadata in ec2
cat file.json | jq -r -c 'select(.Stdout| test("No packages marked for update")) | .InstanceId' # jq w/ select and extract another field
git log --patch --pickaxe-all -S "hogehoge" # search from git log
git log --reverse --patch --pickaxe-all -S "hogehoge" # search from git log with reverse
git grep -w 'text' $(git rev-list --all) # grep in all hashes
bash -c 'echo "hoge fuga" | xargs -n 1 echo "m:"' # xargs -n 1
echo "${json_datas}" | jq -r ".[]" | while read -r data; do # json with jq to each
IFS="=" read key value <<< "$data" # data="key1=value1" # split by IFS