Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(vmess): add length mask (opt=4) #298

Merged
merged 1 commit into from
Nov 28, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 17 additions & 12 deletions proxy/vmess/aead.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,23 +17,24 @@ import (

type aeadWriter struct {
io.Writer
chunkSizeEncoder ChunkSizeEncoder
cipher.AEAD
nonce [32]byte
count uint16
}

// AEADWriter returns an AEAD writer.
func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte) io.Writer {
aw := &aeadWriter{Writer: w, AEAD: aead}
copy(aw.nonce[2:], iv[2:12])
func AEADWriter(w io.Writer, aead cipher.AEAD, iv []byte, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
aw := &aeadWriter{Writer: w, AEAD: aead, chunkSizeEncoder: chunkSizeEncoder}
copy(aw.nonce[2:], iv[2:aead.NonceSize()])
return aw
}

func (w *aeadWriter) Write(b []byte) (n int, err error) {
buf := pool.GetBuffer(chunkSize)
defer pool.PutBuffer(buf)

var lenBuf [lenSize]byte
lenBuf := make([]byte, w.chunkSizeEncoder.SizeBytes())
var writeLen, dataLen int

nonce := w.nonce[:w.NonceSize()]
Expand All @@ -44,7 +45,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {
}
dataLen = writeLen - w.Overhead()

binary.BigEndian.PutUint16(lenBuf[:], uint16(writeLen))
w.chunkSizeEncoder.Encode(uint16(writeLen), lenBuf)
binary.BigEndian.PutUint16(nonce[:2], w.count)

w.Seal(buf[:0], nonce, b[n:n+dataLen], nil)
Expand All @@ -63,6 +64,7 @@ func (w *aeadWriter) Write(b []byte) (n int, err error) {

type aeadReader struct {
io.Reader
chunkSizeDecoder ChunkSizeDecoder
cipher.AEAD
nonce [32]byte
count uint16
Expand All @@ -71,32 +73,35 @@ type aeadReader struct {
}

// AEADReader returns an AEAD reader.
func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte) io.Reader {
ar := &aeadReader{Reader: r, AEAD: aead}
copy(ar.nonce[2:], iv[2:12])
func AEADReader(r io.Reader, aead cipher.AEAD, iv []byte, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
ar := &aeadReader{Reader: r, AEAD: aead, chunkSizeDecoder: chunkSizeDecoder}
copy(ar.nonce[2:], iv[2:aead.NonceSize()])
return ar
}

func (r *aeadReader) read(p []byte) (int, error) {
if _, err := io.ReadFull(r.Reader, p[:lenSize]); err != nil {
if _, err := io.ReadFull(r.Reader, p[:r.chunkSizeDecoder.SizeBytes()]); err != nil {
return 0, err
}

size := int(binary.BigEndian.Uint16(p[:lenSize]))
size, err := r.chunkSizeDecoder.Decode(p[:r.chunkSizeDecoder.SizeBytes()])
if err != nil {
return 0, err
}
p = p[:size]
if _, err := io.ReadFull(r.Reader, p); err != nil {
return 0, err
}

binary.BigEndian.PutUint16(r.nonce[:2], r.count)
_, err := r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
_, err = r.Open(p[:0], r.nonce[:r.NonceSize()], p, nil)
r.count++

if err != nil {
return 0, err
}

return size - r.Overhead(), nil
return int(size) - r.Overhead(), nil
}

func (r *aeadReader) Read(p []byte) (int, error) {
Expand Down
29 changes: 16 additions & 13 deletions proxy/vmess/chunk.go
Original file line number Diff line number Diff line change
@@ -1,24 +1,23 @@
package vmess

import (
"encoding/binary"
"io"
"net"
)

const (
lenSize = 2
chunkSize = 16 << 10
)

type chunkedWriter struct {
io.Writer
buf [lenSize]byte
chunkSizeEncoder ChunkSizeEncoder
buf []byte
}

// ChunkedWriter returns a chunked writer.
func ChunkedWriter(w io.Writer) io.Writer {
return &chunkedWriter{Writer: w}
func ChunkedWriter(w io.Writer, chunkSizeEncoder ChunkSizeEncoder) io.Writer {
return &chunkedWriter{Writer: w, chunkSizeEncoder: chunkSizeEncoder, buf: make([]byte, chunkSizeEncoder.SizeBytes())}
}

func (w *chunkedWriter) Write(p []byte) (n int, err error) {
Expand All @@ -28,8 +27,7 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {
if dataLen > chunkSize {
dataLen = chunkSize
}

binary.BigEndian.PutUint16(w.buf[:], uint16(dataLen))
w.chunkSizeEncoder.Encode(uint16(dataLen), w.buf)
if _, err = (&net.Buffers{w.buf[:], p[n : n+dataLen]}).WriteTo(w.Writer); err != nil {
break
}
Expand All @@ -42,23 +40,28 @@ func (w *chunkedWriter) Write(p []byte) (n int, err error) {

type chunkedReader struct {
io.Reader
buf [lenSize]byte
left int
chunkSizeDecoder ChunkSizeDecoder
buf []byte
left int
}

// ChunkedReader returns a chunked reader.
func ChunkedReader(r io.Reader) io.Reader {
return &chunkedReader{Reader: r}
func ChunkedReader(r io.Reader, chunkSizeDecoder ChunkSizeDecoder) io.Reader {
return &chunkedReader{Reader: r, chunkSizeDecoder: chunkSizeDecoder}
}

func (r *chunkedReader) Read(p []byte) (int, error) {
if r.left == 0 {
// get length
_, err := io.ReadFull(r.Reader, r.buf[:lenSize])
_, err := io.ReadFull(r.Reader, r.buf[:r.chunkSizeDecoder.SizeBytes()])
if err != nil {
return 0, err
}
n, err := r.chunkSizeDecoder.Decode(r.buf[:])
if err != nil {
return 0, err
}
r.left = int(binary.BigEndian.Uint16(r.buf[:lenSize]))
r.left = int(n)

// if left == 0, then this is the end
if r.left == 0 {
Expand Down
60 changes: 60 additions & 0 deletions proxy/vmess/chunk_size_parser.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
package vmess

import (
"encoding/binary"
"golang.org/x/crypto/sha3"
)

// ChunkSizeEncoder encodes a chunk-size value into its wire-format bytes.
type ChunkSizeEncoder interface {
	// SizeBytes returns the number of bytes an encoded size occupies on the wire.
	SizeBytes() int32
	// Encode writes the encoded form of the given size into the supplied
	// buffer and returns the slice holding the encoded bytes.
	Encode(uint16, []byte) []byte
}

// ChunkSizeDecoder decodes a chunk-size value from its wire-format bytes.
type ChunkSizeDecoder interface {
	// SizeBytes returns the number of bytes an encoded size occupies on the wire.
	SizeBytes() int32
	// Decode parses the encoded bytes and returns the chunk size, or an
	// error if the bytes cannot be decoded.
	Decode([]byte) (uint16, error)
}

// ShakeSizeParser masks and unmasks 2-byte chunk sizes with a
// SHAKE128-derived keystream. It implements both ChunkSizeEncoder and
// ChunkSizeDecoder (VMess length obfuscation, opt=4). The keystream is
// seeded with a nonce in NewShakeSizeParser, so encoder and decoder
// stay in sync only when driven one chunk at a time in order.
type ShakeSizeParser struct {
	shake  sha3.ShakeHash // keystream source, seeded with the nonce
	buffer [2]byte        // scratch space for one 2-byte keystream read
}

// NewShakeSizeParser returns a ShakeSizeParser whose SHAKE128 keystream
// is seeded with the given nonce. Both peers must seed with the same
// nonce for the masked sizes to round-trip.
func NewShakeSizeParser(nonce []byte) *ShakeSizeParser {
	p := &ShakeSizeParser{shake: sha3.NewShake128()}
	// Seed the keystream. Per x/crypto/sha3 docs, Write on a ShakeHash
	// never returns an error.
	p.shake.Write(nonce)
	return p
}

// SizeBytes reports how many bytes an encoded chunk size occupies on
// the wire — always a 2-byte big-endian value for SHAKE masking.
func (*ShakeSizeParser) SizeBytes() int32 {
	const encodedSizeLen = 2
	return encodedSizeLen
}

// next draws the next two keystream bytes from the SHAKE128 state and
// returns them as a big-endian uint16 mask. Each call advances the
// keystream, so callers must invoke it exactly once per chunk.
func (s *ShakeSizeParser) next() uint16 {
	mask := s.buffer[:]
	// Per x/crypto/sha3 docs, Read on a ShakeHash never returns an error.
	s.shake.Read(mask)
	return binary.BigEndian.Uint16(mask)
}

// Decode unmasks the 2-byte big-endian size stored in b by XORing it
// with the next keystream mask and returns the real chunk size. The
// error is always nil; it exists to satisfy ChunkSizeDecoder.
func (s *ShakeSizeParser) Decode(b []byte) (uint16, error) {
	masked := binary.BigEndian.Uint16(b)
	return masked ^ s.next(), nil
}

// Encode XORs size with the next keystream mask, writes the result into
// b as a big-endian uint16, and returns the 2 bytes of b that hold it.
func (s *ShakeSizeParser) Encode(size uint16, b []byte) []byte {
	masked := size ^ s.next()
	binary.BigEndian.PutUint16(b, masked)
	return b[:2]
}

// NextPaddingLen draws the next keystream value and returns a padding
// length in the range [0, 64). Each call advances the keystream.
func (s *ShakeSizeParser) NextPaddingLen() uint16 {
	const paddingModulus = 64
	return s.next() % paddingModulus
}

// MaxPaddingLen reports the exclusive upper bound on the values that
// NextPaddingLen can return.
func (s *ShakeSizeParser) MaxPaddingLen() uint16 {
	const paddingModulus = 64
	return paddingModulus
}
21 changes: 13 additions & 8 deletions proxy/vmess/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ const (
OptBasicFormat byte = 0
OptChunkStream byte = 1
// OptReuseTCPConnection byte = 2
// OptMetadataObfuscate byte = 4
OptMetadataObfuscate byte = 4
)

// Security types
Expand Down Expand Up @@ -72,6 +72,9 @@ type Conn struct {
respBodyIV [16]byte
respBodyKey [16]byte

writeChunkSizeParser ChunkSizeEncoder
readChunkSizeParser ChunkSizeDecoder

net.Conn
dataReader io.Reader
dataWriter io.Writer
Expand All @@ -90,7 +93,7 @@ func NewClient(uuidStr, security string, alterID int, aead bool) (*Client, error
c.users = append(c.users, user.GenAlterIDUsers(alterID)...)
c.count = len(c.users)

c.opt = OptChunkStream
c.opt = OptChunkStream | OptMetadataObfuscate
c.aead = aead

security = strings.ToLower(security)
Expand Down Expand Up @@ -150,6 +153,8 @@ func (c *Client) NewConn(rc net.Conn, target string, cmd CmdType) (*Conn, error)
return nil, err
}
}
conn.writeChunkSizeParser = NewShakeSizeParser(conn.reqBodyIV[:])
conn.readChunkSizeParser = NewShakeSizeParser(conn.respBodyIV[:])

// Request
err = conn.Request(cmd)
Expand Down Expand Up @@ -292,12 +297,12 @@ func (c *Conn) Write(b []byte) (n int, err error) {
if c.opt&OptChunkStream == OptChunkStream {
switch c.security {
case SecurityNone:
c.dataWriter = ChunkedWriter(c.Conn)
c.dataWriter = ChunkedWriter(c.Conn, c.writeChunkSizeParser)

case SecurityAES128GCM:
block, _ := aes.NewCipher(c.reqBodyKey[:])
aead, _ := cipher.NewGCM(block)
c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)

case SecurityChacha20Poly1305:
key := pool.GetBuffer(32)
Expand All @@ -306,7 +311,7 @@ func (c *Conn) Write(b []byte) (n int, err error) {
t = md5.Sum(key[:16])
copy(key[16:], t[:])
aead, _ := chacha20poly1305.New(key)
c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:])
c.dataWriter = AEADWriter(c.Conn, aead, c.reqBodyIV[:], c.writeChunkSizeParser)
pool.PutBuffer(key)
}
}
Expand All @@ -328,12 +333,12 @@ func (c *Conn) Read(b []byte) (n int, err error) {
if c.opt&OptChunkStream == OptChunkStream {
switch c.security {
case SecurityNone:
c.dataReader = ChunkedReader(c.Conn)
c.dataReader = ChunkedReader(c.Conn, c.readChunkSizeParser)

case SecurityAES128GCM:
block, _ := aes.NewCipher(c.respBodyKey[:])
aead, _ := cipher.NewGCM(block)
c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)

case SecurityChacha20Poly1305:
key := pool.GetBuffer(32)
Expand All @@ -342,7 +347,7 @@ func (c *Conn) Read(b []byte) (n int, err error) {
t = md5.Sum(key[:16])
copy(key[16:], t[:])
aead, _ := chacha20poly1305.New(key)
c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:])
c.dataReader = AEADReader(c.Conn, aead, c.respBodyIV[:], c.readChunkSizeParser)
pool.PutBuffer(key)
}
}
Expand Down