rafthttp: smart batching

Improved the overall performance by more than 20% under heavy load,
with little latency impact.

heavy load
```
Requests/sec: ~23200  vs  Requests/sec: ~31500

Latency distribution:
  10% in 0.0883 secs.
  25% in 0.1022 secs.
  50% in 0.1207 secs.
  75% in 0.1460 secs.
  90% in 0.1647 secs.
  95% in 0.1783 secs.
  99% in 0.2223 secs.

vs

Latency distribution:
  10% in 0.1119 secs.
  25% in 0.1272 secs.
  50% in 0.1469 secs.
  75% in 0.1626 secs.
  90% in 0.1765 secs.
  95% in 0.1863 secs.
  99% in 0.2276 secs.
```

Similar on light load too.
This commit is contained in:
Xiang Li 2016-02-17 11:51:01 -08:00
parent 74382f56fb
commit e4f22cd6d8

View File

@@ -136,6 +136,7 @@ func (cw *streamWriter) run() {
 		t       streamType
 		enc     encoder
 		flusher http.Flusher
+		batched int
 	)
 	tickc := time.Tick(ConnReadTimeout / 3)
@@ -146,6 +147,7 @@ func (cw *streamWriter) run() {
 			err := enc.encode(linkHeartbeatMessage)
 			if err == nil {
 				flusher.Flush()
+				batched = 0
 				reportSentDuration(string(t), linkHeartbeatMessage, time.Since(start))
 				continue
 			}
@@ -159,7 +161,13 @@ func (cw *streamWriter) run() {
 			start := time.Now()
 			err := enc.encode(m)
 			if err == nil {
-				flusher.Flush()
+				if len(msgc) == 0 || batched > streamBufSize/2 {
+					flusher.Flush()
+					batched = 0
+				} else {
+					batched++
+				}
 				reportSentDuration(string(t), m, time.Since(start))
 				continue
 			}