From e4f22cd6d879eef9c247d742b0e318bd748535de Mon Sep 17 00:00:00 2001
From: Xiang Li
Date: Wed, 17 Feb 2016 11:51:01 -0800
Subject: [PATCH] rafthttp: smart batching

Improved the overall performance by more than 20% under heavy load
with little latency impact.

heavy load
```
Requests/sec: ~23200
vs
Requests/sec: ~31500

Latency distribution:
  10% in 0.0883 secs.
  25% in 0.1022 secs.
  50% in 0.1207 secs.
  75% in 0.1460 secs.
  90% in 0.1647 secs.
  95% in 0.1783 secs.
  99% in 0.2223 secs.
vs
Latency distribution:
  10% in 0.1119 secs.
  25% in 0.1272 secs.
  50% in 0.1469 secs.
  75% in 0.1626 secs.
  90% in 0.1765 secs.
  95% in 0.1863 secs.
  99% in 0.2276 secs.
```

Similar on light load too.
---
 rafthttp/stream.go | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/rafthttp/stream.go b/rafthttp/stream.go
index 4d91d5d61..5ac4d797d 100644
--- a/rafthttp/stream.go
+++ b/rafthttp/stream.go
@@ -136,6 +136,7 @@ func (cw *streamWriter) run() {
 		t       streamType
 		enc     encoder
 		flusher http.Flusher
+		batched int
 	)
 
 	tickc := time.Tick(ConnReadTimeout / 3)
@@ -146,6 +147,7 @@ func (cw *streamWriter) run() {
 			err := enc.encode(linkHeartbeatMessage)
 			if err == nil {
 				flusher.Flush()
+				batched = 0
 				reportSentDuration(string(t), linkHeartbeatMessage, time.Since(start))
 				continue
 			}
@@ -159,7 +161,13 @@ func (cw *streamWriter) run() {
 			start := time.Now()
 			err := enc.encode(m)
 			if err == nil {
-				flusher.Flush()
+				if len(msgc) == 0 || batched > streamBufSize/2 {
+					flusher.Flush()
+					batched = 0
+				} else {
+					batched++
+				}
+
 				reportSentDuration(string(t), m, time.Since(start))
 				continue
 			}