The test code
The client
package main
import (
"net/http"
log "github.com/sirupsen/logrus"
"io/ioutil"
"fmt"
"bytes"
"sync"
_"time"
)
func main(a) {
var wg sync.WaitGroup
var count int
var rw sync.RWMutex
TEST:
for i := 0; i < 1; i++ {
wg.Add(1)
go func (a) {
defer wg.Done()
tr := http.Transport{DisableKeepAlives: false}
client := &http.Client{Transport: &tr}
for {
f, err := ioutil.ReadFile("data")
iferr ! =nil {
fmt.Println("read file err", err)
return
}
fmt.Println(len(f))
reader := bytes.NewReader(f)
rw.Lock()
count += 1
index := count
rw.Unlock()
resp, err := client.Post("http://0.0.0.0:8888"."application/x-www-form-urlencoded", reader)
iferr ! =nil {
rw.RLock()
currentCount := count
rw.RUnlock()
log.Fatal(err, index, currentCount)
}
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
iferr ! =nil {
log.Fatal(err)
}
log.Printf("data[%s]".string(data))
// time.Sleep(time.Second)
}
}()
}
wg.Wait()
goto TEST
}Copy the code
The service side
package main
import (
"fmt"
"io"
"io/ioutil"
"net/http"
"time"
"context"
)
type myHandler struct{}func (h myHandler)ServeHTTP(w http.ResponseWriter, r *http.Request) {
//print header
// fmt.Println("header", r.Header)
//debug body
_, err := ioutil.ReadAll(r.Body)
iferr ! =nil {
fmt.Println("read body error", err)
io.WriteString(w, "read you body error!")
return
}
// fmt.Println("data len", len(data))
io.WriteString(w, "goad it")
return
}
func main(a) {
// http.HandleFunc("/", myHandler)
// err := http.ListenAndServe("0.0.0.0:8888", nil)
// if err ! = nil {
// fmt.Println("ListenAndServe error", err)
// return
// }
server := &http.Server {
Addr: "0.0.0.0:8888",
Handler: myHandler{},
}
d := time.Duration(time.Second*10)
t := time.NewTimer(d)
defer t.Stop()
go func (a){
<- t.C
shutdown(server)
}()
server.ListenAndServe()
for {
fmt.Println(1)
time.Sleep(time.Second)
}
fmt.Println(2)
return
}
func shutdown(server *http.Server) {
ctx, cancel := context.WithTimeout(context.TODO(), 3600)
defer cancel()
server.Shutdown(ctx)
}
Copy the code
The experiment
Code:
Server execution location:
After this, the keepalive execution will be affected. (Actually, the server sends a TCP Keepalive every 15 seconds.)
func (s *Server) doKeepAlives() bool {
	return atomic.LoadInt32(&s.disableKeepAlives) == 0 && !s.shuttingDown()
}

func (s *Server) shuttingDown() bool {
	return atomic.LoadInt32(&s.inShutdown) != 0
}
DoKeepAlives () above executes after the server handler completes the request:
// In go1.10 net/http/server.go, around line 1845:
if !w.conn.server.doKeepAlives() {
	// We're in shutdown mode. We might've replied
	// to the user without "Connection: close" and
	// they might think they can send another
	// request, but such is life with HTTP/1.1.
	return
}
The server side is the location of the debug screenshot above
The client is just waiting for the data in the POST response
Caught phenomenon
You can see that the connection sent by the server disconnects, the client disconnects, and then re-initiates the request. The black area is ignored because the debugger stopped later.
In closeIdles
The execution status of the program is as follows, the top right is the server side debugging, the bottom left is the client state
Corresponding package:
Therefore, we can analyze the effect of different connection states when shutdown has been executed to close all current connections
1. Prepare for the connection
Connection refused
2.active
Wait idle and close the connection
3.idle
Direct initiated shutdown
4.closed
ignore
Copy the code
Are there really no problems?
At the beginning of the system call, there was no packet exchange between the server and the client, except for keepalive.
In the implementation:
After the system call in the figure
None of the packets above contain an RST because I was debugging and the client was not sending any packets at the time; below is a capture taken without the debugger attached.
Therefore, it is suspected that the mode of close causes this situation, so for long connections, if the client continues to send data may occur this situation.
To verify, I changed closeIdles’ code to only turn off server writes so that clients can send data
The TCP four-way close handshake now completes cleanly, but the client still receives an EOF, so the server's shutdown is still not graceful.
I changed it to sleep 500 milliseconds before shutting down
The red RST is ignored, which is a client-initiated reconnect
So the preliminary conclusion is that the server cannot be truly graceful.