re-implement data usage crawler to be more efficient (#9075)

Implementation overview: 

https://gist.github.com/klauspost/1801c858d5e0df391114436fdad6987b
Author: Klaus Post
Date: 2020-03-19 00:19:29 +01:00 (committed by GitHub)
Parent: 7fdeb44372
Commit: 8d98662633

61 changed files with 2895 additions and 543 deletions


@@ -21,7 +21,6 @@ import (
 	"encoding/json"
 	"errors"
 	"net/http"
-	gohttp "net/http"
 	"strings"
 
 	xhttp "github.com/minio/minio/cmd/http"
@@ -41,7 +40,7 @@ type Target struct {
 	// User-Agent to be set on each log request sent to the `endpoint`
 	userAgent string
 	logKind   string
-	client    gohttp.Client
+	client    http.Client
 }
 
 func (h *Target) startHTTPLogger() {
@@ -54,7 +53,7 @@ func (h *Target) startHTTPLogger() {
 				continue
 			}
 
-			req, err := gohttp.NewRequest(http.MethodPost, h.endpoint, bytes.NewBuffer(logJSON))
+			req, err := http.NewRequest(http.MethodPost, h.endpoint, bytes.NewBuffer(logJSON))
 			if err != nil {
 				continue
 			}
@@ -78,12 +77,12 @@ func (h *Target) startHTTPLogger() {
 
 // New initializes a new logger target which
 // sends log over http to the specified endpoint
-func New(endpoint, userAgent, logKind string, transport *gohttp.Transport) *Target {
+func New(endpoint, userAgent, logKind string, transport *http.Transport) *Target {
 	h := Target{
 		endpoint:  endpoint,
 		userAgent: userAgent,
 		logKind:   strings.ToUpper(logKind),
-		client: gohttp.Client{
+		client: http.Client{
 			Transport: transport,
 		},
 		logCh: make(chan interface{}, 10000),
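
For readers who want the end state rather than the raw hunks, below is a minimal sketch of the logger HTTP target after this cleanup, assembled from the diff above. The struct fields, the New signature, and the http.NewRequest call come straight from the hunks; the package layout, the User-Agent header call, the client.Do call, and the return at the end of New are not visible in the diff and are assumptions about the surrounding code.

package http

import (
	"bytes"
	"encoding/json"
	"net/http"
	"strings"
)

// Target sends each log entry as JSON to a configured HTTP endpoint.
// After this commit it uses the standard net/http package directly
// instead of the redundant gohttp alias.
type Target struct {
	// Buffered channel of pending log entries.
	logCh chan interface{}

	endpoint  string
	userAgent string
	logKind   string
	client    http.Client
}

// New initializes a new logger target which sends log entries
// over HTTP to the specified endpoint.
func New(endpoint, userAgent, logKind string, transport *http.Transport) *Target {
	h := Target{
		endpoint:  endpoint,
		userAgent: userAgent,
		logKind:   strings.ToUpper(logKind),
		client: http.Client{
			Transport: transport,
		},
		logCh: make(chan interface{}, 10000),
	}
	return &h
}

// startHTTPLogger drains the log channel and POSTs each entry to the
// endpoint; the request/response handling below is an assumed shape.
func (h *Target) startHTTPLogger() {
	go func() {
		for entry := range h.logCh {
			logJSON, err := json.Marshal(&entry)
			if err != nil {
				continue
			}

			req, err := http.NewRequest(http.MethodPost, h.endpoint, bytes.NewBuffer(logJSON))
			if err != nil {
				continue
			}
			// The userAgent field exists for exactly this purpose (see the
			// struct comment in the diff); the exact header call is assumed.
			req.Header.Set("User-Agent", h.userAgent)

			resp, err := h.client.Do(req)
			if err != nil {
				continue
			}
			// Close the body so the underlying connection can be reused.
			resp.Body.Close()
		}
	}()
}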