
go - too many open files when serving HTTP


I have the following code:

package main

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"strings"
	"time"

	httprouter "github.com/fasthttp/router"
	"github.com/gorilla/mux"
	"github.com/valyala/fasthttp"
)

func main() {
	router := mux.NewRouter().StrictSlash(true)
	router.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		_, _ = fmt.Fprintf(w, "Hello!!!")
	})

	router.HandleFunc("/{name}", func(w http.ResponseWriter, r *http.Request) {
		vars := mux.Vars(r)
		prepare(w, r, vars["name"])
	}).Methods("POST")

	log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", 8080), router))
}

// using fasthttp instead of net/http
func _() {
	router := httprouter.New()
	router.GET("/", func(ctx *fasthttp.RequestCtx) {
		_, _ = fmt.Fprintf(ctx, "Hello!!!")
	})
	router.POST("/:name", func(ctx *fasthttp.RequestCtx) {
		prepareRequest(ctx, ctx.UserValue("name").(string))
	})

	log.Fatal(fasthttp.ListenAndServe(fmt.Sprintf(":%d", 8080), router.Handler))
}

// prepareRequest is the fasthttp counterpart of prepare; stubbed here so
// the sample compiles.
func prepareRequest(ctx *fasthttp.RequestCtx, name string) {}

// func prepare(ctx *fasthttp.RequestCtx, name string) // fasthttp variant
func prepare(w http.ResponseWriter, r *http.Request, name string) {
	// other parts of the code, then the call to the goroutines
	var urls []string
	// let's say all the URLs are loaded: call the goroutine func, wait for
	// the channel to respond, then proceed with the responses of all URLs
	results := callUrls(urls) // at least 10 URLs are called simultaneously for each request, every time
	process(w, results)
}

type Response struct {
	status int
	url    string
	body   string
}

// somePostData stands in for the real JSON payload posted to each URL
// (the original post elides it).
var somePostData []byte

func callUrls(urls []string) []*Response {
	ch := make(chan *Response, len(urls))
	for _, url := range urls {
		go func(url string) {
			// HTTP POST to the url; based on the status code of the call,
			// build the Response. Something like:

			req, err := http.NewRequest("POST", url, bytes.NewBuffer(somePostData))
			if err != nil {
				ch <- &Response{0, url, err.Error()}
				return
			}
			req.Header.Set("Content-Type", "application/json")
			req.Close = true

			client := &http.Client{
				Timeout: 100 * time.Millisecond,
			}

			response, err := client.Do(req)

			// Using the fasthttp client instead:
			/*req := fasthttp.AcquireRequest()
			req.SetRequestURI(url)
			req.Header.Set("Content-Type", "application/json")
			req.Header.SetMethod("POST")
			req.SetBody(somePostData)

			response := fasthttp.AcquireResponse()
			client := &fasthttp.Client{
				ReadTimeout: 100 * time.Millisecond,
			}
			err := client.Do(req, response)*/

			if err != nil {
				// the call failed; there is no body to drain or close
				ch <- &Response{0, url, err.Error()}
				return
			}

			// success: read the body first, then drain and close it so the
			// underlying connection can be reused
			body, _ := ioutil.ReadAll(response.Body)
			_, _ = io.Copy(ioutil.Discard, response.Body)
			_ = response.Body.Close()

			strBody := string(body)
			strBody = strings.Replace(strBody, "\r", "", -1)
			strBody = strings.Replace(strBody, "\n", "", -1)

			// return the result to the channel accordingly
			ch <- &Response{response.StatusCode, url, strBody}
		}(url)
	}

	// collect exactly one response per URL, then close the channel
	var results []*Response
	for range urls {
		results = append(results, <-ch)
	}
	close(ch)
	return results
}

// func process(ctx *fasthttp.RequestCtx, results []*Response) // fasthttp variant
func process(w http.ResponseWriter, results []*Response) {
	fmt.Println("response", "response body")
}

After handling only a few requests on a multi-core CPU (at roughly 4000-6000 requests per second), I start getting "too many open files" errors, and both response time and CPU usage shoot past acceptable limits. (Could the CPU be high because I convert the bytes to a string several times just to replace a few characters? Any suggestions?)
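On the string-handling point: each strings.Replace call copies the entire body, so chaining two of them per URL does add avoidable allocations, though that alone is unlikely to explain the CPU spike. Below is a minimal sketch of doing both substitutions in a single pass with a shared strings.NewReplacer (the sample payload is made up):

package main

import (
	"fmt"
	"strings"
)

// A single Replacer performs all substitutions in one pass over the input
// instead of allocating an intermediate string per Replace call. It is
// safe for concurrent use, so one instance can be shared by every goroutine.
var newlineReplacer = strings.NewReplacer("\r", "", "\n", "")

func main() {
	body := []byte("{\"status\":\"ok\"}\r\n")
	fmt.Println(newlineReplacer.Replace(string(body)))
}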

I saw another question about closing the request/response body and/or raising the sysctl or ulimit values. I followed those suggestions, but I still keep running into the error.

Configuration on the server:
/etc/sysctl.conf: net.ipv4.tcp_tw_recycle = 1
open files (-n): 65535

I need the code to respond within milliseconds, but when the CPU is high it takes up to 50 seconds.

I have tried both net/http and fasthttp with no improvement. A Node.js script using the request npm package does all of this flawlessly on the same server. What is the best way to handle these connections, or what should I change in the code?
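One detail worth noting in the code above: every goroutine builds a fresh http.Client and sets req.Close = true, both of which defeat keep-alive, so each of the ~10 calls per incoming request opens a new socket that then lingers in TIME_WAIT; this is exactly the pattern that exhausts file descriptors. A minimal sketch of the usual alternative, one shared client with a tuned Transport (the limits below are illustrative assumptions, not tuned values):

package main

import (
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"time"
)

// One client for the whole process: its Transport keeps idle connections
// open so repeated calls to the same hosts reuse sockets instead of
// opening a new one per call.
var sharedClient = &http.Client{
	Timeout: 100 * time.Millisecond,
	Transport: &http.Transport{
		MaxIdleConnsPerHost: 100, // the default of 2 is far too low for this workload
		IdleConnTimeout:     90 * time.Second,
	},
}

func callOnce(url string) error {
	resp, err := sharedClient.Get(url)
	if err != nil {
		return err
	}
	// Draining and closing the body is what releases the connection back
	// to the pool for reuse.
	defer resp.Body.Close()
	_, err = io.Copy(ioutil.Discard, resp.Body)
	return err
}

func main() {
	if err := callOnce("http://127.0.0.1:8080/"); err != nil {
		log.Println("call failed:", err)
	}
}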

Best Answer

You can use the following library:

Requests: a Go library for reducing the headache when making HTTP requests (20k requests per second)

https://github.com/alessiosavi/Requests

It was developed to work around the "too many open files" problem that comes with handling parallel requests.

The idea is to allocate a list of requests, then send them with a configurable "parallel" factor that allows only "N" requests to run at a time.
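To make that idea concrete without depending on the library, here is a minimal sketch of the same bounded-concurrency pattern using a buffered channel as a semaphore (an illustration of the pattern, not the library's actual implementation):

package main

import (
	"fmt"
	"sync"
)

// runBounded runs job once per URL but allows at most n executions in
// flight at a time: sending into the buffered channel blocks once n
// goroutines already hold a slot.
func runBounded(urls []string, n int, job func(string)) {
	sem := make(chan struct{}, n)
	var wg sync.WaitGroup
	for _, u := range urls {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot (blocks when n are in flight)
		go func(u string) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot
			job(u)
		}(u)
	}
	wg.Wait()
}

func main() {
	urls := []string{"u1", "u2", "u3", "u4", "u5"}
	runBounded(urls, 2, func(u string) { fmt.Println("calling", u) })
}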

Initialize the requests (you already have your set of URLs):

// This array will contain the list of requests
var reqs []requests.Request

// N is the number of requests to run in parallel; to avoid "too many
// open files", N has to stay below the ulimit threshold
var N int = 12

// Create the list of requests
for i := 0; i < 1000; i++ {
	// In this case, we init 1000 requests with the same URL, METHOD, BODY, HEADERS
	req, err := requests.InitRequest("https://127.0.0.1:5000", "GET", nil, nil, true)
	if err != nil {
		// The request is not compliant and will not be added to the list
		log.Println("Skipping request [", i, "]. Error: ", err)
	} else {
		// If no error occurs, append the request to the list of requests to send
		reqs = append(reqs, *req)
	}
}

At this point, we have a list containing the requests that have to be sent.
Let's send them in parallel!

// This array will contain the responses from the given requests
var response []datastructure.Response

// Send the requests, using N as the parallelism factor
response = requests.ParallelRequest(reqs, N)

// Print the responses
for i := range response {
	// Dump is a method that prints every piece of information related to the response
	log.Println("Request [", i, "] -> ", response[i].Dump())
	// Or use the data present in the response directly
	log.Println("Headers: ", response[i].Headers)
	log.Println("Status code: ", response[i].StatusCode)
	log.Println("Time elapsed: ", response[i].Time)
	log.Println("Error: ", response[i].Error)
	log.Println("Body: ", string(response[i].Body))
}

You can find example usage in the example folder of the repository.

SPOILER:

I am the author of this little library.

Regarding "go - too many open files when serving HTTP", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/57621449/
