Writing the response back for an asynchronous (Ajax) request

    /**
     * Writes the response of an asynchronous (Ajax) request back to the client.
     * 
     * @param json the JSON string to write to the response
     * @throws ECApplicationException
     */
    private void writeAjax(String json) throws ECApplicationException {
        String methodName = "writeAjax";
        Writer writer = null;
        // Trace the outgoing JSON payload (FINE rather than SEVERE, since this is not an error)
        logger.logp(Level.FINE, CLASSNAME, methodName, json);
        try {
            // Obtain the response object; adjust this lookup to match the framework in use
            HttpServletResponse response = (HttpServletResponse) ((ViewCommandContext) this.context)
                    .getResponse();
            response.setContentType("text/html;charset=UTF-8");
            response.setHeader("Cache-Control", "no-cache");
            writer = response.getWriter();
            writer.write(json);
            writer.flush();
        } catch (Exception e) {
            logger.logp(Level.SEVERE, CLASSNAME, methodName, e.toString(), e);
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    logger.logp(Level.SEVERE, CLASSNAME, methodName, e.toString(), e);
                }
            }
        }
    }

I am trying to implement an HTTP server that:

- calculates a redirect target using some logic,
- redirects the user, and
- logs user data.

The goal is to achieve maximum throughput (at least 15k rps). In order to do this, I want to save the log asynchronously. I'm using Kafka as the logging system and have separated the logging code into its own goroutine. An overall example of the current implementation:

    package main

    import (
        "github.com/confluentinc/confluent-kafka-go/kafka"
        "net/http"
        "time"
        "encoding/json"
    )

    type log struct {
        RuntimeParam  string `json:"runtime_param"`
        AsyncParam    string `json:"async_param"`
        RemoteAddress string `json:"remote_address"`
    }

    var (
        producer, _ = kafka.NewProducer(&kafka.ConfigMap{
            "bootstrap.servers":      "localhost:9092,localhost:9093",
            "queue.buffering.max.ms": 1 * 1000,
            "go.delivery.reports":    false,
            "client.id":              1,
        })
        topicName = "log"
    )

    func main() {
        siteMux := http.NewServeMux()
        siteMux.HandleFunc("/", httpHandler)
        srv := &http.Server{
            Addr:         ":8080",
            Handler:      siteMux,
            ReadTimeout:  2 * time.Second,
            WriteTimeout: 5 * time.Second,
            IdleTimeout:  10 * time.Second,
        }
        if err := srv.ListenAndServe(); err != nil {
            panic(err)
        }
    }

    func httpHandler(w http.ResponseWriter, r *http.Request) {
        handlerLog := new(log)
        handlerLog.RuntimeParam = "runtimeDataString"

        http.Redirect(w, r, "http://google.com", 301)

        go func(goroutineLog *log, request *http.Request) {
            goroutineLog.AsyncParam = "asyncDataString"
            goroutineLog.RemoteAddress = r.RemoteAddr
            jsonLog, err := json.Marshal(goroutineLog)
            if err == nil {
                producer.ProduceChannel() <- &kafka.Message{
                    TopicPartition: kafka.TopicPartition{Topic: &topicName, Partition: kafka.PartitionAny},
                    Value:          jsonLog,
                }
            }
        }(handlerLog, r)
    }

The questions are:

1. Is it correct/efficient to use a separate goroutine to implement async logging, or should I use a different approach (for example, workers and channels, as sketched below)?
2. Is there a way to further improve the performance of the server that I'm missing?
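Question 1 mentions workers and channels as an alternative. The following is a minimal sketch, not part of the original question, of what that could look like: a bounded channel plus a fixed pool of logging goroutines, reusing the same confluent-kafka-go producer setup as above. The names logEntry, logCh, and startLogWorkers, the queue size, and the worker count are illustrative assumptions.

    // Hypothetical worker/channel variant; names and sizes are illustrative, not from the question.
    package main

    import (
        "encoding/json"
        "net/http"
        "time"

        "github.com/confluentinc/confluent-kafka-go/kafka"
    )

    type logEntry struct {
        RuntimeParam  string `json:"runtime_param"`
        AsyncParam    string `json:"async_param"`
        RemoteAddress string `json:"remote_address"`
    }

    var (
        // Producer error ignored here only to mirror the question's setup.
        producer, _ = kafka.NewProducer(&kafka.ConfigMap{
            "bootstrap.servers": "localhost:9092,localhost:9093",
        })
        topicName = "log"

        // A buffered channel decouples request handling from log delivery.
        logCh = make(chan logEntry, 10000)
    )

    // startLogWorkers starts a fixed pool of goroutines that drain logCh
    // and hand the serialized entries to the Kafka producer.
    func startLogWorkers(n int) {
        for i := 0; i < n; i++ {
            go func() {
                for entry := range logCh {
                    if payload, err := json.Marshal(entry); err == nil {
                        producer.ProduceChannel() <- &kafka.Message{
                            TopicPartition: kafka.TopicPartition{Topic: &topicName, Partition: kafka.PartitionAny},
                            Value:          payload,
                        }
                    }
                }
            }()
        }
    }

    func httpHandler(w http.ResponseWriter, r *http.Request) {
        http.Redirect(w, r, "http://google.com", http.StatusMovedPermanently)

        entry := logEntry{
            RuntimeParam:  "runtimeDataString",
            AsyncParam:    "asyncDataString",
            RemoteAddress: r.RemoteAddr,
        }
        select {
        case logCh <- entry: // enqueue without blocking the handler
        default: // queue full: drop the entry rather than slow the response
        }
    }

    func main() {
        startLogWorkers(4)

        siteMux := http.NewServeMux()
        siteMux.HandleFunc("/", httpHandler)
        srv := &http.Server{
            Addr:         ":8080",
            Handler:      siteMux,
            ReadTimeout:  2 * time.Second,
            WriteTimeout: 5 * time.Second,
            IdleTimeout:  10 * time.Second,
        }
        if err := srv.ListenAndServe(); err != nil {
            panic(err)
        }
    }

Compared with spawning one goroutine per request, this bounds the number of concurrent logging goroutines and lets the handler drop a log entry instead of blocking when the queue is full; whether dropping is acceptable depends on the logging requirements.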