Assorted request-handling fixes and binary build optimization

This commit is contained in:
2026-01-20 11:34:07 +03:00
parent c5671add06
commit 2355623c38
7 changed files with 57 additions and 56 deletions

View File

@@ -71,11 +71,13 @@ func NewOpenAIAPI(baseURL, token, model string) *OpenAIAPI {
if err != nil {
logger.Error(err)
}
t := &http.Transport{}
if proxy.Host != "" {
t.Proxy = http.ProxyURL(proxy)
}
client := &http.Client{
Timeout: 15 * time.Second,
Transport: &http.Transport{
Proxy: http.ProxyURL(proxy),
},
Timeout: 5 * time.Minute,
Transport: t,
}
return &OpenAIAPI{
Token: token,
@@ -97,36 +99,53 @@ type CreateCompletionReq struct {
MaxCompletionTokens int `json:"max_completition_tokens,omitempty"`
}
func (o *OpenAIAPI) CreateCompletion(request CreateCompletionReq) (*OpenAIResponse, error) {
u := fmt.Sprintf("%s/v1/chat/completions", o.BaseURL)
request.Model = o.Model
data, err := json.Marshal(request)
o.Logger.Debug("REQ", u, string(data))
// DoRequest marshals params to JSON and POSTs it to url using the client's
// configured *http.Client, returning the raw response body. A Bearer token
// header is added when o.Token is non-empty.
//
// Responses with status 504 or 400 (the upstream gateway's transient failure
// modes) are retried after a 5 s pause. The original implementation recursed
// with no limit, which loops forever against a persistently failing upstream;
// retries are now bounded. After the final attempt — and for every other
// status code, as before — the body is returned with a nil error, so callers
// keep their existing decode-and-inspect behavior.
func (o *OpenAIAPI) DoRequest(url string, params any) ([]byte, error) {
	data, err := json.Marshal(params)
	if err != nil {
		return nil, err
	}
	const maxAttempts = 5
	for attempt := 1; ; attempt++ {
		// A fresh request per attempt: the body reader is consumed by Do,
		// so it cannot be reused across retries.
		req, err := http.NewRequest("POST", url, bytes.NewBuffer(data))
		if err != nil {
			return nil, err
		}
		req.Header.Set("Content-Type", "application/json")
		if o.Token != "" {
			req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", o.Token))
		}
		res, err := o.client.Do(req)
		if err != nil {
			return nil, err
		}
		if (res.StatusCode == 504 || res.StatusCode == 400) && attempt < maxAttempts {
			// Drain and close so the transport can reuse the connection
			// (the original leaked the body into a deferred close per
			// recursion level without draining it).
			_, _ = io.Copy(io.Discard, res.Body) // best-effort drain
			res.Body.Close()
			time.Sleep(5 * time.Second)
			continue
		}
		body, err := io.ReadAll(res.Body)
		res.Body.Close()
		return body, err
	}
}
// CreateCompletion posts request to the /v1/chat/completions endpoint,
// overriding request.Model with the client's configured model, and decodes
// the JSON reply into an OpenAIResponse. The request and response bodies are
// logged at debug level.
func (o *OpenAIAPI) CreateCompletion(request CreateCompletionReq) (*OpenAIResponse, error) {
	endpoint := fmt.Sprintf("%s/v1/chat/completions", o.BaseURL)
	request.Model = o.Model

	// Marshal up front purely so the outgoing payload can be logged before
	// the call; DoRequest performs its own marshal of the same value.
	payload, err := json.Marshal(request)
	if err != nil {
		return nil, err
	}
	o.Logger.Debug("REQ", endpoint, string(payload))

	raw, err := o.DoRequest(endpoint, request)
	if err != nil {
		return nil, err
	}
	o.Logger.Debug("RES", endpoint, string(raw))

	out := new(OpenAIResponse)
	err = json.Unmarshal(raw, out)
	return out, err
@@ -144,39 +163,17 @@ func (o *OpenAIAPI) CompressChat(history []Message) (*OpenAIResponse, error) {
}
u := fmt.Sprintf("%s/v1/chat/completions", o.BaseURL)
data, err := json.Marshal(request)
if err != nil {
return nil, err
}
o.Logger.Debug("COMPRESS REQ", u, string(data))
if err != nil {
return nil, err
}
buf := bytes.NewBuffer(data)
req, err := http.NewRequest("POST", u, buf)
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
if o.Token != "" {
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", o.Token))
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
if resp.StatusCode == 504 || resp.StatusCode == 400 {
time.Sleep(5 * time.Second)
resp, err = http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
body, err := o.DoRequest(u, request)
if err != nil {
return nil, err
}
o.Logger.Debug("COMPRESS RES", u, string(body))
response := new(OpenAIResponse)
err = json.Unmarshal(body, response)
return response, err