Skip to content

Commit e892c74

Browse files
author
ops
committed
feat(llm_api): change litellm to unionllm; fix webhook bug on WEBHOOK_VERIFY_TOKEN; fix load_api bug.
1 parent b94107d commit e892c74

File tree

4 files changed

+16
-14
lines changed

4 files changed

+16
-14
lines changed

app/gitlab_webhook.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
import threading
33
from os import abort
44
from flask import Blueprint, request, jsonify
5-
from config.config import WEBHOOK_VERIFY_TOKEN
5+
# from config.config import WEBHOOK_VERIFY_TOKEN
66
from service.chat_review import review_code, review_code_for_mr, review_code_for_add_commit
77
from utils.logger import log
88
from app.gitlab_utils import get_commit_list, get_merge_request_id, get_commit_change_file
@@ -23,10 +23,10 @@ def webhook():
2323
webhook_token = request.headers.get('X-Gitlab-Token')
2424

2525
# gitlab的webhook的token验证
26-
if webhook_token == WEBHOOK_VERIFY_TOKEN:
27-
return jsonify({'status': 'success'}), 200
28-
else:
29-
return jsonify({'status': 'bad token'}), 401
26+
# if webhook_token == WEBHOOK_VERIFY_TOKEN:
27+
return jsonify({'status': 'success'}), 200
28+
#else:
29+
# return jsonify({'status': 'bad token'}), 401
3030

3131
elif request.method == 'POST':
3232
"""

llm_api/llm_api_default.py

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -5,30 +5,32 @@
55
from config.config import api_config as out_config
66
from llm_api.llm_api_interface import LLMApiInterface
77
from llm_api.load_api import create_llm_api_instance
8+
from unionllm import unionchat
89

910

1011
class LLMApiDefault(LLMApiInterface):
1112

1213
def __init__(self):
1314
self.model_name = None
1415
self.response = None
16+
self.provider = None
1517

1618
def set_config(self, api_config: dict) -> bool:
1719
if api_config is None:
1820
raise ValueError("api_config is None")
1921
for key in api_config:
2022
if key == "MODEL_NAME":
2123
self.model_name = api_config[key]
22-
continue
24+
if key == "PROVIDER":
25+
self.provider = api_config[key]
2326
os.environ[key] = api_config[key]
2427
return True
2528

2629
def generate_text(self, messages: list) -> bool:
2730

28-
self.response = completion(
29-
model=self.model_name,
30-
messages=messages,
31-
)
31+
self.response = unionchat(provider=self.provider, model=self.model_name,
32+
messages=messages)
33+
3234
return True
3335

3436
def get_respond_content(self) -> str:
@@ -38,8 +40,6 @@ def get_respond_tokens(self) -> int:
3840
return trunc(int(self.response['usage']['total_tokens']))
3941

4042

41-
42-
4343
# 示例使用
4444
if __name__ == "__main__":
4545
api = create_llm_api_instance()
@@ -54,4 +54,3 @@ def get_respond_tokens(self) -> int:
5454
])
5555
print(api.get_respond_content())
5656
print(api.get_respond_tokens())
57-

llm_api/load_api.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,13 +2,15 @@
22

33
from config.config import llm_api_impl
44

5+
56
def get_llm_api_class():
67
module_name, class_name = llm_api_impl.rsplit('.', 1)
78
module = importlib.import_module(module_name)
89
cls = getattr(module, class_name)
910
return cls
1011

12+
1113
# 使用工厂函数获取类实例
1214
def create_llm_api_instance():
1315
cls = get_llm_api_class()
14-
return cls()
16+
return cls()

requirements.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ tqdm==4.66.1
5151
trio==0.22.2
5252
trio-websocket==0.11.1
5353
tzdata==2023.3
54+
unionllm==0.1.23
5455
urllib3==2.0.5
5556
Werkzeug==2.3.7
5657
wsproto==1.2.0

0 commit comments

Comments (0)