Mirror of https://github.com/open-compass/opencompass.git
add & update openai models
commit 794ab7c372 (parent 2abf6ca795)
@@ -3,10 +3,13 @@ from mmengine.config import read_base
with read_base():
    from opencompass.configs.datasets.OpenHuEval.HuMatchingFIB.HuMatchingFIB import hu_matching_fib_datasets

    from opencompass.configs.models.openai.gpt_4o_mini_20240718 import models as gpt_4o_mini_20240718_model
    from opencompass.configs.models.qwen2_5.lmdeploy_qwen2_5_7b_instruct import models as lmdeploy_qwen2_5_7b_instruct_model
    from opencompass.configs.models.hf_internlm.lmdeploy_internlm3_8b_instruct import models as lmdeploy_internlm3_8b_instruct_model
    # from opencompass.configs.models.openai.gpt_4o_mini_20240718 import models as gpt_4o_mini_20240718_model
    # from opencompass.configs.models.qwen2_5.lmdeploy_qwen2_5_7b_instruct import models as lmdeploy_qwen2_5_7b_instruct_model
    # from opencompass.configs.models.hf_internlm.lmdeploy_internlm3_8b_instruct import models as lmdeploy_internlm3_8b_instruct_model

    # from opencompass.configs.models.qwq.lmdeploy_qwq_32b_preview import models as lmdeploy_qwq_32b_preview_model
    from opencompass.configs.models.openai.o1_mini_2024_09_12 import models as o1_mini_2024_09_12_model
    # from opencompass.configs.models.openai.o3_mini_2025_01_31 import models as o3_mini_2025_01_31_model

datasets = hu_matching_fib_datasets
models = sum([v for k, v in locals().items() if k.endswith('_model')], [])
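For readers unfamiliar with this OpenCompass config idiom, here is a minimal, self-contained sketch of what the locals()-based aggregation above does. The *_model variables below are stand-ins for the real read_base() imports, trimmed to a single field each:

# Each `*_model` variable bound by the read_base() imports is itself a list of
# model dicts; sum(..., []) concatenates those lists into one flat `models` list.
gpt_4o_mini_20240718_model = [dict(abbr='gpt-4o-mini-2024-07-18')]
o1_mini_2024_09_12_model = [dict(abbr='o1-mini-2024-09-12')]

models = sum([v for k, v in locals().items() if k.endswith('_model')], [])
print([m['abbr'] for m in models])  # ['gpt-4o-mini-2024-07-18', 'o1-mini-2024-09-12']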
@@ -12,6 +12,7 @@ models = [
        path='gpt-4o-mini-2024-07-18',
        key='ENV',  # The key will be obtained from $OPENAI_API_KEY, but you can write down your key here as well
        openai_proxy_url='ENV',
        meta_template=api_meta_template,
        query_per_second=1,
        max_out_len=2048,
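A brief note on the key='ENV' / openai_proxy_url='ENV' convention in these model configs: the sentinel tells the OpenCompass OpenAI wrapper to read the real value from the environment at runtime rather than from the file. The helper below is only an illustrative sketch of that idea, not the library's implementation, and covers just the API key (the inline comment documents $OPENAI_API_KEY; the proxy URL presumably follows a similar convention):

import os

def resolve_openai_key(key: str) -> str:
    # 'ENV' is a placeholder: fetch the real key from $OPENAI_API_KEY at runtime,
    # so secrets never need to be written into the config file.
    if key == 'ENV':
        return os.environ['OPENAI_API_KEY']
    return key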
@@ -12,6 +12,8 @@ models = [
        path='o1-mini-2024-09-12',
        key='ENV',  # The key will be obtained from $OPENAI_API_KEY, but you can write down your key here as well
        openai_proxy_url='ENV',
        verbose=True,
        meta_template=api_meta_template,
        query_per_second=1,
        batch_size=1,
@@ -12,6 +12,7 @@ models = [
        path='o1-preview-2024-09-12',
        key='ENV',  # The key will be obtained from $OPENAI_API_KEY, but you can write down your key here as well
        openai_proxy_url='ENV',
        meta_template=api_meta_template,
        query_per_second=1,
        batch_size=1,
opencompass/configs/models/openai/o3_mini_2025_01_31.py (new file, 22 lines)
@@ -0,0 +1,22 @@
from opencompass.models import OpenAISDK

api_meta_template = dict(round=[
    dict(role='HUMAN', api_role='HUMAN'),
    dict(role='BOT', api_role='BOT', generate=True),
], )

models = [
    dict(
        abbr='o3-mini-2025-01-31',
        type=OpenAISDK,
        path='o3-mini-2025-01-31',
        key='ENV',  # The key will be obtained from $OPENAI_API_KEY, but you can write down your key here as well
        openai_proxy_url='ENV',
        verbose=True,
        meta_template=api_meta_template,
        query_per_second=1,
        batch_size=1,
        temperature=1,
        max_completion_tokens=8192),  # you can raise this to allow for larger reasoning inference cost; see https://platform.openai.com/docs/guides/reasoning
]
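To evaluate against this new config, it would be imported in an eval config the same way as the other OpenAI entries, e.g. by uncommenting the o3_mini import shown in the first hunk. A hedged sketch of that wiring (the surrounding lines mirror the first hunk; the run command assumes OpenCompass's usual run.py entry point):

from mmengine.config import read_base

with read_base():
    from opencompass.configs.datasets.OpenHuEval.HuMatchingFIB.HuMatchingFIB import hu_matching_fib_datasets
    # pulls in the `models` list defined in the new o3_mini_2025_01_31.py above
    from opencompass.configs.models.openai.o3_mini_2025_01_31 import models as o3_mini_2025_01_31_model

datasets = hu_matching_fib_datasets
models = sum([v for k, v in locals().items() if k.endswith('_model')], [])

# then, roughly: export OPENAI_API_KEY=... && python run.py <this config file>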
@@ -30,6 +30,7 @@ O1_MODEL_LIST = [
    'o1-mini-2024-09-12',
    'o1-preview',
    'o1-mini',
    'o3-mini-2025-01-31',
]
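For context, lists like O1_MODEL_LIST are typically used to branch on reasoning-style models, which accept max_completion_tokens rather than the older max_tokens request parameter. The sketch below only illustrates that idea; it is not the actual OpenAISDK code, and the list shows just the entries visible in the hunk above:

O1_MODEL_LIST = [
    'o1-mini-2024-09-12',
    'o1-preview',
    'o1-mini',
    'o3-mini-2025-01-31',
]

def completion_limit_kwargs(model: str, limit: int) -> dict:
    # Reasoning models take `max_completion_tokens`; other chat models take `max_tokens`.
    if model in O1_MODEL_LIST:
        return {'max_completion_tokens': limit}
    return {'max_tokens': limit}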
@@ -579,6 +580,7 @@ class OpenAISDK(OpenAI):
            verbose=verbose,
            max_completion_tokens=max_completion_tokens,
        )
        key = random.choice(self.keys)
        from openai import OpenAI

        # support multiple api_base for acceleration
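The hunk above mainly shows context around the new max_completion_tokens pass-through, but it hints at two patterns: a key is picked at random from self.keys per request, and the trailing comment mentions multiple api_base values for acceleration. A hedged sketch of that pattern with the official openai SDK (names are illustrative, not OpenCompass's real attributes):

import random
from openai import OpenAI

def make_client(keys: list[str], api_bases: list[str]) -> OpenAI:
    key = random.choice(keys)            # spread requests across several API keys
    base_url = random.choice(api_bases)  # "support multiple api_base for acceleration"
    return OpenAI(api_key=key, base_url=base_url)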