OpenCompass/configs/api_examples/eval_api_bailing.py
Yi Ding 3f833186dc
[Feature] Support the reasoning from BaiLing LLM (#1541)
* [Feature] Support the reasoning from BaiLing LLM

This commit adds access to the BaiLing LLM and retrieves its reasoning output.

* Add the api example

An example of evaluating the BaiLing API.

* Revise the generation arguments

Based on current experiments, we update some generation arguments for better reasoning.

* [fix] set the batch size

* Retry under flowcontrol of serverside

* add dependent package into requirement.txt

Add the dependent package `retrying` so the pre-commit check passes cleanly.

* correct the file names and make the file copy

correct the file names.
copy the files under configs to opencompass

* fix the lint issue

---------

Co-authored-by: christopher.dy <christopher.dy@antgroup.com>
2024-09-26 16:49:52 +08:00

39 lines
1009 B
Python

from mmengine.config import read_base
from opencompass.models import BailingAPI
from opencompass.partitioners import NaivePartitioner
from opencompass.runners.local_api import LocalAPIRunner
from opencompass.tasks import OpenICLInferTask
# Lazily import dataset and summarizer configs via mmengine's read_base();
# the imported names are merged into this config module's namespace.
with read_base():
    from opencompass.configs.datasets.ceval.ceval_gen import ceval_datasets
    from opencompass.configs.summarizers.medium import summarizer
# Evaluate on the full C-Eval benchmark suite.
datasets = list(ceval_datasets)
# Single API-backed model under evaluation: BaiLing-Lite via its chat endpoint.
models = [
    {
        "path": "Bailing-Lite-0830",
        # Put your API key here, or export it as BAILING_API_KEY instead.
        "token": "xxxxxx",
        "url": "https://bailingchat.alipay.com/chat/completions",
        "type": BailingAPI,
        "generation_kwargs": {},
        "query_per_second": 1,
        "max_seq_len": 4096,
    },
]
# Inference stage: naive (no-split) partitioning, executed by the local
# API runner with a small worker pool to respect server-side rate limits.
infer = {
    "partitioner": {"type": NaivePartitioner},
    "runner": {
        "type": LocalAPIRunner,
        "max_num_workers": 2,
        "concurrent_users": 2,
        "task": {"type": OpenICLInferTask},
    },
}
# Directory where predictions and evaluation results are written.
work_dir = "outputs/api_bailing/"