Mirror of https://github.com/open-compass/opencompass.git, synced 2025-05-30 16:03:24 +08:00

* [Feature] Support reasoning from the BaiLing LLM. This commit adds access to the BaiLing LLM and retrieves its reasoning output.
* Add the API example: an example config for evaluating the BaiLing API.
* Revise the generation arguments: based on current experiments, some generation arguments are updated for better reasoning.
* [Fix] Set the batch size.
* Retry under server-side flow control (a sketch of this pattern follows below).
* Add the dependent package retrying to requirements.txt so the pre-commit check passes.
* Correct the file names and copy the files under configs to opencompass.
* Fix the lint issue.

---------

Co-authored-by: christopher.dy <christopher.dy@antgroup.com>
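The retry-under-flow-control change relies on the retrying package added to requirements.txt. Below is a minimal sketch of that pattern, not the actual BailingAPI implementation: the call_bailing helper, the auth header scheme, the status codes treated as flow control, and the retry parameters are all illustrative assumptions.

import requests
from retrying import retry

def _is_flow_control_error(exc):
    # Retry only when the server signals flow control (rate limiting / overload); codes assumed.
    return (isinstance(exc, requests.HTTPError)
            and exc.response is not None
            and exc.response.status_code in (429, 503))

@retry(retry_on_exception=_is_flow_control_error,
       wait_exponential_multiplier=1000,  # exponential back-off, in milliseconds
       wait_exponential_max=10000,        # cap the back-off at 10 s
       stop_max_attempt_number=5)         # give up after 5 attempts
def call_bailing(payload, token):
    # Hypothetical single-request helper; the real client lives in opencompass.models.BailingAPI.
    resp = requests.post(
        "https://bailingchat.alipay.com/chat/completions",
        headers={"Authorization": f"Bearer {token}"},  # auth scheme assumed for illustration
        json=payload,
        timeout=60,
    )
    resp.raise_for_status()  # raises requests.HTTPError on 4xx/5xx, which triggers the retry check
    return resp.json()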
32 lines · 815 B · Python
from opencompass.models import BailingAPI

api_meta_template = dict(
    round=[
        dict(role="HUMAN", api_role="HUMAN"),
        dict(role="BOT", api_role="BOT", generate=False),
    ],
    reserved_roles=[dict(role="SYSTEM", api_role="SYSTEM")],
)

models = [
    dict(
        path="Bailing-Lite-0830",
        token="",  # set your key here or in environment variable BAILING_API_KEY
        url="https://bailingchat.alipay.com/chat/completions",
        type=BailingAPI,
        meta_template=api_meta_template,
        query_per_second=1,
        max_seq_len=4096,
        batch_size=1,
        generation_kwargs={
            "temperature": 0.4,
            "top_p": 1.0,
            "top_k": -1,
            "n": 1,
            "logprobs": 1,
            "use_beam_search": False,
        },
    ),
]
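OpenCompass builds each entry in models by taking the type key as the class and passing the remaining keys to its constructor; a few keys, such as batch_size, are handled by the inference runner rather than the model itself. The snippet below is a rough, hypothetical illustration of that step for this config, including the BAILING_API_KEY fallback mentioned in the token comment above; it is not the actual builder code.

import os

model_cfg = dict(models[0])        # copy so the config entry itself stays untouched
model_cls = model_cfg.pop("type")  # -> BailingAPI
model_cfg.pop("batch_size", None)  # consumed by the runner, not the model constructor
if not model_cfg["token"]:
    # Fall back to the environment variable mentioned in the config comment.
    model_cfg["token"] = os.environ.get("BAILING_API_KEY", "")
model = model_cls(**model_cfg)     # roughly equivalent to BailingAPI(path=..., url=..., ...)

Assuming this example lives alongside the other API examples (e.g. configs/api_examples/eval_api_bailing.py), an evaluation would typically be launched with python run.py configs/api_examples/eval_api_bailing.py --debug after exporting BAILING_API_KEY.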