mirror of
https://github.com/open-compass/opencompass.git
synced 2025-05-30 16:03:24 +08:00
27 lines
1.3 KiB
Python
27 lines
1.3 KiB
Python
from mmengine.config import read_base

# NeedleBench-32k evaluation config for InternLM2-Chat-7B.
# mmengine's read_base() context turns the imports below into lazy config
# merges rather than regular Python imports.
with read_base():
    from opencompass.configs.models.hf_internlm.hf_internlm2_chat_7b import models as internlm2_chat_7b

    # Evaluate needlebench_32k; adjust the configuration to use 4k, 32k,
    # 128k, 200k, or 1000k if necessary.
    # from .datasets.needlebench.needlebench_32k.needlebench_32k import needlebench_datasets
    # from .summarizers.needlebench import needlebench_32k_summarizer as summarizer

    # Only eval the original "needle in a haystack" test in needlebench_32k.
    from opencompass.configs.datasets.needlebench.needlebench_32k.needlebench_single_32k import needlebench_zh_datasets, needlebench_en_datasets
    from opencompass.configs.summarizers.needlebench import needlebench_32k_summarizer as summarizer

    # Eval Ancestral Tracing Challenge (ATC):
    # from .datasets.needlebench.atc.atc_0shot_nocot_2_power_en import needlebench_datasets
    # ATC uses the default summarizer, so no summarizer import is needed.

# Collect every imported "*datasets" list (zh + en above) into one flat list.
datasets = sum([v for k, v in locals().items() if ('datasets' in k)], [])

for m in internlm2_chat_7b:
    # Ensure InternLM2-7B can receive the full long context; other models
    # must set this according to their own maximum supported sequence length.
    m['max_seq_len'] = 32768
    m['max_out_len'] = 4096

models = internlm2_chat_7b

work_dir = './outputs/needlebench'