[Fix] Update HF configs (#42)
parent 0625294e5f
commit 50b658d234
@@ -5,12 +5,13 @@ models = [
     dict(
         type=HuggingFaceCausalLM,
         abbr='internlm-7b-hf',
-        path="internlm-7b",
-        tokenizer_path='internlm-7b',
+        path="internlm/internlm-7b",
+        tokenizer_path='internlm/internlm-7b',
         tokenizer_kwargs=dict(
             padding_side='left',
             truncation_side='left',
             use_fast=False,
+            trust_remote_code=True,
         ),
         max_out_len=100,
         max_seq_len=2048,
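The change above swaps bare model names for full Hugging Face Hub repo IDs (org/name) and adds trust_remote_code=True to tokenizer_kwargs, which InternLM needs because its tokenizer class ships as custom code on the Hub. A minimal sketch of what these kwargs amount to when the same repo is loaded directly with transformers (illustration only, not OpenCompass's own loader):

    from transformers import AutoTokenizer, AutoModelForCausalLM

    # Same repo ID and tokenizer kwargs as the updated config above.
    tokenizer = AutoTokenizer.from_pretrained(
        'internlm/internlm-7b',
        padding_side='left',
        truncation_side='left',
        use_fast=False,
        trust_remote_code=True,  # allow the custom InternLM tokenizer class to load
    )
    model = AutoModelForCausalLM.from_pretrained(
        'internlm/internlm-7b',
        trust_remote_code=True,
        device_map='auto',  # assumes accelerate is installed
    )

With the old bare name ("internlm-7b") the Hub lookup fails unless a local directory of that name happens to exist.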
@@ -12,12 +12,13 @@ models = [
     dict(
         type=HuggingFaceCausalLM,
         abbr='internlm-chat-7b-hf',
-        path="internlm-chat-7b",
-        tokenizer_path='internlm-chat-7b',
+        path="internlm/internlm-chat-7b",
+        tokenizer_path='internlm/internlm-chat-7b',
         tokenizer_kwargs=dict(
             padding_side='left',
             truncation_side='left',
             use_fast=False,
+            trust_remote_code=True,
         ),
         max_out_len=100,
         max_seq_len=2048,
configs/models/hf_internlm_chat_7b_8k.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+from opencompass.models import HuggingFaceCausalLM
+
+
+_meta_template = dict(
+    round=[
+        dict(role='HUMAN', begin='<|User|>:', end='<eoh>\n'),
+        dict(role='BOT', begin='<|Bot|>:', end='<eoa>\n', generate=True),
+    ],
+)
+
+models = [
+    dict(
+        type=HuggingFaceCausalLM,
+        abbr='internlm-chat-7b-8k-hf',
+        path="internlm/internlm-chat-7b-8k",
+        tokenizer_path='internlm/internlm-chat-7b-8k',
+        tokenizer_kwargs=dict(
+            padding_side='left',
+            truncation_side='left',
+            use_fast=False,
+            trust_remote_code=True,
+        ),
+        max_out_len=100,
+        max_seq_len=2048,
+        batch_size=8,
+        meta_template=_meta_template,
+        model_kwargs=dict(trust_remote_code=True, device_map='auto'),
+        run_cfg=dict(num_gpus=1, num_procs=1),
+    )
+]
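_meta_template in the new config wraps each chat round with InternLM-Chat's role markers: the user turn sits between '<|User|>:' and '<eoh>\n', the bot turn is opened with '<|Bot|>:' and closed with '<eoa>\n', and generation happens in the BOT slot. A rough illustration of the prompt shape this produces (OpenCompass's prompt builder handles this internally; the snippet only shows the resulting string):

    human = dict(role='HUMAN', begin='<|User|>:', end='<eoh>\n')
    bot = dict(role='BOT', begin='<|Bot|>:', end='<eoa>\n', generate=True)

    question = 'What is the capital of France?'
    prompt = human['begin'] + question + human['end'] + bot['begin']
    # prompt == '<|User|>:What is the capital of France?<eoh>\n<|Bot|>:'
    # The model continues after '<|Bot|>:' and is expected to stop at '<eoa>'.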
@@ -5,6 +5,7 @@ models = [
     # LLaMA 13B
     dict(
         type=HuggingFaceCausalLM,
+        abbr='llama-13b-hf',
         path="decapoda-research/llama-13b-hf",
         tokenizer_path='decapoda-research/llama-13b-hf',
         tokenizer_kwargs=dict(padding_side='left',
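Assuming OpenCompass's standard run.py entry point, the fixed configs can then be selected by their file stems together with some dataset config, for example:

    python run.py --models hf_internlm_chat_7b_8k --datasets ceval_gen

The dataset name here is only a placeholder; any dataset config shipped under configs/datasets works the same way.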