[Fix] change save_every defaults to 1 (#592)

Hubert 2023-11-15 13:00:25 +08:00 committed by GitHub
parent 19ad7f9613
commit fcab30f82e
4 changed files with 8 additions and 8 deletions


@@ -42,7 +42,7 @@ class AttackInferencer(BaseInferencer):
gen_field_replace_token (:obj:`str`, optional): Used to replace the
generation field token when generating prompts.
save_every (:obj:`int`, optional): Save intermediate results every
- `save_every` epochs.
+ `save_every` iters. Defaults to 1.
generation_kwargs (:obj:`Dict`, optional): Parameters for the
:obj:`model.generate()` method.
"""
@@ -58,7 +58,7 @@ class AttackInferencer(BaseInferencer):
gen_field_replace_token: Optional[str] = '',
output_json_filepath: Optional[str] = './icl_inference_output',
output_json_filename: Optional[str] = 'predictions',
- save_every: Optional[int] = None,
+ save_every: Optional[int] = 1,
dataset_cfg: Optional[List[int]] = None,
**kwargs) -> None:
super().__init__(


@@ -36,7 +36,7 @@ class GenInferencer(BaseInferencer):
gen_field_replace_token (:obj:`str`, optional): Used to replace the
generation field token when generating prompts.
save_every (:obj:`int`, optional): Save intermediate results every
`save_every` epochs.
`save_every` iters. Defaults to 1.
generation_kwargs (:obj:`Dict`, optional): Parameters for the
:obj:`model.generate()` method.
"""
@@ -50,7 +50,7 @@ class GenInferencer(BaseInferencer):
gen_field_replace_token: Optional[str] = '',
output_json_filepath: Optional[str] = './icl_inference_output',
output_json_filename: Optional[str] = 'predictions',
save_every: Optional[int] = None,
save_every: Optional[int] = 1,
**kwargs) -> None:
super().__init__(
model=model,


@@ -34,7 +34,7 @@ class SCInferencer(BaseInferencer):
gen_field_replace_token (:obj:`str`, optional): Used to replace the
generation field token when generating prompts.
save_every (:obj:`int`, optional): Save intermediate results every
`save_every` epochs.
`save_every` iters. Defaults to 1.
generation_kwargs (:obj:`Dict`, optional): Parameters for the
:obj:`model.generate()` method.
sc_size (:obj:`int`, optional): Sample size for Self-Consistency
@@ -51,7 +51,7 @@ class SCInferencer(BaseInferencer):
gen_field_replace_token: Optional[str] = '',
output_json_filepath: Optional[str] = './icl_inference_output',
output_json_filename: Optional[str] = 'predictions',
save_every: Optional[int] = None,
save_every: Optional[int] = 1,
sc_size: Optional[int] = 1,
infer_type: Optional[str] = '',
generation_kwargs: dict = {},


@@ -43,7 +43,7 @@ class ToTInferencer(GenInferencer):
gen_field_replace_token (:obj:`str`, optional): Used to replace the
generation field token when generating prompts.
save_every (:obj:`int`, optional): Save intermediate results every
`save_every` epochs.
`save_every` iters. Defaults to 1.
generation_kwargs (:obj:`Dict`, optional): Parameters for the
:obj:`model.generate()` method.
naive_run (:obj:`bool`): if True, run naive IO/CoT sampling instead of
@@ -74,7 +74,7 @@ class ToTInferencer(GenInferencer):
gen_field_replace_token: Optional[str] = '',
output_json_filepath: Optional[str] = './icl_inference_output',
output_json_filename: Optional[str] = 'predictions',
save_every: Optional[int] = None,
save_every: Optional[int] = 1,
naive_run: bool = False,
prompt_wrapper: dict = {},
prompt_sample: str = 'standard',
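
For context on what the new default changes in practice: per the docstrings above, intermediate results are saved every `save_every` iterations, so a default of 1 means results are written out after every iteration rather than only when `save_every` is passed explicitly. Below is a minimal sketch of how such a check is commonly applied inside an inference loop; `run_inference`, `dump_results`, and the loop body are hypothetical illustrations, not the repository's actual implementation.

```python
# Hypothetical sketch of how a `save_every` parameter is typically consumed
# in an inference loop; all names here are illustrative, not OpenCompass code.
from typing import List, Optional


def dump_results(results: dict) -> None:
    """Illustrative stand-in for writing predictions to the output JSON file."""
    print(f'saving {len(results)} predictions')


def run_inference(prompts: List[str], save_every: Optional[int] = 1) -> dict:
    results = {}
    for idx, prompt in enumerate(prompts):
        results[idx] = f'prediction for: {prompt}'  # stand-in for model.generate()
        # With save_every=1 (the new default) intermediate results are flushed
        # after every prompt; with save_every=None no intermediate saves happen.
        if save_every is not None and (idx + 1) % save_every == 0:
            dump_results(results)
    dump_results(results)  # final write
    return results


if __name__ == '__main__':
    run_inference(['q1', 'q2', 'q3'])
```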