BaseInferencer batch_size and max_seq_len cast to int

Francesco Bertolotti 2025-05-05 19:55:21 +02:00
parent 37cbaf8d92
commit 61b52844be


@@ -46,8 +46,8 @@ class BaseInferencer:
         self.model = model
-        self.max_seq_len = max_seq_len
-        self.batch_size = batch_size
+        self.max_seq_len = int(max_seq_len)
+        self.batch_size = int(batch_size)
         self.output_json_filepath = output_json_filepath
         self.output_json_filename = output_json_filename
         self.is_main_process = is_main_process()
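
For context, a minimal sketch (not from this repository) of the failure mode the cast guards against, assuming these values can arrive as a numeric string or a float from a config file or CLI parser:

    # Hypothetical illustration: config loaders and CLI parsers often hand
    # these values over as str or float, and downstream code such as
    # range() rejects non-int arguments.
    max_seq_len = "2048"  # e.g. read from an environment variable (always str)
    batch_size = 8.0      # e.g. computed as total_samples / num_steps (a float)

    # The cast applied in this commit normalizes both to int.
    max_seq_len = int(max_seq_len)
    batch_size = int(batch_size)

    for start in range(0, 32, batch_size):  # works only because batch_size is int
        print(f"batch [{start}:{start + batch_size}], max_seq_len={max_seq_len}")

Without the cast, using batch_size as a range() step or max_seq_len in slicing raises a TypeError as soon as either value is not already an int.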