Fix max token setting in EncoderDecoder generator
ignorejjj committed Jul 26, 2024
1 parent ed7ef8e commit 96b20c8
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions flashrag/generator/generator.py
@@ -94,11 +94,12 @@ def generate(self, input_list: List, batch_size=None, **params):
             stopping_criteria = [StopWordCriteria(tokenizer=self.tokenizer, prompts=input_list, stop_words=stop_sym)]
             generation_params['stopping_criteria'] = stopping_criteria
 
-        if 'max_tokens' in generation_params:
-            if 'max_tokens' in params:
-                generation_params['max_new_tokens'] = params.pop('max_tokens')
-            else:
-                generation_params['max_new_tokens'] = generation_params.pop('max_tokens')
+        max_tokens = params.pop('max_tokens', None) or params.pop('max_new_tokens', None)
+        if max_tokens is not None:
+            generation_params['max_new_tokens'] = max_tokens
+        else:
+            generation_params['max_new_tokens'] = generation_params.get('max_new_tokens', generation_params.pop('max_tokens', None))
+        generation_params.pop('max_tokens', None)
 
         responses = []
         for idx in trange(0, len(input_list), batch_size, desc='Generation process: '):
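For context, the added lines let a caller pass either max_tokens or max_new_tokens per call, fall back to the configured default otherwise, and strip the max_tokens key before the parameters reach the HuggingFace generate() call. Below is a minimal, self-contained sketch of that behavior; it is not part of the commit, and the helper name resolve_max_new_tokens and the example dictionaries are illustrative assumptions only.

# Illustrative sketch only: a standalone helper mirroring the commit's logic.
# The function name and the example dicts below are hypothetical.
def resolve_max_new_tokens(generation_params: dict, params: dict) -> dict:
    # Per-call override: accept either 'max_tokens' or 'max_new_tokens' from **params.
    max_tokens = params.pop('max_tokens', None) or params.pop('max_new_tokens', None)
    if max_tokens is not None:
        generation_params['max_new_tokens'] = max_tokens
    else:
        # No override: keep an existing 'max_new_tokens', otherwise rename the configured 'max_tokens'.
        generation_params['max_new_tokens'] = generation_params.get(
            'max_new_tokens', generation_params.pop('max_tokens', None)
        )
    # Ensure the OpenAI/vLLM-style key never reaches the HF-style generate() call.
    generation_params.pop('max_tokens', None)
    return generation_params

# A per-call override wins over the configured default:
print(resolve_max_new_tokens({'max_tokens': 32, 'do_sample': False}, {'max_tokens': 64}))
# {'do_sample': False, 'max_new_tokens': 64}

# With no override, the configured 'max_tokens' is renamed to 'max_new_tokens':
print(resolve_max_new_tokens({'max_tokens': 32, 'do_sample': False}, {}))
# {'do_sample': False, 'max_new_tokens': 32}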
