From f070aad0dedd81a4d5a7a7089f2f813902ac7e9c Mon Sep 17 00:00:00 2001
From: Jintao
Date: Mon, 9 Sep 2024 10:49:05 +0800
Subject: [PATCH] fix typo (#1980)

---
 swift/llm/utils/utils.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/swift/llm/utils/utils.py b/swift/llm/utils/utils.py
index c360127a1..5e8646a0d 100644
--- a/swift/llm/utils/utils.py
+++ b/swift/llm/utils/utils.py
@@ -413,26 +413,26 @@ def print_example(example: Dict[str, Any],
         logger.info(f'[INPUT_IDS] {input_ids}')
         input_str = safe_tokenizer_decode(tokenizer, input_ids, **tokenizer_kwargs)
         logger.info(f'[INPUT] {input_str}')
-    if labels is not None:
-        logger.info(f'[LABLES_IDS] {labels}')
-        labels_str = safe_tokenizer_decode(tokenizer, labels, **tokenizer_kwargs)
-        logger.info(f'[LABLES] {labels_str}')
     if chosen_input_ids is not None:
         logger.info(f'[CHOSEN_INPUT_IDS] {chosen_input_ids}')
         input_str = safe_tokenizer_decode(tokenizer, chosen_input_ids, **tokenizer_kwargs)
-        logger.info(f'[CHOSEN INPUT] {input_str}')
+        logger.info(f'[CHOSEN_INPUT] {input_str}')
     if rejected_input_ids is not None:
         logger.info(f'[REJECTED_INPUT_IDS] {rejected_input_ids}')
         input_str = safe_tokenizer_decode(tokenizer, rejected_input_ids, **tokenizer_kwargs)
-        logger.info(f'[REJECTED INPUT] {input_str}')
+        logger.info(f'[REJECTED_INPUT] {input_str}')
+    if labels is not None:
+        logger.info(f'[LABELS_IDS] {labels}')
+        labels_str = safe_tokenizer_decode(tokenizer, labels, **tokenizer_kwargs)
+        logger.info(f'[LABELS] {labels_str}')
     if chosen_labels is not None:
-        logger.info(f'[CHOSEN_LABLES_IDS] {chosen_labels}')
+        logger.info(f'[CHOSEN_LABELS_IDS] {chosen_labels}')
         labels_str = safe_tokenizer_decode(tokenizer, chosen_labels, **tokenizer_kwargs)
-        logger.info(f'[CHOSEN LABELS] {labels_str}')
+        logger.info(f'[CHOSEN_LABELS] {labels_str}')
     if rejected_labels is not None:
-        logger.info(f'[REJECTED_INPUT_IDS] {rejected_labels}')
+        logger.info(f'[REJECTED_LABELS_IDS] {rejected_labels}')
         labels_str = safe_tokenizer_decode(tokenizer, rejected_labels, **tokenizer_kwargs)
-        logger.info(f'[REJECTED LABELS] {labels_str}')
+        logger.info(f'[REJECTED_LABELS] {labels_str}')
 
 
 def _find_layers(model: Module, module_cls: type) -> List[str]: