
recover logging

main
hiyouga 2 years ago
commit 13d1f0709c
6 changed files:

  1. src/utils/common.py (6 changed lines)
  2. src/utils/other.py (11 changed lines)
  3. src/utils/pairwise.py (4 changed lines)
  4. src/utils/peft_trainer.py (4 changed lines)
  5. src/utils/ppo.py (4 changed lines)
  6. src/utils/seq2seq.py (4 changed lines)

src/utils/common.py (6 changed lines)

@@ -38,7 +38,7 @@ from .config import (
 )

 from .other import (
-    get_main_logger,
+    get_logger,
     load_trainable_params,
     load_valuehead_params,
     print_trainable_params,
@@ -53,7 +53,7 @@ require_version("peft>=0.3.0", "To fix: pip install peft>=0.3.0")
 require_version("trl>=0.4.1", "To fix: pip install trl>=0.4.1")

-logger = get_main_logger(__name__)
+logger = get_logger(__name__)

 def _init_adapter(
@@ -289,7 +289,7 @@ def prepare_args(
     logger.info(
         f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}\n"
         + f" distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}"
-    , main_process_only=False)
+    )
     logger.info(f"Training/evaluation parameters {training_args}")

     # Set seed before initializing model.
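
The dropped `main_process_only=False` keyword exists only on the multi-process adapter returned by `accelerate.logging.get_logger`; a plain `logging.Logger` rejects unknown keywords, so the call site has to lose it. A minimal standalone sketch of the difference (the message text is illustrative, not from this repo):

    import logging

    logger = logging.getLogger(__name__)
    logger.info("plain stdlib logger")  # fine
    # logger.info("msg", main_process_only=False)  # TypeError: unexpected keyword argument

With the stdlib logger every process logs unconditionally, which in distributed runs gives the same per-rank output the old `main_process_only=False` opted into.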

src/utils/other.py (11 changed lines)

@@ -10,8 +10,6 @@ from transformers.modeling_utils import PreTrainedModel
 from transformers.generation.utils import LogitsProcessorList
 from transformers.generation.logits_process import LogitsProcessor

-from accelerate.logging import get_logger
-
 from peft.utils.other import WEIGHTS_NAME
@@ -20,16 +18,19 @@ VALUE_HEAD_FILE_NAME = "value_head.bin"
 FINETUNING_ARGS_NAME = "finetuning_args.json"

-logger = get_logger(__name__, log_level="INFO")
+def get_logger(name: str) -> logging.Logger:
+    return logging.getLogger(name)

 logging.basicConfig(
     format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
     datefmt="%m/%d/%Y %H:%M:%S",
+    level=logging.INFO,
     handlers=[logging.StreamHandler(sys.stdout)]
 )

-def get_main_logger(name: str) -> logging.Logger:
-    return get_logger(name, log_level="INFO")
+logger = get_logger(__name__)

 class AverageMeter:
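
Taken together, these hunks swap accelerate's multi-process logger for a thin wrapper over the standard library, configured once at import time. A minimal runnable sketch of the recovered setup (the final log message is illustrative):

    import logging
    import sys

    def get_logger(name: str) -> logging.Logger:
        # Thin wrapper: call sites stay one import away from stdlib logging.
        return logging.getLogger(name)

    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        level=logging.INFO,
        handlers=[logging.StreamHandler(sys.stdout)]
    )

    logger = get_logger(__name__)
    logger.info("logging recovered")  # printed to stdout in the format above

Keeping the `get_logger` indirection means the remaining files below only need a one-word import change, and the logging backend could be swapped again later without touching any `logger = get_logger(__name__)` call site.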

src/utils/pairwise.py (4 changed lines)

@@ -5,9 +5,9 @@ from .data_collator import DynamicDataCollatorWithPadding
 from .peft_trainer import PeftTrainer

-from .other import get_main_logger
+from .other import get_logger

-logger = get_main_logger(__name__)
+logger = get_logger(__name__)

 class PairwiseDataCollatorWithPadding(DynamicDataCollatorWithPadding):

src/utils/peft_trainer.py (4 changed lines)

@@ -21,7 +21,7 @@ from peft.utils.other import WEIGHTS_NAME
 from .config import FinetuningArguments
 from .other import (
-    get_main_logger,
+    get_logger,
     get_state_dict,
     load_trainable_params,
     load_valuehead_params,
@@ -30,7 +30,7 @@ from .other import (
 )

-logger = get_main_logger(__name__)
+logger = get_logger(__name__)

 class LogCallback(TrainerCallback):

src/utils/ppo.py (4 changed lines)

@@ -16,12 +16,12 @@ from .config import FinetuningArguments
 from .other import (
     AverageMeter,
-    get_main_logger,
+    get_logger,
     get_logits_processor
 )

-logger = get_main_logger(__name__)
+logger = get_logger(__name__)

 def replace_model(model: AutoModelForCausalLMWithValueHead, target: Literal["default", "reward"]) -> None:

src/utils/seq2seq.py (4 changed lines)

@@ -13,10 +13,10 @@ from nltk.translate.bleu_score import sentence_bleu, SmoothingFunction
 from .peft_trainer import PeftTrainer

-from .other import get_main_logger, IGNORE_INDEX
+from .other import get_logger, IGNORE_INDEX

-logger = get_main_logger(__name__)
+logger = get_logger(__name__)

 @dataclass
