add the pure fp16 (#5591)
wawltor authored Apr 10, 2023
1 parent 6d1c163 commit c2f7e31
Showing 1 changed file with 0 additions and 2 deletions.
paddlenlp/transformers/bloom/configuration.py (0 additions, 2 deletions)
@@ -108,7 +108,6 @@ def __init__(
         attention_dropout=0.0,
         attention_softmax_in_fp32=True,
         pretraining_tp=1,  # TP rank used when training with megatron
-        dtype="float16",
         slow_but_exact=False,
         use_recompute=False,
         use_pure_fp16=False,
@@ -133,7 +132,6 @@ def __init__(
 
         self.bos_token_id = bos_token_id
         self.eos_token_id = eos_token_id
-        self.dtype = dtype
         self.slow_but_exact = slow_but_exact
         self.use_recompute = use_recompute
         self.use_pure_fp16 = use_pure_fp16
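
For reference, a minimal sketch of constructing the config after this change. The import path and the remaining constructor defaults are assumptions (only the configuration module path and the keyword arguments visible in the diff context above are confirmed); how the compute dtype is resolved once it is no longer a BloomConfig argument is likewise not shown here.

# Minimal sketch, assuming BloomConfig is importable from paddlenlp.transformers
# (import path is an assumption; the diff only shows bloom/configuration.py).
from paddlenlp.transformers import BloomConfig

# These keyword arguments all appear in the diff context above.
config = BloomConfig(
    attention_dropout=0.0,
    attention_softmax_in_fp32=True,
    use_pure_fp16=True,  # the pure fp16 flag kept by this config
)

# After this commit, dtype is no longer declared in BloomConfig.__init__;
# where the compute dtype is resolved instead (base PretrainedConfig, training
# flags, etc.) is an assumption, not something this diff shows.
print(config.use_pure_fp16)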
