Commit 358acb8

Author: gongel (committed)
Merge branch 'update_codegen_doc' of https://github.com/gongel/PaddleNLP into update_codegen_doc
2 parents: f9d4c17 + 9915442

3 files changed (+7, -21 lines)

applications/text_classification/hierarchical/train.py (+3, -7)

@@ -182,9 +182,6 @@ def train():
             logits = model(**batch)
             loss = criterion(logits, labels)
 
-            probs = F.sigmoid(logits)
-            metric.update(probs, labels)
-
             loss.backward()
             optimizer.step()
             if args.warmup:
@@ -193,11 +190,10 @@ def train():
 
             global_step += 1
             if global_step % args.logging_steps == 0 and rank == 0:
-                micro_f1_score, macro_f1_score = metric.accumulate()
                 logger.info(
-                    "global step %d, epoch: %d, batch: %d, loss: %.5f, micro f1 score: %.5f, macro f1 score: %.5f, speed: %.2f step/s"
-                    % (global_step, epoch, step, loss, micro_f1_score,
-                       macro_f1_score, 10 / (time.time() - tic_train)))
+                    "global step %d, epoch: %d, batch: %d, loss: %.5f, speed: %.2f step/s"
+                    % (global_step, epoch, step, loss, 10 /
+                       (time.time() - tic_train)))
                 tic_train = time.time()
 
         early_stop_count += 1
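
For context, the removed pair of lines implemented per-batch multi-label metric tracking: F.sigmoid turns logits into independent per-label probabilities, and the metric object folds each batch into running micro/macro F1 scores. Below is a minimal standalone sketch of that pattern, using NumPy and scikit-learn in place of the repository's metric object (an assumption for illustration; the metric class itself is not shown in this diff):

```python
import numpy as np
from sklearn.metrics import f1_score

def batch_f1(logits, labels, threshold=0.5):
    """Multi-label F1 for one batch: sigmoid -> threshold -> micro/macro F1."""
    probs = 1.0 / (1.0 + np.exp(-logits))         # same role as F.sigmoid(logits)
    preds = (probs > threshold).astype(np.int64)  # independent decision per label
    micro = f1_score(labels, preds, average="micro")
    macro = f1_score(labels, preds, average="macro")
    return micro, macro

# Toy batch: 2 samples, 3 labels (illustrative values only).
logits = np.array([[2.0, -1.0, 0.5], [-0.5, 1.5, -2.0]])
labels = np.array([[1, 0, 1], [0, 1, 0]])
print(batch_f1(logits, labels))  # -> (1.0, 1.0) on this toy batch
```

With this bookkeeping gone, the log line above reports only loss and throughput, presumably leaving metric computation to the separate evaluation step.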

applications/text_classification/multi_class/train.py (+2, -7)

@@ -180,10 +180,6 @@ def train():
             logits = model(**batch)
             loss = criterion(logits, labels)
 
-            probs = F.softmax(logits, axis=1)
-            correct = metric.compute(probs, labels)
-            metric.update(correct)
-
             loss.backward()
             optimizer.step()
             if args.warmup:
@@ -192,10 +188,9 @@ def train():
 
             global_step += 1
             if global_step % args.logging_steps == 0 and rank == 0:
-                acc = metric.accumulate()
                 logger.info(
-                    "global step %d, epoch: %d, batch: %d, loss: %.5f, acc: %.5f, speed: %.2f step/s"
-                    % (global_step, epoch, step, loss, acc, args.logging_steps /
+                    "global step %d, epoch: %d, batch: %d, loss: %.5f, speed: %.2f step/s"
+                    % (global_step, epoch, step, loss, args.logging_steps /
                        (time.time() - tic_train)))
                 tic_train = time.time()
 
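
The three removed lines follow the compute/update/accumulate protocol of Paddle's metric API. A minimal sketch of that protocol with paddle.metric.Accuracy (assuming that is the metric class in use here; the diff does not show where the metric is constructed):

```python
import paddle

# Toy predictions and labels standing in for model outputs (assumed values).
probs = paddle.to_tensor([[0.1, 0.9], [0.8, 0.2]])
labels = paddle.to_tensor([[1], [1]])

metric = paddle.metric.Accuracy()
correct = metric.compute(probs, labels)  # per-sample 0/1 correctness
metric.update(correct)                   # fold this batch into running state
print(metric.accumulate())               # accuracy over all batches so far -> 0.5
metric.reset()                           # clear state, e.g. before each eval pass
```

Because accumulate() averages over every batch since the last reset(), dropping it from the training loop also removes the acc field from the log line.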

applications/text_classification/multi_label/train.py (+2, -7)

@@ -181,9 +181,6 @@ def train():
             logits = model(**batch)
             loss = criterion(logits, labels)
 
-            probs = F.sigmoid(logits)
-            metric.update(probs, labels)
-
             loss.backward()
             optimizer.step()
             if args.warmup:
@@ -192,11 +189,9 @@ def train():
 
             global_step += 1
             if global_step % args.logging_steps == 0 and rank == 0:
-                micro_f1_score, macro_f1_score = metric.accumulate()
                 logger.info(
-                    "global step %d, epoch: %d, batch: %d, loss: %.5f, micro f1 score: %.5f, macro f1 score: %.5f, speed: %.2f step/s"
-                    % (global_step, epoch, step, loss, micro_f1_score,
-                       macro_f1_score, args.logging_steps /
+                    "global step %d, epoch: %d, batch: %d, loss: %.5f, speed: %.2f step/s"
+                    % (global_step, epoch, step, loss, args.logging_steps /
                        (time.time() - tic_train)))
                 tic_train = time.time()
 
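
Common to all three files, the surviving log line measures throughput as the logging interval divided by the wall-clock time since the previous log. A self-contained sketch of that measurement (the interval of 10 is an assumed stand-in for args.logging_steps):

```python
import time

logging_steps = 10                # assumed stand-in for args.logging_steps
tic_train = time.time()
for global_step in range(1, 31):
    time.sleep(0.01)              # stand-in for one forward/backward/optimizer step
    if global_step % logging_steps == 0:
        speed = logging_steps / (time.time() - tic_train)  # steps per second
        print("global step %d, speed: %.2f step/s" % (global_step, speed))
        tic_train = time.time()   # restart the window for the next interval
```

Note that hierarchical/train.py keeps a literal 10 in this expression while the other two files use args.logging_steps, so its reported speed is only accurate when logging_steps is 10.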
