fix dygraph has_grad (#34649)
sneaxiy authored Aug 5, 2021
1 parent 4a52c0c commit 68377b4
Showing 3 changed files with 62 additions and 1 deletion.
2 changes: 2 additions & 0 deletions paddle/fluid/imperative/tracer.cc
@@ -30,6 +30,8 @@ DECLARE_string(tracer_mkldnn_ops_off);
 namespace paddle {
 namespace imperative {
 
+thread_local bool Tracer::has_grad_ = true;
+
 static std::shared_ptr<Tracer> g_current_tracer(nullptr);
 
 const std::shared_ptr<Tracer>& GetCurrentTracer() { return g_current_tracer; }
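The companion change in tracer.h (next hunk) declares has_grad_ as a static thread_local data member of Tracer; the line added here supplies the one out-of-class definition such a member needs, giving it storage and initializing each thread's copy to true. A minimal sketch of the C++ pattern, using illustrative names that are not Paddle code:

// sketch.cc -- illustrative only, not part of this commit
class Widget {
 private:
  // Declared in the header: every thread observes its own copy of the flag.
  static thread_local bool enabled_;
};

// Defined exactly once in a source file, analogous to the line added above;
// each thread's copy starts out as true.
thread_local bool Widget::enabled_ = true;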
2 changes: 1 addition & 1 deletion paddle/fluid/imperative/tracer.h
@@ -118,9 +118,9 @@ class Tracer {
   bool enable_program_desc_tracing_{false};
   std::unique_ptr<UniqueNameGenerator> generator_;
   platform::Place expected_place_;
-  bool has_grad_{true};
   bool enable_autocast_{false};
   GarbageCollectorMap gcs_;
+  static thread_local bool has_grad_;
 };
 
 // To access static variable current_tracer
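Taken together, the two C++ hunks turn has_grad_ from a per-instance member into a per-thread flag. Dygraph mode shares one Tracer across Python threads, so with the old member a paddle.no_grad() scope entered in one thread flipped the flag for every thread; with thread_local each thread keeps its own setting. A hedged sketch of the difference, using illustrative types rather than the real Tracer:

// illustrative sketch only -- not Paddle code
#include <cassert>
#include <thread>

// Old layout (simplified): one flag per object. If the object is shared
// across threads, a write from any thread is visible to all of them.
struct SharedFlagTracer {
  bool has_grad{true};
};

// New layout (simplified): one flag per thread, no matter how many
// threads share the object.
struct ThreadLocalTracer {
  static thread_local bool has_grad;
};
thread_local bool ThreadLocalTracer::has_grad = true;

int main() {
  SharedFlagTracer shared;
  std::thread t1([&] { shared.has_grad = false; });
  t1.join();
  assert(!shared.has_grad);  // the change leaked into the main thread

  std::thread t2([] { ThreadLocalTracer::has_grad = false; });
  t2.join();
  assert(ThreadLocalTracer::has_grad);  // main thread's copy is untouched
  return 0;
}

The new unit test below exercises exactly this behavior: one group of threads sits inside paddle.no_grad() while another keeps running a network with gradients enabled.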
59 changes: 59 additions & 0 deletions (new test file)
@@ -0,0 +1,59 @@
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import paddle
import time
import paddle.nn as nn
import numpy as np
import threading


class SimpleNet(nn.Layer):
    def __init__(self, in_dim, out_dim):
        super(SimpleNet, self).__init__()
        self.fc = nn.Linear(in_dim, out_dim)

    def forward(self, x):
        return self.fc(x)


class TestCases(unittest.TestCase):
    @paddle.no_grad()
    def thread_1_main(self):
        # Hold no_grad in this thread long enough to overlap with
        # thread_2_main; the flag must not leak into other threads.
        time.sleep(8)

    def thread_2_main(self):
        # Gradients must stay enabled here even while other threads
        # are running under paddle.no_grad().
        in_dim = 10
        out_dim = 3
        net = SimpleNet(in_dim, out_dim)
        for _ in range(1000):
            x = paddle.to_tensor(np.random.rand(32, in_dim).astype('float32'))
            self.assertTrue(x.stop_gradient)
            x = net(x)
            self.assertFalse(x.stop_gradient)

    def test_main(self):
        threads = []
        for _ in range(10):
            threads.append(threading.Thread(target=self.thread_1_main))
            threads.append(threading.Thread(target=self.thread_2_main))
        for t in threads:
            t.start()
        for t in threads:
            t.join()


if __name__ == "__main__":
unittest.main()
