@@ -85,8 +85,8 @@ def __init__(self,
         self.out_feats = out_feats
         self.edge_embedding = tlx.nn.Embedding(num_etypes, edge_feats)

-        self.fc_node = tlx.nn.Linear(out_feats * heads, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_node')
-        self.fc_edge = tlx.nn.Linear(edge_feats * heads, in_features=edge_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_edge')
+        self.fc_node = tlx.nn.Linear(out_feats * heads, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414))
+        self.fc_edge = tlx.nn.Linear(edge_feats * heads, in_features=edge_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414))

         self.attn_src = self._get_weights('attn_l', shape=(1, heads, out_feats), init=tlx.initializers.XavierNormal(gain=1.414), order=True)
         self.attn_dst = self._get_weights('attn_r', shape=(1, heads, out_feats), init=tlx.initializers.XavierNormal(gain=1.414), order=True)
@@ -96,7 +96,7 @@ def __init__(self,
         self.attn_drop = tlx.nn.Dropout(attn_drop)
         self.leaky_relu = tlx.nn.LeakyReLU(negative_slope)

-        self.fc_res = tlx.nn.Linear(heads * out_feats, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414), name='fc_res') if residual else None
+        self.fc_res = tlx.nn.Linear(heads * out_feats, in_features=in_feats, b_init=None, W_init=tlx.initializers.XavierNormal(gain=1.414)) if residual else None

         self.activation = activation

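The only functional change in this diff is dropping the explicit `name=` keyword from the `tlx.nn.Linear` constructors, leaving name assignment to TensorLayerX. Below is a minimal sketch of how these projection layers are built after the change; the dimensions (`in_feats=64`, `edge_feats=16`, `out_feats=32`, `heads=4`) are placeholders for illustration, not values from the commit.

```python
import tensorlayerx as tlx

# Placeholder dimensions for illustration only; in the real layer these come
# from the __init__ arguments.
in_feats, edge_feats, out_feats, heads = 64, 16, 32, 4

# As of this commit, the projections are created without an explicit `name=`;
# TensorLayerX assigns a unique layer name automatically.
fc_node = tlx.nn.Linear(out_feats * heads,
                        in_features=in_feats,
                        b_init=None,  # no bias, as in the diff
                        W_init=tlx.initializers.XavierNormal(gain=1.414))
fc_edge = tlx.nn.Linear(edge_feats * heads,
                        in_features=edge_feats,
                        b_init=None,
                        W_init=tlx.initializers.XavierNormal(gain=1.414))

# Quick shape check with a dummy batch of 10 node feature vectors.
x = tlx.ones((10, in_feats))
print(tlx.get_tensor_shape(fc_node(x)))  # [10, 128] == [10, out_feats * heads]
```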