import tensorflow as tf
from tensorflow.python.framework import ops   # ops.RegisterGradient
from tensorflow.python.ops import gen_nn_ops  # gen_nn_ops.relu_grad
Let's work through the example above using TensorFlow functions.
The full code is available here.
tf.reset_default_graph()
# feature output from the forward pass
feature_out = tf.constant([[1,-1,5],[2,-5,-7],[-3,2,4]], dtype=tf.float32, name='features')
feature_out
<tf.Tensor 'features:0' shape=(3, 3) dtype=float32>
# upstream gradient flowing back into the ReLU
grad = tf.constant([[-2,3,-1],[6,-3,1],[2,-1,3]], dtype=tf.float32, name='gradients')
grad
<tf.Tensor 'gradients:0' shape=(3, 3) dtype=float32>
# plain backprop: pass grad where feature output > 0
BackpropRelu = gen_nn_ops.relu_grad(grad, feature_out)
# deconvnet: pass grad where grad > 0
DeconvRelu = tf.where(0. < grad, grad, tf.zeros(grad.get_shape()))
# guided backprop: pass grad where feature output > 0 and grad > 0
GuidedReluGrad = tf.where(0. < grad, gen_nn_ops.relu_grad(grad, feature_out), tf.zeros(grad.get_shape()))
sess = tf.InteractiveSession()
print('BackpropRelu: \n',BackpropRelu.eval())
print('DeconvRelu: \n',DeconvRelu.eval())
print('GuidedReluGrad: \n',GuidedReluGrad.eval())
BackpropRelu:
[[-2. 0. -1.]
[ 6. -0. 0.]
[ 0. -1. 3.]]
DeconvRelu:
[[0. 3. 0.]
[6. 0. 1.]
[2. 0. 3.]]
GuidedReluGrad:
[[0. 0. 0.]
[6. 0. 0.]
[0. 0. 3.]]
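As a quick sanity check, the same three rules can be reproduced by hand with NumPy (a minimal sketch, not part of the original code; each mask mirrors the condition in the comments above):
import numpy as np

features = np.array([[1, -1, 5], [2, -5, -7], [-3, 2, 4]], dtype=np.float32)
grads = np.array([[-2, 3, -1], [6, -3, 1], [2, -1, 3]], dtype=np.float32)

print(np.where(features > 0, grads, 0.))                  # BackpropRelu
print(np.where(grads > 0, grads, 0.))                     # DeconvRelu
print(np.where((features > 0) & (grads > 0), grads, 0.))  # GuidedReluGrad
# these reproduce the three .eval() results above
# (NumPy prints 0. where ReluGrad printed -0.)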
Overriding gradients in TensorFlow
Each rule below is registered as a custom gradient function that emits grad only where its condition (e.g. gradient > 0) holds.
BackpropRelu
@ops.RegisterGradient("BackpropRelu")
def _BackpropRelu(unused_op, grad):
return gen_nn_ops.relu_grad(grad, unused_op.outputs[0])
DeconvRelu
@ops.RegisterGradient("DeconvRelu")
def _DeconvRelu(unused_op, grad):
return tf.where(0. < grad, grad, tf.zeros(tf.shape(grad)))
GuidedRelu
@ops.RegisterGradient("GuidedRelu")
def _GuidedReluGrad(unused_op, grad):
return tf.where(0. < grad, gen_nn_ops.relu_grad(grad, unused_op.outputs[0]),
tf.zeros(tf.shape(grad)))
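A note on gen_nn_ops.relu_grad, which two of these registrations rely on: relu_grad(grad, features) passes grad through wherever features > 0, i.e. the same mask tf.where expresses explicitly. A quick check (a minimal sketch, reusing the constants and session from above):
where_form = tf.where(feature_out > 0., grad, tf.zeros_like(grad))
print(tf.reduce_all(tf.equal(where_form, BackpropRelu)).eval())  # True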
Finally, override the gradient of the Relu op with the gradient function registered under <method>:
g = tf.get_default_graph()
with g.gradient_override_map({"Relu": "<method>"}):
...graph...
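For completeness, here is a minimal end-to-end sketch of the override, continuing the script above (so the imports and the GuidedRelu registration are already in scope; the tensors x and w are illustrative, not from the original):
sess.close()  # close the InteractiveSession before resetting the graph
tf.reset_default_graph()

g = tf.get_default_graph()
with g.gradient_override_map({"Relu": "GuidedRelu"}):
    x = tf.constant([[1., -1., 5.]])
    y = tf.nn.relu(x)  # this Relu op now uses _GuidedReluGrad
w = tf.constant([[2., 3., -1.]])
dx = tf.gradients(tf.reduce_sum(y * w), x)[0]

with tf.Session() as s:
    # prints [[2. 0. 0.]]: column 2 is blocked by y == 0, column 3 by grad < 0
    print(s.run(dx))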