#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Autograd demo: compute the element-wise gradient of y = 2*x*x.

Created on Fri Aug 10 16:13:29 2018
@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd

x = nd.array([[1, 2], [3, 4]])
x.attach_grad()  # allocate storage on this NDArray to receive the gradient

with autograd.record():  # record operations for differentiation: f(x) = 2*x*x
    y = 2 * x * x

# back-propagation: populates x.grad with f'(x) = 4*x
y.backward()
z = x.grad
print(x)
print(z)
[[1. 2.]
 [3. 4.]]
<NDArray 2x2 @cpu(0)>
[[ 4.  8.]
 [12. 16.]]
<NDArray 2x2 @cpu(0)>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Autograd through dynamic control flow: the number of doubling steps in
f depends on the input data, yet autograd still recovers the effective
slope a in y = a*x.

Created on Fri Aug 10 16:13:29 2018
@author: myhaspl
"""
from mxnet import nd
from mxnet import autograd


def f(x):
    """Double x until its Euclidean norm reaches 100, then return one entry.

    Every step is linear, so the selected output is y = a*x[i] with
    a = 2**k for a data-dependent k; x.grad therefore recovers a.
    """
    b = x
    while b.norm().asscalar() < 100:  # norm() is the Euclidean (L2) norm
        b = b * 2  # y = a*x with a = 2*2*...*2
        print(b)
    # Pick a single scalar output so backward() needs no head gradient.
    if b.sum().asscalar() >= 0:
        y = b[0]
    else:
        y = b[1]
    return y


x = nd.array([1, 4])
x.attach_grad()  # allocate storage on this NDArray to receive the gradient

with autograd.record():  # record operations for differentiation
    y = f(x)

# Back-propagation: f'(x) = a. Here y depends only on x[0], so the
# gradient w.r.t. x[1] is 0.
y.backward()
z = x.grad
print("=======")
print([z, x, y, y / x])  # a = y/x for the entry that produced y
[2. 8.]
<NDArray 2 @cpu(0)>
[ 4. 16.]
<NDArray 2 @cpu(0)>
[ 8. 32.]
<NDArray 2 @cpu(0)>
[16. 64.]
<NDArray 2 @cpu(0)>
[ 32. 128.]
<NDArray 2 @cpu(0)>
=======
[
[32.  0.]
<NDArray 2 @cpu(0)>,
[1. 4.]
<NDArray 2 @cpu(0)>,
[32.]
<NDArray 1 @cpu(0)>,
[32.  8.]
<NDArray 2 @cpu(0)>]
mxnet-梯度与反向传播
原文地址:http://blog.51cto.com/13959448/2316718