# 动静态图结合

## MindSpore静态图

### Graph模式执行原理

Graph模式的代码用例如下所示：

[1]:

import numpy as np
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore as ms

# Static-graph (Graph) mode: construct() is compiled into a computation
# graph before execution.
ms.set_context(mode=ms.GRAPH_MODE, device_target="CPU")

class Net(nn.Cell):
    """Minimal network that multiplies two tensors element-wise."""

    def __init__(self):
        super(Net, self).__init__()
        self.mul = ops.Mul()

    def construct(self, x, y):
        return self.mul(x, y)

x = ms.Tensor(np.array([1.0, 2.0, 3.0]).astype(np.float32))
y = ms.Tensor(np.array([4.0, 5.0, 6.0]).astype(np.float32))

net = Net()
print(net(x, y))  # -> [ 4. 10. 18.]

[ 4. 10. 18.]


## MindSpore动态图

### PyNative模式执行原理

[2]:

import numpy as np
import mindspore.nn as nn
import mindspore as ms
import mindspore.ops as ops

# Dynamic-graph (PyNative) mode: each operator executes eagerly.
ms.set_context(mode=ms.PYNATIVE_MODE, device_target="CPU")
x = ms.Tensor(np.ones([1, 3, 3, 4]).astype(np.float32))
y = ms.Tensor(np.ones([1, 3, 3, 4]).astype(np.float32))
# Fix: `output` was printed but never defined — the add call was lost in
# extraction (ones + ones matches the all-2 output shown below).
output = ops.add(x, y)
print(output.asnumpy())

[[[[2. 2. 2. 2.]
[2. 2. 2. 2.]
[2. 2. 2. 2.]]

[[2. 2. 2. 2.]
[2. 2. 2. 2.]
[2. 2. 2. 2.]]

[[2. 2. 2. 2.]
[2. 2. 2. 2.]
[2. 2. 2. 2.]]]]


### PyNative模式自动微分原理

[3]:

import numpy as np
import mindspore.nn as nn
import mindspore.ops as ops
import mindspore as ms

ms.set_context(mode=ms.PYNATIVE_MODE, device_target="CPU")

class Net(nn.Cell):
    """out = (x * z) @ y, with z a trainable scalar parameter."""

    def __init__(self):
        super(Net, self).__init__()
        self.matmul = ops.MatMul()
        self.z = ms.Parameter(ms.Tensor(np.array([2.0], np.float32)), name='z')

    def construct(self, x, y):
        x = x * self.z
        out = self.matmul(x, y)
        return out

# NOTE(review): this wrapper's class header and construct body were lost in
# extraction; reconstructed per the MindSpore autodiff tutorial. The printed
# result matches d(out)/dx = ones @ y.T * z.
class GradNetWrtX(nn.Cell):
    """Wraps `net` and returns its gradient w.r.t. the first input x."""

    def __init__(self, net):
        super(GradNetWrtX, self).__init__()
        self.net = net
        # GradOperation with default arguments differentiates w.r.t. the
        # first positional input only.
        self.grad_op = ops.GradOperation()

    def construct(self, x, y):
        gradient_function = self.grad_op(self.net)
        return gradient_function(x, y)

x = ms.Tensor([[0.8, 0.6, 0.2], [1.8, 1.3, 1.1]], dtype=ms.float32)
y = ms.Tensor([[0.11, 3.3, 1.1], [1.1, 0.2, 1.4], [1.1, 2.2, 0.3]], dtype=ms.float32)
output = GradNetWrtX(Net())(x, y)
print(output)

[[9.02      5.4       7.2000003]
[9.02      5.4       7.2000003]]


[4]:

from mindspore.ops._grad.grad_base import bprop_getters

@bprop_getters.register(ops.Mul)
def get_bprop_mul(self):
    """Grad definition for Mul operation."""
    # Fix: the original used `P.Mul()` but `P` is never imported in this
    # document; `ops` is the alias in scope.
    mul_func = ops.Mul()

    def bprop(x, y, out, dout):
        # d(x*y)/dx = y * dout ; d(x*y)/dy = x * dout
        bc_dx = mul_func(y, dout)
        bc_dy = mul_func(x, dout)
        # Fix: the gradients were computed but never returned.
        return bc_dx, bc_dy

    return bprop


## 动静统一

### 动态图和静态图互相转换

[5]:

# Switch the global execution mode to dynamic graph (PyNative); operators
# after this call execute eagerly instead of being compiled into a graph.
ms.set_context(mode=ms.PYNATIVE_MODE)


### 动静结合

MindSpore支持在动态图下使用静态编译的方式来进行混合执行，通过使用`jit`修饰需要用静态图来执行的函数对象，即可实现动态图和静态图的混合执行，更多`jit`的使用可参考jit文档。

[6]:

import numpy as np
import mindspore as ms
import mindspore.nn as nn

# NOTE(review): the header of this inner Cell was lost in extraction;
# reconstructed per the MindSpore mixed-mode (jit) tutorial.
class AddMulMul(nn.Cell):
    def __init__(self):
        super(AddMulMul, self).__init__()
        self.param = ms.Parameter(ms.Tensor(0.5, ms.float32))

    # @ms.jit compiles this construct() as a static graph even though the
    # surrounding network runs in PyNative mode (mixed execution).
    @ms.jit
    def construct(self, x):
        x = x + x
        x = x * self.param
        x = x * x
        return x

class CellCallSingleCell(nn.Cell):
    def __init__(self):
        super(CellCallSingleCell, self).__init__()
        # Fix: self.conv was called in construct() but never created; the
        # line was lost in extraction. Restored per the tutorial (the 2x2
        # all-ones conv on a 2x2 input yields 4, and
        # add_mul_mul(bn(4)) ** matches the printed 15.99984).
        self.conv = nn.Conv2d(1, 2, kernel_size=2, stride=1, padding=0,
                              weight_init="ones", pad_mode="valid")
        self.bn = nn.BatchNorm2d(2, momentum=0.99, eps=0.00001, gamma_init="ones")
        self.relu = nn.ReLU()
        self.add_mul_mul = AddMulMul()

    def construct(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.add_mul_mul(x)  # jit-compiled sub-cell inside PyNative flow
        x = self.relu(x)
        return x

ms.set_context(mode=ms.PYNATIVE_MODE, device_target="CPU")
inputs = ms.Tensor(np.ones([1, 1, 2, 2]).astype(np.float32))
net = CellCallSingleCell()
out = net(inputs)
print(out)

[[[[15.99984]]

[[15.99984]]]]


### JIT Fallback

JIT Fallback是从静态图的角度出发考虑静态图和动态图的统一。通过JIT Fallback特性，静态图可以支持尽量多的动态图语法，使得静态图提供接近动态图的语法使用体验，从而实现动静统一。为了便于用户选择是否使用JIT Fallback特性的能力，提供了开关`MS_DEV_ENABLE_FALLBACK`，当前默认已经打开。如果需要关闭，可以使用命令：`export MS_DEV_ENABLE_FALLBACK=0`。

#### 创建和使用Tensor

JIT Fallback支持在静态图模式下创建和使用Tensor。

[7]:

import mindspore.nn as nn
import mindspore as ms

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self):
        # JIT Fallback: a Tensor may be created inside graph-mode construct().
        return ms.Tensor(1, dtype=ms.int32)

ms.set_context(mode=ms.GRAPH_MODE)

net = Net()
print(net())

[1]


#### 调用第三方库

JIT Fallback支持在静态图模式下调用第三方库的对象和方法。

[8]:

import numpy as np
import mindspore as ms
import mindspore.nn as nn

# pylint: disable= W0235
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self):
        # JIT Fallback: third-party (NumPy) objects and methods are usable
        # inside graph mode; the result is wrapped back into a Tensor.
        a = np.array([1, 2, 3])
        b = np.array([4, 5, 6])
        c = a + b
        return ms.Tensor(c)

ms.set_context(mode=ms.GRAPH_MODE)
net = Net()
print(net())

[5 7 9]


#### 使用Python原生的print打印

JIT Fallback支持在静态图模式下使用Python原生的print来打印常量，它与Print算子打印信息的时机有所不同。Python原生print是在编译过程中触发打印（编译时阶段打印），而Print算子是需要图编译完成后，下发到设备端运行才打印（运行时阶段打印）。

[9]:

import numpy as np
import mindspore as ms
import mindspore.nn as nn

# pylint: disable= W0235
class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self):
        # Both sums are compile-time constants here, so the native print()
        # fires during graph compilation — before the runtime Print op would.
        x = ms.Tensor(np.array([1, 2, 3, 4, 5]))
        y = ms.Tensor(np.array([1, 2, 3, 4, 5]))
        tensor_sum = x + y
        print("tensor_sum: ", tensor_sum)
        x = np.array([1, 2, 3, 4, 5])
        y = np.array([1, 2, 3, 4, 5])
        np_sum = x + y
        print("np_sum: ", np_sum)
        return tensor_sum, ms.Tensor(np_sum)

ms.set_context(mode=ms.GRAPH_MODE)
net = Net()
net()

np_sum: [2 4 6 8 10]
tensor_sum: (2, 4, 6, 8, 10)


import numpy as np
import mindspore as ms
import mindspore.nn as nn

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self, input_x, input_y):
        # `tensor_sum` depends on graph inputs, so it is a variable; native
        # print via JIT Fallback only supports constants — this example
        # deliberately raises the ValueError shown below.
        tensor_sum = input_x + input_y
        x = np.array([1, 2, 3, 4, 5])
        y = np.array([1, 2, 3, 4, 5])
        np_sum = x + y
        print("np_sum: ", np_sum, "tensor_sum: ", tensor_sum)
        return tensor_sum, ms.Tensor(np_sum)

ms.set_context(mode=ms.GRAPH_MODE)
x = ms.Tensor(np.array([1, 2, 3, 4, 5]))
y = ms.Tensor(np.array([1, 2, 3, 4, 5]))
net = Net()
net(x, y)


ValueError: When using JIT Fallback to handle script 'print("np_sum: ", np_sum, "tensor_sum: ", tensor_sum)', the inputs should be constant, but found variable 'tensor_sum' to be nonconstant.


#### 使用raise和assert

JIT Fallback支持在静态图模式下使用raise和assert。

import mindspore.nn as nn
import mindspore as ms

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self, x):
        # JIT Fallback allows raising Python exceptions inside graph mode.
        if x <= 0:
            raise ValueError("x should be greater than 0.")
        else:
            x += 1
        return x

ms.set_context(mode=ms.GRAPH_MODE)
net = Net()
net(-1)  # raises ValueError: x should be greater than 0.


ValueError: x should be greater than 0.


import mindspore.nn as nn
import mindspore as ms

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()

    def construct(self):
        x = 1
        # Deliberately failing assertion to demonstrate assert support
        # in graph mode.
        assert 1 in [2, 3, 4]
        return x

ms.set_context(mode=ms.GRAPH_MODE)
net = Net()
net()  # raises AssertionError


AssertionError.


#### 调用Python内置函数

MindSpore在静态图模式下已经支持了一些Python内置函数，包括但不限于len、isinstance、map、zip等，详情请参考静态图语法支持。通过JIT Fallback，可以在常量场景中支持更多的Python内置函数的用法。下面简单举例支持的部分Python内置函数。

##### dict()

[10]:

import mindspore as ms

@ms.jit
def func():
    a = dict()                                          # empty dict
    b = dict(a='a', b='b', t='t')                       # keyword arguments
    c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))   # from zipped pairs
    d = dict([('one', 1), ('two', 2), ('three', 3)])    # from an iterable of pairs
    return a, b, c, d

a, b, c, d = func()
print("a: ", a)
print("b: ", b)
print("c: ", c)
print("d: ", d)

a: {}
b: {'a': 'a', 'b': 'b', 't': 't'}
c: {'one': 1, 'two': 2, 'three': 3}
d: {'one': 1, 'two': 2, 'three': 3}

##### type()

[11]:

import numpy as np
import mindspore as ms

@ms.jit
def func():
    # type() on constants is resolved at compile time via JIT Fallback.
    a = type(1)
    b = type(1.0)
    c = type([1, 2, 3])
    d = type((1, 2, 3))
    e = type({'a': 1, 'b': 2})
    f = type(np.array([1, 2, 3]))
    g = type(ms.Tensor([1, 2, 3]))
    return a, b, c, d, e, f, g

a, b, c, d, e, f, g = func()
print("a: ", a)
print("b: ", b)
print("c: ", c)
print("d: ", d)
print("e: ", e)
print("f: ", f)
print("g: ", g)

a: <class 'int'>
b: <class 'float'>
c: <class 'list'>
d: <class 'tuple'>
e: <class 'dict'>
f: <class 'numpy.ndarray'>
g: <class 'mindspore.common.tensor.Tensor'>


type作为Python的原生函数还有另外一种使用方法，即type(name, bases, dict)返回name类型的类对象，由于该用法应用场景较少，因此暂不支持。

#### 支持常量场景下控制流

[ ]:

import numpy as np
import mindspore as ms

@ms.jit
def func():
    # Constant control flow: `x` is a compile-time NumPy scalar, so the
    # branch is resolved during compilation via JIT Fallback.
    x = np.array(1)
    if x <= 1:
        x += 1
    return ms.Tensor(x)

res = func()
print("res: ", res)  # -> res: 2

res: 2


#### 支持运行时(Runtime)阶段的JIT Fallback

JIT Fallback处理不支持的语法表达式时，将会生成相应的节点，常量会在编译时阶段推导出值，否则这些节点将传递到后端运行时，在后端通过Python的能力执行得到结果。示例代码如下，np.add(x, y)会生成相应节点，作为函数的返回值将会传递到运行时，当前已支持部分场景下的运行时阶段的JIT Fallback。

[ ]:

import numpy as np
import mindspore as ms

# NOTE(review): the function body was lost in extraction (the decorator had
# nothing to decorate); reconstructed per the surrounding prose, which says
# np.add(x, y) is returned and executed by runtime-stage JIT Fallback.
@ms.jit
def test_np_add():
    x = np.array([1, 2, 3, 4, 5])
    y = np.array([1, 2, 3, 4, 5])
    return np.add(x, y)

np_add_res = test_np_add()
print(np_add_res)  # -> [ 2  4  6  8 10]


[ 2  4  6  8  10]


#### 顶层图支持返回list、dict、scalar、none等基础类型

##### 顶层图支持返回list
[ ]:

import mindspore as ms

@ms.jit
def test_return_list():
    # The top-level graph may return a list mixing scalars, strings,
    # bools, None and Tensors.
    return [1, "a", True, None, ms.Tensor([2])]

res = test_return_list()
print(res)

[1, "a", True, None, Tensor([2])]

##### 顶层图支持返回dict
[ ]:

import mindspore as ms

@ms.jit
def test_return_dict():
    # The top-level graph may return a dict value.
    x = {'a': 1, 'b': 2}
    y = x.get('a')
    y_tensor = ms.Tensor([y])
    z = dict(a=y_tensor)
    return z

res = test_return_dict()
print(res)

{'a': ms.Tensor(np.array(1), ms.int64)}

##### 顶层图支持返回scalar
[ ]:

import mindspore as ms

@ms.jit
def test_return_scalar(x, y):
    # ms.mutable marks the inputs as variables, so the graph returns a
    # Python scalar rather than folding to a constant.
    return x + y

res = test_return_scalar(ms.mutable(1), ms.mutable(2))
print(res)  # -> 3

3

##### 顶层图支持返回None
[ ]:

import mindspore as ms

@ms.jit
def test_return_none():
    # The top-level graph may include None among its return values.
    return 1, "a", None

res = test_return_none()
print(res)  # -> (1, 'a', None)

(1, 'a', None)


#### 使用须知

1.当前JIT Fallback支持常量场景，即要求编译期间能够确定实际值。有限支持部分变量场景。

2.JIT Fallback对标动态图的支持能力，须在动态图语法范围内，包括但不限于数据类型等。

3.当前常量控制流场景中暂不支持对Numpy Array数据的取下标赋值，错误的代码用例如下：

import numpy as np
import mindspore as ms

@ms.jit
def func():
    x = np.array([1, 2, 3])
    # Indexed assignment on a NumPy array is NOT supported in constant
    # control-flow scenarios; this deliberately triggers the RuntimeError
    # shown below.
    x[0] += 1
    return ms.Tensor(x)

res = func()
print("res: ", res)


RuntimeError: The 'setitem' operation does not support the type [External, Int64, Int64].


4.值得注意的是，在常量场景中，NumPy整型数据、浮点型数据的运算结果将转换为常量进行保存，因此其运算结果可以作为函数返回值。例如：

[13]:

import numpy as np
import mindspore as ms

# NOTE(review): the function definition around these constants was lost in
# extraction (the decorator had nothing to decorate and `res` was undefined);
# reconstructed per the surrounding prose — constant NumPy arithmetic is
# folded at compile time and may be returned directly.
@ms.jit
def test_np_add_constant():
    x = 1.0
    y = 2.0
    return np.add(x, y)

res = test_np_add_constant()
print("res:", res)  # -> res: 3.0

res: 3.0


5.通过JIT Fallback支持的NumPy第三方库，与MindSpore提供的mindspore.numpy不同。

mindspore.numpy是通过MindSpore框架的算子能力实现的，涉及运行时阶段的算子计算，无法在编译期阶段推导其结果(变量的推导结果为None)。示例代码如下，对mnp.average(x)的结果使用Tensor()方法，不符合常量场景的条件，将会引发报错。

import mindspore as ms
import mindspore.numpy as mnp

@ms.jit
def test_mnp_average():
    x = mnp.array(([[1., 2.], [3., 4.]]))
    # mindspore.numpy runs as operators at runtime, so its result cannot be
    # inferred at compile time; wrapping it in ms.Tensor() deliberately
    # triggers the TypeError shown below.
    x_average = mnp.average(x)
    return ms.Tensor(x_average)

out = test_mnp_average()
print(out)


TypeError: For 'Tensor', the type of input_data should be one of '['Tensor', 'ndarray', 'str_', 'list', 'tuple', 'float', 'int', 'bool', 'complex']', but got 'None' with type 'NoneType'.