# Function Differences with torch.nn.Module.children
## torch.nn.Module.children
```python
torch.nn.Module.children()
```
For more information, see [torch.nn.Module.children](https://pytorch.org/docs/1.5.0/nn.html#torch.nn.Module.children).
## mindspore.nn.Cell.cells
```python
mindspore.nn.Cell.cells()
```
For more information, see [mindspore.nn.Cell.cells](https://mindspore.cn/docs/en/r2.0.0-alpha/api_python/nn/mindspore.nn.Cell.html#mindspore.nn.Cell.cells).
## Differences
PyTorch: `children()` returns a generator that yields the immediate child modules.
MindSpore: `cells()` returns `odict_values` containing the immediate child cells.
## Code Example
```python
# The following implements mindspore.nn.Cell.cells() with MindSpore.
from mindspore import nn

class ConvBN(nn.Cell):
    def __init__(self):
        super(ConvBN, self).__init__()
        self.conv = nn.Conv2d(3, 64, 3)
        self.bn = nn.BatchNorm2d(64)

    def construct(self, x):
        x = self.conv(x)
        x = self.bn(x)
        return x

class MyNet(nn.Cell):
    def __init__(self):
        super(MyNet, self).__init__()
        self.build_block = nn.SequentialCell(ConvBN(), nn.ReLU())

    def construct(self, x):
        return self.build_block(x)

net = MyNet()
# cells() returns odict_values over the immediate child cells.
print(net.cells())
```
```text
# Out:
odict_values([SequentialCell<
(0): ConvBN<
(conv): Conv2d
(bn): BatchNorm2d
>
(1): ReLU<>
>])
```
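The `odict_values` returned by `cells()` can be iterated directly, just like the PyTorch generator shown below. A minimal sketch, assuming the MindSpore `net` constructed above:

```python
# Iterate over the immediate child cells (sketch; assumes the `net`
# defined in the MindSpore example above).
for cell in net.cells():
    print(type(cell).__name__)
# Expected output: SequentialCell
```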
```python
# The following implements torch.nn.Module.children() with PyTorch.
import torch.nn as nn

class ConvBN(nn.Module):
    def __init__(self):
        super(ConvBN, self).__init__()
        self.conv = nn.Conv2d(3, 64, 3)
        self.bn = nn.BatchNorm2d(64)

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        return x

class MyNet(nn.Module):
    def __init__(self):
        super(MyNet, self).__init__()
        self.build_block = nn.Sequential(ConvBN(), nn.ReLU())

    def forward(self, x):
        return self.build_block(x)

net = MyNet()
# children() returns a generator over the immediate child modules.
print(net.children())
for child in net.children():
    print(child)
```
```text
# Out:
<generator object Module.children at 0x...>
Sequential(
(0): ConvBN(
(conv): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1))
(bn): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
)
(1): ReLU()
)
```
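Because the PyTorch return value is a generator, it can only be traversed once; if positional access to the immediate children is needed, it can be materialized with `list()`. A minimal sketch, assuming the PyTorch `net` from the example above (the same pattern applies to `list(net.cells())` in MindSpore):

```python
# Materialize the generator so the children can be indexed (sketch;
# assumes the PyTorch `net` defined above).
children = list(net.children())
print(len(children))   # 1 -- the nn.Sequential block
print(children[0])     # the Sequential(ConvBN, ReLU) container
```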