# Python实现Vgg16经典网络架构代码

## Python实现Vgg16经典网络架构代码

——16层明显比13层要高，但19层却和16层差异不太大，35层与19层之间的差异就更小——随着深度的增加，性能提升会逐渐趋于饱和。

对于50x50的图像而言，若追求测试准确率在90%以上，则只需要小于20层左右就足够了。为了计算效率，通常也不必使用过深的网络。

VGG架构对于神经网络研究和使用都有重要的意义，它不仅简单、有效，而且非常适合用来做各种实验

```python
import torch
import torch.nn as nn
from torchinfo import summary
from torch.nn import functional as F

```

```python
# Dummy input batch: 10 RGB images at the VGG input resolution 224x224.
data = torch.ones((10, 3, 224, 224))
```

```python
class Vgg16(nn.Module):
    """VGG-16 (configuration D) from Simonyan & Zisserman,
    "Very Deep Convolutional Networks for Large-Scale Image Recognition".

    Expects input of shape (N, 3, 224, 224) and returns (N, 1000)
    softmax probabilities over the 1000 ImageNet classes.
    Total parameter count: 138,357,544 (matches the torchinfo summary).
    """

    def __init__(self):
        super().__init__()
        # Stage 1: 3 -> 64 channels, two 3x3 convs, 2x2 pool (224 -> 112).
        self.conv1 = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(64, 64, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # Stage 2: 64 -> 128 channels (112 -> 56).
        self.conv2 = nn.Sequential(
            nn.Conv2d(64, 128, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 128, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # Stage 3: 128 -> 256 channels, three convs (56 -> 28).
        self.conv3 = nn.Sequential(
            nn.Conv2d(128, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # Stage 4: 256 -> 512 channels, three convs (28 -> 14).
        self.conv4 = nn.Sequential(
            nn.Conv2d(256, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # Stage 5: 512 -> 512 channels, three convs (14 -> 7).
        self.conv5 = nn.Sequential(
            nn.Conv2d(512, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # Classifier: 7*7*512 -> 4096 -> 4096 -> 1000.
        # NOTE(review): the ReLU after the final 1000-way Linear matches the
        # original listing (the summary shows ReLU: 2-37), but canonical VGG
        # ends with the Linear layer only — confirm this is intended.
        self.fc = nn.Sequential(
            nn.Linear(7 * 7 * 512, 4096),
            nn.ReLU(inplace=True),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Linear(4096, 1000),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        """Run the five conv stages, flatten, dropout, classify."""
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.conv4(x)
        x = self.conv5(x)
        x = torch.flatten(x, 1)  # (N, 512, 7, 7) -> (N, 25088)
        # NOTE(review): F.dropout defaults to training=True, so dropout fires
        # even in eval mode; an nn.Dropout layer inside self.fc would respect
        # train()/eval(). Kept as in the original listing.
        x = self.fc(F.dropout(x, 0.5))
        # dim=1 fixes the "Implicit dimension choice for softmax has been
        # deprecated" warning visible in the captured output.
        out = F.softmax(x, dim=1)
        return out

```

```python
# Instantiate the network and sanity-check it with a single forward
# pass over the all-ones dummy batch defined above.
vgg_ = Vgg16()
vgg_(data)
```
```
<ipython-input-25-17853c1700ab>:50: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
out =F.softmax(x)

tensor([[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010],
[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010],
[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010],
...,
[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010],
[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010],
[0.0010, 0.0010, 0.0010,  ..., 0.0010, 0.0010, 0.0010]],
```

```python
# Per-layer report (output shapes and parameter counts) down to depth 3,
# driving a forward pass with the dummy batch's shape.
summary(vgg_, data.shape, depth=3)
```
```
<ipython-input-25-17853c1700ab>:50: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.
out =F.softmax(x)

==========================================================================================
Layer (type:depth-idx)                   Output Shape              Param #
==========================================================================================
Vgg16                                    [10, 1000]                --
├─Sequential: 1-1                        [10, 64, 112, 112]        --
│    └─Conv2d: 2-1                       [10, 64, 224, 224]        1,792
│    └─ReLU: 2-2                         [10, 64, 224, 224]        --
│    └─Conv2d: 2-3                       [10, 64, 224, 224]        36,928
│    └─ReLU: 2-4                         [10, 64, 224, 224]        --
│    └─MaxPool2d: 2-5                    [10, 64, 112, 112]        --
├─Sequential: 1-2                        [10, 128, 56, 56]         --
│    └─Conv2d: 2-6                       [10, 128, 112, 112]       73,856
│    └─ReLU: 2-7                         [10, 128, 112, 112]       --
│    └─Conv2d: 2-8                       [10, 128, 112, 112]       147,584
│    └─ReLU: 2-9                         [10, 128, 112, 112]       --
│    └─MaxPool2d: 2-10                   [10, 128, 56, 56]         --
├─Sequential: 1-3                        [10, 256, 28, 28]         --
│    └─Conv2d: 2-11                      [10, 256, 56, 56]         295,168
│    └─ReLU: 2-12                        [10, 256, 56, 56]         --
│    └─Conv2d: 2-13                      [10, 256, 56, 56]         590,080
│    └─ReLU: 2-14                        [10, 256, 56, 56]         --
│    └─Conv2d: 2-15                      [10, 256, 56, 56]         590,080
│    └─ReLU: 2-16                        [10, 256, 56, 56]         --
│    └─MaxPool2d: 2-17                   [10, 256, 28, 28]         --
├─Sequential: 1-4                        [10, 512, 14, 14]         --
│    └─Conv2d: 2-18                      [10, 512, 28, 28]         1,180,160
│    └─ReLU: 2-19                        [10, 512, 28, 28]         --
│    └─Conv2d: 2-20                      [10, 512, 28, 28]         2,359,808
│    └─ReLU: 2-21                        [10, 512, 28, 28]         --
│    └─Conv2d: 2-22                      [10, 512, 28, 28]         2,359,808
│    └─ReLU: 2-23                        [10, 512, 28, 28]         --
│    └─MaxPool2d: 2-24                   [10, 512, 14, 14]         --
├─Sequential: 1-5                        [10, 512, 7, 7]           --
│    └─Conv2d: 2-25                      [10, 512, 14, 14]         2,359,808
│    └─ReLU: 2-26                        [10, 512, 14, 14]         --
│    └─Conv2d: 2-27                      [10, 512, 14, 14]         2,359,808
│    └─ReLU: 2-28                        [10, 512, 14, 14]         --
│    └─Conv2d: 2-29                      [10, 512, 14, 14]         2,359,808
│    └─ReLU: 2-30                        [10, 512, 14, 14]         --
│    └─MaxPool2d: 2-31                   [10, 512, 7, 7]           --
├─Sequential: 1-6                        [10, 1000]                --
│    └─Linear: 2-32                      [10, 4096]                102,764,544
│    └─ReLU: 2-33                        [10, 4096]                --
│    └─Linear: 2-34                      [10, 4096]                16,781,312
│    └─ReLU: 2-35                        [10, 4096]                --
│    └─Linear: 2-36                      [10, 1000]                4,097,000
│    └─ReLU: 2-37                        [10, 1000]                --
==========================================================================================
Total params: 138,357,544
Trainable params: 138,357,544
Non-trainable params: 0
==========================================================================================
Input size (MB): 6.02
Forward/backward pass size (MB): 1084.54
Params size (MB): 553.43
Estimated Total Size (MB): 1643.99
==========================================================================================
```

THE END