In [1]:
import torch
In [2]:
torch.cuda.is_available()
Out[2]:
False
In [3]:
from torchvision import models
In [4]:
dir(models)
Out[4]:
['AlexNet',
 'AlexNet_Weights',
 'ConvNeXt',
 'ConvNeXt_Base_Weights',
 'ConvNeXt_Large_Weights',
 'ConvNeXt_Small_Weights',
 'ConvNeXt_Tiny_Weights',
 'DenseNet',
 'DenseNet121_Weights',
 'DenseNet161_Weights',
 'DenseNet169_Weights',
 'DenseNet201_Weights',
 'EfficientNet',
 'EfficientNet_B0_Weights',
 'EfficientNet_B1_Weights',
 'EfficientNet_B2_Weights',
 'EfficientNet_B3_Weights',
 'EfficientNet_B4_Weights',
 'EfficientNet_B5_Weights',
 'EfficientNet_B6_Weights',
 'EfficientNet_B7_Weights',
 'EfficientNet_V2_L_Weights',
 'EfficientNet_V2_M_Weights',
 'EfficientNet_V2_S_Weights',
 'GoogLeNet',
 'GoogLeNetOutputs',
 'GoogLeNet_Weights',
 'Inception3',
 'InceptionOutputs',
 'Inception_V3_Weights',
 'MNASNet',
 'MNASNet0_5_Weights',
 'MNASNet0_75_Weights',
 'MNASNet1_0_Weights',
 'MNASNet1_3_Weights',
 'MaxVit',
 'MaxVit_T_Weights',
 'MobileNetV2',
 'MobileNetV3',
 'MobileNet_V2_Weights',
 'MobileNet_V3_Large_Weights',
 'MobileNet_V3_Small_Weights',
 'RegNet',
 'RegNet_X_16GF_Weights',
 'RegNet_X_1_6GF_Weights',
 'RegNet_X_32GF_Weights',
 'RegNet_X_3_2GF_Weights',
 'RegNet_X_400MF_Weights',
 'RegNet_X_800MF_Weights',
 'RegNet_X_8GF_Weights',
 'RegNet_Y_128GF_Weights',
 'RegNet_Y_16GF_Weights',
 'RegNet_Y_1_6GF_Weights',
 'RegNet_Y_32GF_Weights',
 'RegNet_Y_3_2GF_Weights',
 'RegNet_Y_400MF_Weights',
 'RegNet_Y_800MF_Weights',
 'RegNet_Y_8GF_Weights',
 'ResNeXt101_32X8D_Weights',
 'ResNeXt101_64X4D_Weights',
 'ResNeXt50_32X4D_Weights',
 'ResNet',
 'ResNet101_Weights',
 'ResNet152_Weights',
 'ResNet18_Weights',
 'ResNet34_Weights',
 'ResNet50_Weights',
 'ShuffleNetV2',
 'ShuffleNet_V2_X0_5_Weights',
 'ShuffleNet_V2_X1_0_Weights',
 'ShuffleNet_V2_X1_5_Weights',
 'ShuffleNet_V2_X2_0_Weights',
 'SqueezeNet',
 'SqueezeNet1_0_Weights',
 'SqueezeNet1_1_Weights',
 'SwinTransformer',
 'Swin_B_Weights',
 'Swin_S_Weights',
 'Swin_T_Weights',
 'Swin_V2_B_Weights',
 'Swin_V2_S_Weights',
 'Swin_V2_T_Weights',
 'VGG',
 'VGG11_BN_Weights',
 'VGG11_Weights',
 'VGG13_BN_Weights',
 'VGG13_Weights',
 'VGG16_BN_Weights',
 'VGG16_Weights',
 'VGG19_BN_Weights',
 'VGG19_Weights',
 'ViT_B_16_Weights',
 'ViT_B_32_Weights',
 'ViT_H_14_Weights',
 'ViT_L_16_Weights',
 'ViT_L_32_Weights',
 'VisionTransformer',
 'Wide_ResNet101_2_Weights',
 'Wide_ResNet50_2_Weights',
 '_GoogLeNetOutputs',
 '_InceptionOutputs',
 '__builtins__',
 '__cached__',
 '__doc__',
 '__file__',
 '__loader__',
 '__name__',
 '__package__',
 '__path__',
 '__spec__',
 '_api',
 '_meta',
 '_utils',
 'alexnet',
 'convnext',
 'convnext_base',
 'convnext_large',
 'convnext_small',
 'convnext_tiny',
 'densenet',
 'densenet121',
 'densenet161',
 'densenet169',
 'densenet201',
 'detection',
 'efficientnet',
 'efficientnet_b0',
 'efficientnet_b1',
 'efficientnet_b2',
 'efficientnet_b3',
 'efficientnet_b4',
 'efficientnet_b5',
 'efficientnet_b6',
 'efficientnet_b7',
 'efficientnet_v2_l',
 'efficientnet_v2_m',
 'efficientnet_v2_s',
 'get_model',
 'get_model_builder',
 'get_model_weights',
 'get_weight',
 'googlenet',
 'inception',
 'inception_v3',
 'list_models',
 'maxvit',
 'maxvit_t',
 'mnasnet',
 'mnasnet0_5',
 'mnasnet0_75',
 'mnasnet1_0',
 'mnasnet1_3',
 'mobilenet',
 'mobilenet_v2',
 'mobilenet_v3_large',
 'mobilenet_v3_small',
 'mobilenetv2',
 'mobilenetv3',
 'optical_flow',
 'quantization',
 'regnet',
 'regnet_x_16gf',
 'regnet_x_1_6gf',
 'regnet_x_32gf',
 'regnet_x_3_2gf',
 'regnet_x_400mf',
 'regnet_x_800mf',
 'regnet_x_8gf',
 'regnet_y_128gf',
 'regnet_y_16gf',
 'regnet_y_1_6gf',
 'regnet_y_32gf',
 'regnet_y_3_2gf',
 'regnet_y_400mf',
 'regnet_y_800mf',
 'regnet_y_8gf',
 'resnet',
 'resnet101',
 'resnet152',
 'resnet18',
 'resnet34',
 'resnet50',
 'resnext101_32x8d',
 'resnext101_64x4d',
 'resnext50_32x4d',
 'segmentation',
 'shufflenet_v2_x0_5',
 'shufflenet_v2_x1_0',
 'shufflenet_v2_x1_5',
 'shufflenet_v2_x2_0',
 'shufflenetv2',
 'squeezenet',
 'squeezenet1_0',
 'squeezenet1_1',
 'swin_b',
 'swin_s',
 'swin_t',
 'swin_transformer',
 'swin_v2_b',
 'swin_v2_s',
 'swin_v2_t',
 'vgg',
 'vgg11',
 'vgg11_bn',
 'vgg13',
 'vgg13_bn',
 'vgg16',
 'vgg16_bn',
 'vgg19',
 'vgg19_bn',
 'video',
 'vision_transformer',
 'vit_b_16',
 'vit_b_32',
 'vit_h_14',
 'vit_l_16',
 'vit_l_32',
 'wide_resnet101_2',
 'wide_resnet50_2']
In [5]:
# In[3]:
alexnet = models.AlexNet()
In [6]:
resnet = models.resnet101(pretrained=True)
/opt/anaconda/lib/python3.9/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.
  warnings.warn(
/opt/anaconda/lib/python3.9/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=ResNet101_Weights.IMAGENET1K_V1`. You can also use `weights=ResNet101_Weights.DEFAULT` to get the most up-to-date weights.
  warnings.warn(msg)
In [7]:
resnet
Out[7]:
ResNet(
  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (relu): ReLU(inplace=True)
  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (layer1): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer2): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer3): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (8): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (9): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (10): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (11): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (12): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (13): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (14): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (15): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (16): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (17): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (18): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (19): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (20): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (21): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (22): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer4): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
  (fc): Linear(in_features=2048, out_features=1000, bias=True)
)
In [8]:
# In[6]:
from torchvision import transforms
preprocess = transforms.Compose([
       transforms.Resize(256),
       transforms.CenterCrop(224),
       transforms.ToTensor(),
       transforms.Normalize(
           mean=[0.485, 0.456, 0.406],
           std=[0.229, 0.224, 0.225]
       )])
In [9]:
from PIL import Image
In [10]:
img = Image.open("/home/scarl/Desktop/dixieanddiego.jpeg")
In [11]:
img
Out[11]:
In [12]:
# In[9]:
img_t = preprocess(img)
In [13]:
# In[10]:
import torch
batch_t = torch.unsqueeze(img_t, 0)
In [14]:
resnet.eval()
Out[14]:
ResNet(
  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (relu): ReLU(inplace=True)
  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (layer1): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer2): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer3): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (8): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (9): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (10): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (11): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (12): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (13): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (14): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (15): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (16): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (17): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (18): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (19): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (20): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (21): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (22): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer4): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
  (fc): Linear(in_features=2048, out_features=1000, bias=True)
)
In [15]:
out = resnet(batch_t)
In [16]:
out
Out[16]:
tensor([[-1.0904e+00, -1.1668e+00, -2.0417e+00, -4.8543e-01, -3.2340e-01,
         -1.0449e+00, -3.5000e-01, -1.6001e+00, -1.5317e+00, -2.7600e+00,
         -2.4733e+00, -1.3989e+00, -2.6538e+00, -3.2938e+00, -3.3542e+00,
         -2.8655e+00, -2.8140e+00, -1.3841e+00, -6.1175e-01, -2.8244e+00,
         -3.0434e+00, -2.7523e+00, -3.2458e+00, -4.7236e-01, -2.9510e+00,
          2.4230e+00,  3.7682e-02, -9.3466e-01,  1.6807e+00,  7.4369e-01,
         -1.4638e+00, -1.6478e+00, -1.0292e+00, -1.9701e+00, -5.8352e-01,
         -1.1160e+00, -3.7834e-01, -1.1535e+00,  2.9326e+00, -1.2817e+00,
         -1.2661e+00, -7.1476e-01, -8.7657e-01, -8.4959e-01, -4.7823e-01,
          2.1196e+00, -1.3153e-01,  1.0956e+00, -1.8428e+00, -1.4130e+00,
         -1.0377e+00,  1.0754e+00, -2.3712e+00, -2.7846e+00, -1.8020e+00,
         -2.9382e+00, -1.2152e+00, -1.9721e+00, -4.0388e+00, -1.0528e+00,
         -2.6749e-01, -1.3493e-01,  3.0475e-01, -1.1529e+00, -6.1889e-01,
         -1.8220e+00, -2.7431e-01, -1.0268e+00, -1.3868e+00, -5.5275e-02,
         -3.5556e+00,  2.3322e+00, -1.0626e+00,  8.0075e-01, -1.4265e+00,
          1.6144e+00,  1.6480e+00,  1.2762e+00,  1.6759e+00,  3.4125e-01,
         -2.6396e-01, -4.5866e+00, -1.4154e+00, -3.4036e-02, -1.8247e+00,
         -1.7416e+00, -1.8505e+00,  9.8279e-01, -1.2726e+00, -9.9827e-01,
         -2.2664e+00, -2.4914e+00, -1.8885e+00, -2.0182e+00, -9.3841e-01,
         -3.9443e+00, -2.7029e-01,  2.5416e-01, -2.6439e+00,  1.0195e+00,
         -6.1424e-01, -1.8003e-01, -4.2296e-01,  1.3388e+00,  1.3112e+00,
         -2.2832e+00, -7.6236e-01, -2.0559e+00, -1.5250e+00, -3.1263e+00,
         -3.2041e+00, -1.3483e+00, -2.6488e+00, -3.2594e+00,  7.7316e-01,
         -9.4879e-01, -2.1819e+00, -4.1974e+00,  8.5919e-01, -4.9926e-01,
         -1.0109e+00, -5.8167e-01, -7.3095e-01, -7.2455e-01, -3.5538e-01,
         -2.2992e+00, -5.6232e-01, -2.6043e+00, -1.2001e+00, -1.9929e+00,
         -2.7982e+00, -3.9422e+00, -3.6556e+00, -3.7879e+00, -1.6379e+00,
         -2.8505e+00, -3.2467e-01, -2.4452e+00, -3.0563e+00, -2.2048e+00,
         -2.7973e+00, -4.7114e+00, -3.8669e+00,  4.7274e-01, -2.6823e+00,
          1.8305e+00, -1.9225e-01, -3.0969e+00, -1.0366e+00, -3.3153e+00,
          5.0041e-01,  7.8777e+00, -1.1214e-01,  1.2403e+00,  1.1181e+00,
          1.6836e+00,  2.9382e-02,  1.4893e+00,  6.9407e+00,  6.6031e+00,
          3.8373e+00,  2.7453e+00,  4.1606e+00,  5.3203e+00,  3.9964e+00,
          5.5048e+00,  4.4726e+00,  4.0824e+00,  7.2195e+00,  5.0909e+00,
          6.5061e+00,  8.6167e+00,  8.5025e+00,  5.6220e+00,  5.0515e+00,
          6.2261e-01,  5.6898e+00,  6.2580e+00,  8.1428e+00,  6.3730e+00,
          6.2374e+00,  3.4961e+00,  4.0663e+00,  1.6179e+00,  4.0095e+00,
          2.8209e+00,  2.5095e+00,  1.3428e+00,  1.3603e+00,  2.9749e+00,
          7.0451e-01,  2.2906e+00,  3.5392e+00,  2.5792e+00,  3.9653e-01,
          5.3123e+00,  2.2098e+00,  3.8281e+00,  1.2428e+00,  4.7331e+00,
         -9.8908e-01,  1.1331e+00,  2.1335e+00,  2.0618e+00,  4.5687e-01,
          1.2661e+01,  1.0858e+01,  8.1510e+00,  1.4033e+01,  1.0511e+01,
          8.2247e+00,  6.8734e+00,  4.3481e+00,  4.6442e+00,  4.5468e+00,
          4.8940e+00,  1.1539e+00,  3.5994e+00,  1.8430e+00,  3.2219e+00,
          4.2409e+00,  3.4736e+00,  6.1413e+00,  7.2840e+00,  6.2870e+00,
          8.5064e+00,  2.0848e+00,  9.0037e+00,  4.2897e-01,  1.1883e-01,
          1.0420e+00,  4.9533e+00,  5.1752e+00, -7.2301e-01,  3.9996e+00,
          5.6730e+00,  7.4857e+00,  4.4823e+00,  3.6960e+00,  3.2835e+00,
          3.9621e+00,  4.2900e+00,  2.7426e+00,  3.8486e+00,  1.0573e+00,
          5.3835e+00,  8.5654e+00,  1.8756e+00,  1.6950e+00,  8.8337e-01,
          2.0355e+00,  7.0631e+00,  2.2236e+00,  3.6272e+00,  5.6384e+00,
          1.7146e+00,  4.1504e+00,  3.0012e+00,  1.4308e+00,  2.2709e+00,
         -1.7988e+00, -5.9231e-01,  2.3150e+00,  3.8783e+00,  4.7591e+00,
          2.4782e+00,  2.3794e+00,  3.3140e+00,  6.4666e+00, -3.6221e-01,
          3.2589e-01,  9.2652e-02, -6.7777e-01,  4.3920e+00,  4.2228e-01,
          3.9752e+00,  4.0849e+00, -2.1340e+00,  5.9151e-02, -1.5680e+00,
         -9.5022e-01,  1.0783e+00, -2.9041e-01, -1.9805e+00,  4.1058e+00,
          3.5671e+00,  1.4189e+00, -1.4370e+00, -1.1326e-01, -2.5640e+00,
          7.1210e-02, -1.1029e+00, -2.4273e+00, -5.4464e-01, -1.4032e+00,
          7.5916e-01, -9.9186e-01, -2.8053e+00,  1.8056e+00,  1.1704e+00,
         -4.2497e-01, -1.3314e+00, -7.0134e-02, -1.8032e-01, -4.2988e-01,
         -6.3542e-01,  9.9074e-01,  2.1028e-01, -1.3181e+00, -2.1655e+00,
          7.9418e-01, -4.5057e-01,  8.9500e-01, -6.3451e-01,  8.6760e-01,
         -1.1296e+00, -2.0782e+00,  7.0174e-02, -1.0692e+00, -2.0724e-01,
         -2.5124e+00, -3.0570e+00, -2.5907e+00, -3.7309e+00, -3.5128e+00,
         -1.9501e+00, -4.0433e+00, -5.4140e-01, -1.9096e+00, -1.4356e+00,
          4.4016e-01, -9.7842e-02, -4.2314e-01,  6.9961e-01, -9.8260e-01,
         -3.9190e+00, -3.3111e+00, -1.3826e+00, -1.0086e+00, -6.7219e-01,
          2.3324e-01,  1.8042e+00,  9.3179e-02, -2.6996e-01, -1.4390e+00,
          1.1770e+00,  2.3840e+00, -1.7163e+00,  2.0692e+00, -2.9829e-02,
          2.7260e-01, -1.7031e+00, -2.4667e-01,  1.7705e-01, -8.6037e-01,
          2.5990e+00,  4.0907e+00,  2.9051e+00,  1.6401e+00,  2.6546e+00,
          2.0786e+00,  2.6095e+00,  1.7333e+00, -8.9363e-02, -2.5797e+00,
         -2.4335e+00, -1.8472e+00, -2.2086e+00, -2.0980e+00, -1.7158e+00,
         -2.6723e+00, -3.2206e+00, -1.2329e+00, -2.6783e+00, -2.6129e+00,
         -2.1545e+00, -4.0390e+00, -3.2204e+00, -1.2480e+00, -6.7074e-01,
         -3.2035e+00, -6.9192e-01, -2.9632e+00, -2.5541e+00, -2.2132e+00,
         -9.2520e-01, -5.0208e-01, -3.0787e+00, -6.8115e-01, -1.7241e+00,
         -2.3506e+00, -2.5971e+00, -5.9822e-01, -2.0379e+00, -2.7715e-01,
         -6.4408e-01, -3.3498e+00, -8.0627e-01, -1.3246e+00, -2.3683e-01,
         -3.3905e-01, -3.1026e-01,  3.6579e-01, -4.4358e-01, -1.9913e+00,
         -1.7158e+00, -1.6471e+00, -4.7441e-01, -1.3035e+00, -1.1115e+00,
         -2.2582e+00,  2.0854e+00,  8.7390e-01, -6.8241e-01,  2.5315e+00,
          1.0712e+00,  6.2597e-01, -5.8828e-01, -1.3301e+00,  5.9531e-01,
          4.1161e-01, -2.5693e+00, -1.5343e+00, -8.8652e-01, -2.2419e+00,
         -3.5076e+00, -1.2946e+00,  1.8124e+00,  2.6548e+00,  1.7810e+00,
          3.1063e+00,  1.7819e+00, -1.7137e+00,  8.5885e-01,  5.4395e+00,
          2.4034e+00, -1.9137e+00, -2.5646e+00, -1.4019e+00, -2.0832e+00,
          1.5833e+00, -6.0392e-01, -2.4141e+00,  2.4507e+00, -9.3619e-01,
         -1.1494e+00,  6.8975e-01, -1.7501e+00, -9.0480e-01, -3.1743e+00,
         -9.0075e-01, -3.6046e-01,  2.4489e+00, -1.0353e+00, -2.7801e+00,
          5.7510e-01, -6.8782e-01,  3.8469e+00, -1.8408e+00,  2.1538e+00,
         -1.8949e+00, -1.5838e+00, -6.1552e-01,  1.3319e+00,  9.1967e-01,
          6.0433e-02, -9.6974e-01, -3.5409e-01, -1.5324e+00,  7.0797e-01,
         -1.0511e+00, -2.4212e+00,  3.9841e-01,  1.9486e+00, -2.2609e+00,
         -6.6631e-01,  2.4635e-02,  7.9782e-01,  2.9294e+00, -1.1117e+00,
         -8.9587e-01, -1.6398e+00,  3.0327e-02, -2.7147e+00, -6.8583e-01,
         -4.2846e-01, -2.2939e+00,  1.3533e+00, -1.4665e+00, -1.5603e+00,
         -1.5200e+00,  1.1031e+00, -2.6800e-01, -1.5267e+00, -2.7989e+00,
         -8.0836e-01,  2.6667e+00, -3.6520e+00, -1.6983e+00,  2.1130e+00,
         -1.5813e+00, -3.1202e-01,  3.0382e+00, -2.2727e+00, -5.3383e-01,
          5.0284e-01, -1.3554e+00, -2.3153e+00,  1.4344e+00, -1.7008e-01,
         -2.6370e+00, -1.0529e+00, -1.7612e-01, -2.2839e+00,  1.7102e+00,
          4.3165e+00,  5.1389e-01, -1.5231e+00,  2.5209e+00,  1.0931e+00,
          4.9016e-01, -7.0266e-01,  1.8193e+00,  7.6597e-01, -9.8156e-01,
         -1.9123e+00, -7.4873e-01, -5.7691e-01, -1.6373e+00,  2.1504e+00,
          1.6824e+00,  5.2099e-01, -1.3465e-01, -8.2297e-01,  5.6087e+00,
         -9.4001e-01, -1.7972e+00,  3.0381e+00, -4.2407e+00,  4.2495e+00,
         -2.9323e+00,  1.0920e+00,  1.6196e-01,  2.8145e+00,  3.6251e-01,
          5.5145e-02, -7.6325e-01, -3.2630e-01,  2.1292e-01, -4.3842e-02,
          1.0560e-01, -1.3520e-02,  3.6879e+00, -2.1586e+00, -1.5620e+00,
         -2.5862e+00,  9.4896e-01, -9.6247e-01, -2.0737e-01,  2.3312e+00,
          7.4314e-01, -2.1381e+00, -1.5752e+00, -7.3083e-01, -1.5399e+00,
         -2.8111e+00, -3.3892e+00,  1.9362e+00, -2.4932e-01, -2.8979e+00,
          1.3299e-01, -2.0386e+00, -5.0631e-01, -1.5803e+00,  1.4981e-01,
         -7.5699e-01, -2.9078e+00, -1.6537e+00,  4.6957e-01, -4.2923e-01,
         -2.5567e+00, -4.0166e+00,  2.9787e-01,  6.4840e-03,  6.5514e-01,
         -1.4091e+00, -6.8225e-01,  3.8787e-01,  4.5128e-01,  2.6987e+00,
         -2.1480e+00,  5.1388e-01, -2.7847e+00,  1.0182e+00, -1.3183e+00,
         -7.1688e-01, -8.8949e-01, -2.0477e-02,  6.1998e-01,  2.0287e-01,
          2.0397e-01, -3.7026e-03, -1.2613e+00, -1.6084e+00, -2.2067e+00,
         -2.7476e-02,  4.9344e-02,  2.0253e-01, -7.7133e-01, -4.1685e-01,
          8.9362e-01, -7.2533e-01, -2.6793e+00,  1.5606e+00, -1.5778e+00,
          3.9636e+00, -1.3671e+00,  8.0656e-01, -4.1068e-01,  2.0213e-01,
          2.4759e-01,  1.1996e+00,  1.9059e+00, -5.7546e-01, -2.1301e+00,
         -2.9296e+00,  9.9389e-02, -1.4825e-01, -4.1101e+00,  1.4444e-01,
          1.6522e+00, -6.5043e-03, -3.1587e-01,  1.7700e-01, -8.3341e-01,
         -2.5506e+00,  1.2515e+00, -2.2666e+00,  1.5186e-01, -6.7732e-01,
         -1.9073e+00,  2.1568e+00, -2.2029e+00,  4.6017e-01,  3.5450e-01,
         -2.0089e+00,  7.4745e-01,  1.5467e+00, -1.7960e+00, -3.5943e+00,
         -6.3445e-01,  3.2039e-01, -1.7840e+00, -1.3694e-01, -9.8352e-01,
         -2.8258e-02, -7.6344e-01, -4.4217e-01, -5.9684e-01,  8.0783e-01,
         -7.0412e-01, -3.3636e+00, -7.7790e-01, -2.7578e+00,  9.2209e-01,
         -4.2584e-01, -9.9808e-01,  1.1161e+00, -1.9465e+00, -3.7313e-01,
          1.2282e-01, -3.4685e-01, -9.9762e-01,  1.5724e+00,  3.3453e+00,
         -2.9048e+00,  6.5056e+00, -5.4299e-01,  2.0779e+00, -2.5364e+00,
          2.7790e+00, -8.1528e-01, -6.1613e-01, -1.6129e+00,  5.8559e-01,
         -2.7908e+00, -1.2169e-01, -4.0720e+00,  8.7923e-01, -1.1615e+00,
         -5.5292e-01,  1.3400e+00,  8.5649e-02,  3.6680e-01, -1.8139e+00,
         -1.5376e+00, -3.6879e-01,  4.3072e-02, -4.5653e-01,  5.3112e-03,
          4.1589e+00,  3.0625e+00, -1.2168e+00, -1.9573e+00, -4.0727e+00,
         -7.1465e-01, -2.6228e+00, -2.0144e+00,  2.4727e-01,  1.0984e+00,
          1.3974e+00, -1.7557e+00, -2.4164e+00, -1.4241e+00, -1.0078e+00,
         -1.1970e+00, -1.6625e+00, -1.8561e+00, -2.2561e+00,  1.2971e+00,
         -6.1687e-01,  8.0461e-01,  2.1053e+00, -1.6630e+00, -2.0449e+00,
         -7.5341e-01, -7.2177e-01, -2.6725e+00,  2.1569e+00, -9.7671e-01,
         -4.9096e-01,  2.3009e+00,  2.5872e-01,  4.7187e-01, -1.2907e+00,
         -3.7256e-01,  9.7507e-01,  5.0687e-01, -6.1780e-01,  3.2095e-01,
          1.8971e+00,  8.0654e-01, -7.2457e-01, -2.7293e-01, -5.8999e-01,
         -9.0638e-01, -3.6185e-01,  1.6046e+00,  7.4521e-01, -5.6564e-01,
          3.2469e+00, -2.3399e+00,  4.6625e-02, -1.2197e+00, -1.0243e+00,
         -2.1823e+00, -5.9415e-01,  2.8287e+00, -8.8646e-01, -9.7971e-01,
          2.2416e+00,  2.7644e+00, -2.1855e+00,  4.3549e-01, -1.1383e+00,
          7.6904e-01, -1.8472e+00,  6.7333e-02,  4.2886e+00, -3.9722e-02,
          1.5513e+00, -2.8666e-02, -6.0754e-01,  1.3900e-01,  3.1203e+00,
         -1.7501e+00,  5.7998e-01,  1.0279e+00, -1.3664e+00, -7.3699e-01,
         -3.3545e+00, -1.1573e+00,  1.1231e+00, -1.8963e+00,  1.2027e+00,
          3.6436e+00, -2.8259e-01,  5.1838e-02,  2.3572e+00,  6.3443e-01,
          5.4896e-01,  1.0371e+00, -1.4199e-01,  3.3959e+00,  1.9162e+00,
         -1.1874e+00,  8.1179e-01,  5.4103e+00, -2.8328e+00,  1.5816e+00,
         -2.6704e+00, -1.3515e+00, -1.6923e+00, -4.7354e+00,  1.1257e+00,
          6.7002e+00,  7.3224e-01, -2.8834e+00,  3.3626e+00, -2.0322e+00,
         -7.1955e-01,  6.6343e+00, -1.1942e+00,  2.0644e+00, -3.5859e+00,
         -1.1364e+00, -1.4668e+00, -3.4257e+00, -1.1161e+00, -8.9603e-01,
         -2.7706e+00, -4.3550e+00, -1.2170e-01, -8.8958e-01,  3.9014e-01,
         -1.2899e+00, -3.9436e-01,  2.2394e+00,  1.3415e+00, -1.6894e+00,
          2.5433e+00,  5.9746e-02, -3.5446e+00, -1.6516e+00, -7.1608e-01,
         -3.4153e+00,  1.3262e+00,  2.3340e+00, -1.3691e+00, -5.1148e+00,
          4.4386e-01,  1.1471e+00,  1.8358e+00, -2.6250e+00,  4.7463e-01,
         -6.6676e-01, -1.2268e+00, -1.5980e+00,  2.7540e-01, -3.4414e-01,
          9.9923e-01,  2.6813e+00,  8.5421e+00, -3.4746e+00, -9.0709e-01,
         -1.2975e+00, -4.7443e-01, -2.6807e+00, -3.1125e+00,  1.1319e+00,
         -1.9543e+00,  1.1561e+00, -2.1746e+00,  1.4505e-02, -3.1492e+00,
          2.3282e+00, -9.1608e-01, -2.1190e+00,  1.5113e+00,  7.3686e-01,
          1.1858e+00, -1.3190e+00,  1.2157e+00, -2.7116e+00, -2.4406e+00,
         -2.4500e+00,  2.8695e+00, -1.5843e+00, -1.2579e+00,  6.9762e-01,
          1.4429e+00, -9.1386e-01,  2.8526e+00, -1.6602e+00, -2.0205e+00,
         -2.8734e-01, -5.4055e-01, -2.2508e+00, -2.4912e+00, -4.0824e-01,
          7.6068e-01,  3.0911e-01, -1.0099e+00,  7.0227e-01, -9.7780e-01,
          3.2495e-01, -1.3059e-01,  9.7174e-01,  1.9505e+00,  6.9533e-01,
         -4.0190e+00, -2.6713e+00,  1.2381e+00, -7.5413e-01,  1.3258e+00,
         -9.9570e-01,  2.8679e-01,  9.5969e-01, -7.1460e-01, -6.7358e-01,
          1.4379e+00,  2.8673e-01,  3.2196e-02, -2.1116e+00, -5.1907e+00,
         -1.1070e+00, -8.1177e-01, -1.4766e-01,  8.2797e-02, -1.4149e+00,
         -3.0309e+00,  2.2163e-01, -7.2181e-01,  2.2478e-02, -1.3564e+00,
         -1.8865e+00, -2.4762e+00, -1.4092e+00, -1.2579e-01,  2.6582e+00,
         -8.9824e-01, -5.8175e-01, -3.8877e-01, -4.4331e-01,  1.1515e+00,
          3.1348e-01,  5.8268e-01,  3.6002e-01, -3.0529e-01,  3.8931e-01,
         -2.5724e-01,  5.8285e-01, -4.7591e-01,  1.1827e+00, -1.8019e+00,
         -2.8435e-01, -2.8249e+00, -5.8165e-01, -7.7269e-01, -9.0484e-01,
         -1.8916e+00, -1.0054e+00, -1.5964e+00, -3.0450e-01, -1.1741e+00,
          3.2606e-01, -4.6770e-01, -1.4017e+00, -1.6667e+00, -2.3465e-01,
          7.3912e-01,  1.6176e+00, -1.0858e+00, -3.1384e-01, -1.2774e+00,
         -6.6758e-01, -9.9701e-01, -1.6125e+00, -2.8855e-01, -2.0662e+00,
         -1.0920e+00, -9.1905e-01, -9.5032e-01, -1.6627e+00, -2.2766e+00,
          8.5495e-01, -1.9734e+00,  1.0368e+00, -9.5098e-01, -1.7640e+00,
         -3.3872e+00, -6.2224e-01, -2.6710e+00, -1.7468e+00, -5.2045e-01,
         -1.4859e+00, -8.7899e-01,  1.6595e+00,  7.3420e-01,  1.1056e+00,
          1.0582e+00, -2.1061e+00, -7.4197e-01, -2.7582e+00, -1.3889e+00,
          1.9799e+00,  3.1401e-01, -1.9116e+00,  1.8486e+00,  2.7812e+00]],
       grad_fn=<AddmmBackward0>)
In [17]:
with open('/home/scarl/Classes/CS415DL/Resources/DLwithPyTorch/data/p1ch2/imagenet_classes.txt') as f:
    labels = [line.strip() for line in f.readlines()]
In [18]:
# In[14]:
_, index = torch.max(out, 1)
In [19]:
percentage = torch.nn.functional.softmax(out, dim=1)[0] * 100
labels[index[0]], percentage[index[0]].item()
Out[19]:
('Labrador retriever', 72.41580200195312)
In [20]:
_, indices = torch.sort(out, descending=True)
[(labels[idx], percentage[idx].item()) for idx in indices[0][:5]]
Out[20]:
[('Labrador retriever', 72.41580200195312),
 ('flat-coated retriever', 18.35405731201172),
 ('curly-coated retriever', 3.025104522705078),
 ('Chesapeake Bay retriever', 2.1387224197387695),
 ('kelpie', 0.47368496656417847)]
In [21]:
img = Image.open("/home/scarl/Classes/CS415DL/Resources/DLwithPyTorch/data/p1ch2/bobby.jpg")
In [22]:
img
Out[22]:
In [23]:
img_t = preprocess(img)
In [24]:
batch_t = torch.unsqueeze(img_t, 0)
In [25]:
resnet.eval()
out = resnet(batch_t)
In [26]:
out
Out[26]:
tensor([[-3.4803e+00, -1.6618e+00, -2.4515e+00, -3.2662e+00, -3.2466e+00,
         -1.3611e+00, -2.0465e+00, -2.5112e+00, -1.3043e+00, -2.8900e+00,
         -1.6862e+00, -1.3055e+00, -2.6129e+00, -2.9645e+00, -2.4300e+00,
         -2.8143e+00, -3.3019e+00, -7.9404e-01, -6.5182e-01, -1.2308e+00,
         -3.0193e+00, -3.9457e+00, -2.2675e+00, -1.0811e+00, -1.0232e+00,
         -1.0442e+00, -3.0918e+00, -2.4613e+00, -2.1964e+00, -3.2354e+00,
         -3.3013e+00, -1.8553e+00, -2.0921e+00, -2.1327e+00, -1.9102e+00,
         -3.2403e+00, -1.1396e+00, -1.0925e+00, -1.2186e+00, -9.3332e-01,
         -4.5093e-01, -1.5489e+00,  1.4161e+00,  1.0871e-01, -1.8442e+00,
         -1.4806e+00,  9.6227e-01, -9.9456e-01, -3.0060e+00, -2.7384e+00,
         -2.5798e+00, -2.0666e+00, -1.8022e+00, -1.9328e+00, -1.7726e+00,
         -1.3041e+00, -4.5848e-01, -2.0537e+00, -3.2804e+00, -5.0451e-01,
         -3.8174e-01, -1.1147e+00, -7.3998e-01, -1.4299e+00, -1.4883e+00,
         -2.1073e+00, -1.7373e+00, -4.0412e-01, -1.9374e+00, -1.4862e+00,
         -1.2102e+00, -1.3223e+00, -1.0832e+00,  7.9208e-02, -4.1344e-01,
         -2.7477e-01, -8.5399e-01,  6.0364e-01, -8.9196e-01,  1.4761e+00,
         -2.6427e+00, -3.6478e+00, -2.7066e-01, -1.2360e-01, -2.2445e+00,
         -2.3425e+00, -1.4430e+00,  2.5264e-01, -1.0588e+00, -2.8812e+00,
         -2.5145e+00, -2.2579e+00,  4.1647e-01, -1.3463e+00, -1.6449e-02,
         -2.8798e+00, -5.5658e-01, -1.3859e+00, -2.9352e+00, -1.8880e+00,
         -4.2244e+00, -2.9742e+00, -2.0298e+00, -2.3869e+00, -2.7324e+00,
         -3.9905e+00, -3.6113e+00, -5.4423e-01, -1.0291e+00, -1.8998e+00,
         -3.5611e+00, -1.5031e+00,  1.0660e+00, -7.1587e-01, -7.2612e-01,
         -2.2173e+00, -2.2616e+00, -5.9990e-01, -1.4349e+00, -2.5965e+00,
         -3.9844e+00, -9.4164e-01, -5.3676e-01, -8.4138e-01, -1.1660e+00,
         -7.3556e-01, -1.1300e+00, -2.1074e+00, -4.0037e+00, -3.7229e-01,
         -2.7179e+00, -2.9849e+00, -1.9127e+00, -1.8412e+00, -1.3001e+00,
         -2.2268e+00, -2.0247e+00, -3.1761e+00, -3.2964e+00, -2.7923e+00,
         -4.3191e-01, -3.7750e+00, -2.4832e+00, -2.6228e+00, -2.7499e+00,
         -2.6306e+00, -3.2714e+00, -4.3249e+00, -4.2451e+00, -3.6207e+00,
         -1.1967e+00,  2.3839e+00,  1.8833e+00,  2.2390e+00,  4.9467e+00,
          9.9434e-01,  2.9570e+00,  8.5852e-01,  2.2356e+00,  6.1872e+00,
          4.2074e+00,  4.6280e+00,  7.5066e+00,  4.3456e+00,  4.8873e+00,
          5.8086e+00,  4.0282e+00,  3.5778e+00,  9.5398e+00,  1.0959e+00,
          3.3065e+00,  1.9473e+00, -4.7347e-01,  1.4388e+00,  1.8860e+00,
          5.5149e+00,  5.6885e+00,  2.1434e+00,  2.5016e+00,  6.2614e-01,
          1.9095e+00,  1.4927e+00,  3.4522e+00,  4.0987e-01,  4.2790e+00,
          4.3379e+00,  1.2945e+00,  1.6308e+00,  1.1426e+00,  2.1246e+00,
          8.6189e-01,  3.0266e+00,  3.5030e+00,  2.7914e+00,  1.8812e+00,
          1.3916e-01,  2.0182e+00,  2.6938e+00,  1.0643e+00,  1.9063e+00,
          3.5028e+00,  2.2950e+00,  2.5388e+00,  1.3140e+00,  3.5698e+00,
          7.7051e+00,  4.3443e+00,  1.5674e+01,  1.2140e+01,  5.2050e+00,
          1.9331e+00,  5.4996e+00,  6.1745e+00,  7.5155e+00,  5.8567e+00,
          6.9794e+00,  5.6891e+00,  2.6934e+00,  5.3248e+00,  9.8436e+00,
          6.4168e+00,  2.4431e+00,  5.6031e+00,  3.4884e+00,  2.0732e+00,
          1.3375e+00,  2.5550e+00,  5.7791e+00,  7.5825e-01,  1.0360e+00,
          4.8250e+00,  5.9932e+00,  3.9907e+00, -1.7508e+00,  3.6606e+00,
          2.8820e+00,  2.8978e+00,  1.3059e+00,  4.2622e+00,  4.0880e+00,
          3.4181e+00,  2.3945e+00,  3.1604e-01,  8.7091e-01,  5.0895e+00,
         -7.0908e-01,  1.9885e+00,  2.8699e+00,  2.5281e+00,  1.9253e+00,
          6.5843e-01,  3.4956e+00, -5.6701e-01,  1.9219e+00,  5.0423e-01,
          2.3949e+00,  3.4628e+00,  5.1851e+00,  1.8182e+00,  3.9127e+00,
          4.3620e+00,  3.3723e-01, -4.6588e-01,  5.6958e+00,  3.7192e+00,
          2.4205e+00,  3.6402e+00,  3.3705e+00, -9.3733e-01, -2.0590e-01,
          1.3019e-01,  1.1554e+00, -4.0951e-02,  4.5523e+00, -1.8349e+00,
         -2.6543e+00, -1.6859e+00, -6.3751e-01, -1.5596e+00, -2.1529e+00,
         -1.0245e+00,  1.5312e+00,  7.6857e-01, -1.8030e+00,  6.9033e-01,
          9.1473e-01, -2.0907e+00, -2.1250e+00, -1.5808e+00, -4.7830e+00,
         -1.0396e+00, -9.7836e-01, -2.0528e+00,  1.9793e+00, -6.0107e-01,
         -2.4964e+00, -1.4914e+00, -3.2041e+00, -1.9067e+00, -5.9215e-01,
         -1.0509e+00,  1.3131e+00, -1.5027e+00, -2.0352e+00,  1.3009e+00,
          3.9806e-01, -3.5442e-01,  7.1537e-01, -3.0085e-01, -7.6253e-01,
         -5.4504e-01,  1.0533e+00,  1.1973e-01,  7.1265e-02,  1.3234e+00,
         -2.0051e+00, -1.7127e+00,  1.1415e+00, -4.3746e-01,  2.9573e-01,
         -1.4572e+00, -2.6234e+00, -2.5400e+00, -2.4128e-01, -2.3629e+00,
         -1.5560e+00, -2.5256e+00, -8.0395e-01,  1.5960e-01, -2.8029e+00,
         -1.8937e+00, -9.4297e-01, -3.8988e-01, -4.6732e-01, -7.8798e-01,
         -2.5103e+00, -1.8726e+00, -2.1138e+00, -5.5075e-01,  1.8876e-01,
         -2.0678e+00, -1.7942e+00, -2.4776e+00, -3.8874e+00, -4.4214e+00,
         -2.1606e+00, -1.9960e+00, -3.7195e+00, -1.8627e+00, -3.3882e+00,
         -2.0034e+00, -2.2823e+00, -8.3603e-01, -5.1364e-01, -2.9197e+00,
         -1.6728e+00, -2.5686e-01, -2.7734e+00, -1.7911e+00,  1.1283e-01,
         -2.1214e+00, -1.5402e+00, -1.2457e+00, -9.6399e-01, -2.4953e+00,
         -1.3973e+00, -3.8589e+00, -4.3189e+00, -1.5287e+00, -1.9420e+00,
         -3.0008e+00, -2.9597e+00, -4.8460e+00, -2.4737e+00, -1.4287e+00,
         -2.9093e+00, -1.2882e+00, -6.0873e-01, -2.8312e+00, -1.8754e+00,
         -2.3758e+00, -3.4176e+00, -2.5520e+00, -3.8709e+00, -4.4702e+00,
         -3.5587e+00, -9.4389e-01, -2.3503e+00, -2.0270e+00, -1.8470e+00,
         -3.2897e+00, -3.4712e+00, -2.8471e+00, -1.9893e+00, -3.7441e+00,
         -1.1865e+00, -2.8282e+00,  2.2839e-01, -1.3325e-01, -3.1261e-01,
          1.4785e-01,  1.7180e+00,  1.8871e+00, -3.1302e+00, -3.7345e+00,
         -2.6754e+00, -6.7742e-01, -8.4727e-01, -1.3179e+00,  4.7847e-01,
         -2.2918e+00,  4.7733e+00,  1.5100e+00, -1.5956e+00,  3.3496e+00,
          3.0611e+00,  1.5253e+00,  6.8673e-01,  1.2918e+00,  1.6387e+00,
          1.0631e-01,  1.3420e+00,  5.2414e-02,  1.0270e+00, -4.6863e-01,
         -1.3585e+00,  5.7504e-01,  2.8775e-01,  2.8255e+00,  2.1875e+00,
          1.8301e+00,  1.3566e+00,  1.0992e+00,  2.3172e+00,  6.4046e+00,
          1.8630e+00,  6.0024e-01, -1.4953e+00, -1.9144e+00, -2.6436e+00,
          1.5186e+00, -4.8838e-01, -1.0530e-01,  1.9803e+00, -1.7358e+00,
          3.7236e-01,  1.6658e+00,  7.8257e-01,  2.1721e+00, -1.4210e+00,
         -2.4550e+00,  4.6637e-01,  3.3418e+00, -2.8537e-01,  1.1941e-01,
          1.1450e+00, -1.3834e+00,  1.5737e+00, -2.1716e+00, -4.2427e-01,
         -1.4805e+00, -2.1745e+00,  2.7962e+00,  2.4990e+00,  1.9237e-01,
          4.7498e-01, -1.9682e+00, -1.6105e+00, -7.3869e-01, -1.1794e+00,
         -2.9531e-01, -1.4142e+00,  2.2398e+00, -4.3380e-01, -8.6286e-01,
          4.0300e-01, -1.4318e+00, -3.1364e-01,  3.4846e+00,  4.3202e-01,
          4.5058e-01, -1.1090e+00,  2.2513e-01, -2.6651e+00, -2.8278e+00,
         -6.5790e-01, -3.0889e-01,  8.2096e-01,  1.8005e-01, -4.2284e-01,
         -5.8541e-01, -2.7820e-01,  1.6590e+00,  8.7698e-02, -4.6729e-01,
          1.1241e+00,  2.2742e+00, -1.0448e+00,  9.4819e-01,  9.9525e-01,
         -2.5969e+00, -5.5236e-01,  2.1583e+00, -9.2215e-01,  4.7108e-02,
         -3.8016e-01,  1.5210e+00, -1.0433e+00,  1.9041e+00,  1.4741e+00,
         -4.3896e+00, -1.6206e-01, -1.5698e-01, -7.3738e-01,  1.8179e+00,
          3.3264e+00,  7.3696e-01, -7.6419e-01,  1.5898e+00,  1.9445e+00,
          1.2725e+00, -1.5624e+00,  2.2197e+00,  9.9570e-01, -6.3256e-01,
         -1.4160e+00,  1.6144e+00,  4.5531e-02,  9.0731e-01,  9.5069e-01,
          5.3562e-01,  4.4124e-01,  1.0358e+00,  6.5592e-01,  3.3626e+00,
         -1.0299e+00, -2.8939e+00, -7.0227e-01, -8.1103e-01,  7.0547e+00,
         -3.3097e+00,  1.3230e+00,  1.6968e+00,  3.7732e+00, -1.1723e+00,
          5.7985e-01, -1.8231e+00, -1.3483e+00,  4.1487e-01,  2.6429e+00,
          1.4418e+00,  7.9635e-01,  4.8719e+00,  1.5457e+00, -3.5932e+00,
         -2.2285e+00, -1.3850e+00, -8.9728e-01,  2.1657e+00,  2.0583e+00,
         -8.9567e-01, -1.7835e+00, -1.4516e+00,  1.0497e+00, -7.6032e-01,
         -1.4353e+00,  4.7010e-01,  8.7255e-01,  6.7030e-01, -1.1902e+00,
         -1.4175e+00, -8.4839e-01,  1.1901e+00, -1.8283e+00,  2.4775e+00,
          3.4005e-01, -1.7652e+00, -9.1973e-01,  2.9893e+00,  2.2373e+00,
         -8.1442e-01, -1.9843e+00,  9.2510e-01, -2.1452e+00,  1.8890e-02,
          2.5441e-01, -1.1333e-01, -6.2533e-01,  8.0224e-01,  4.0010e+00,
         -1.1935e+00,  2.6455e+00, -1.7860e+00,  7.5865e-01,  5.1593e-01,
          2.4380e-03, -7.6760e-01,  4.8149e-01,  1.3055e+00,  8.0364e-01,
         -6.1874e-01,  4.6969e-02,  2.6322e-01, -2.1400e+00, -1.3908e+00,
         -4.0182e-02, -4.2920e-01,  4.6767e-01,  1.3024e+00,  7.5817e-01,
          9.9857e-02, -1.0072e-01, -8.5241e-01,  8.6249e-01,  6.9517e-01,
          2.1217e+00,  7.1266e-01, -1.9782e-01,  2.3986e+00,  1.8734e+00,
          1.0993e+00,  1.0336e+00,  1.4353e+00, -4.9214e-02, -1.3295e-01,
         -1.7147e+00, -1.2590e+00, -1.3166e+00, -3.4476e+00,  5.9193e-01,
          1.0995e+00,  1.0986e-02, -3.7005e-01, -4.5369e-01, -4.2330e-01,
         -1.5137e+00,  2.7933e-01, -2.0776e-01,  3.2132e+00,  1.8063e+00,
         -1.5186e+00,  2.8835e+00, -7.4290e-01,  3.2128e-02, -7.0117e-02,
         -1.0103e+00,  1.1795e+00,  5.9283e-01,  1.2191e-01, -3.4571e+00,
          1.3048e+00,  3.9847e-01, -1.2731e+00, -1.2927e+00, -1.6408e+00,
          1.9229e+00,  4.1588e-02, -9.8906e-01,  6.7141e-01,  2.8807e+00,
          1.6977e+00,  2.2304e-01, -8.1440e-01, -2.0507e+00,  1.7015e+00,
         -2.0312e-01,  7.4630e-01,  1.5227e+00, -1.4377e+00, -1.1784e+00,
          5.1375e-01, -6.4234e-01,  3.8708e-02,  2.6664e+00, -1.6256e+00,
         -3.3457e+00,  2.1520e+00,  8.6618e-01,  1.3850e+00, -3.4029e-01,
          1.8385e-01,  1.4680e+00, -1.0961e+00,  1.8217e+00, -1.2748e+00,
         -2.1175e+00, -8.4857e-01, -5.3657e-01, -1.2562e+00,  1.1329e+00,
         -1.4191e+00, -7.6893e-01, -3.4133e-01,  2.1594e+00, -2.1836e-01,
         -1.8166e+00,  9.8037e-02,  1.7366e+00,  1.6465e-01,  7.7769e-01,
          4.7226e+00, -7.3754e-01, -1.6683e+00, -8.1360e-01, -1.4618e+00,
          3.4068e+00,  5.3348e-01, -3.1106e-01, -5.0764e-01,  3.0037e-01,
          1.8626e+00, -1.1852e+00, -2.0411e+00, -9.6967e-02, -7.1424e-01,
         -2.5433e+00, -3.4143e-02,  7.6702e-01, -1.7948e+00,  2.9510e-01,
         -1.0903e+00,  1.5320e+00,  2.8823e+00,  5.1182e-01, -7.6857e-01,
         -9.0145e-01, -1.7196e+00, -1.0044e+00,  9.1568e-01, -9.2979e-02,
         -2.3068e+00,  2.2911e+00,  9.5719e-01,  1.9917e+00, -1.6980e+00,
          2.6118e+00,  3.7953e+00,  7.1091e-01, -2.2803e-03, -1.0275e+00,
          2.1824e+00,  1.4127e+00,  4.7933e-01, -1.3249e+00, -9.0533e-01,
          5.8118e-01, -6.0400e-01,  5.1155e-01,  1.1511e+00,  9.5682e-01,
          2.7826e+00, -3.0976e+00,  3.5563e+00, -1.6181e-01, -4.6197e-02,
         -2.0769e+00, -1.4204e+00,  2.9824e+00, -4.8723e-01,  2.1408e-01,
         -1.3643e-01,  2.2942e+00,  3.4084e-01,  9.9796e-01, -1.1452e+00,
          3.3055e+00, -1.8049e+00,  3.2445e+00, -1.6493e-01,  1.3805e+00,
          6.5878e-01,  4.6122e-01, -7.8641e-01,  3.8983e-01,  1.9974e+00,
          4.0911e-01,  2.4162e+00, -1.9111e+00,  8.1044e-02,  2.2694e+00,
         -1.6680e+00, -7.0304e-01,  1.4299e+00,  1.4234e-02,  7.9249e-01,
          2.9637e+00, -9.4825e-01, -1.3366e+00,  2.6750e-01,  2.3589e+00,
          1.8983e+00,  1.8345e+00,  8.5127e-01,  4.2841e+00,  4.8082e-01,
         -1.4365e+00, -4.8286e-01,  3.0412e+00, -8.2025e-01,  3.3065e+00,
         -6.5939e-01, -2.6282e+00, -3.1888e+00, -2.9725e+00,  1.2156e+00,
          5.6016e+00,  3.0274e-01, -3.1681e+00,  2.5582e+00, -3.3199e-01,
          1.4820e-01,  2.3601e+00, -1.4552e+00,  3.3269e+00, -3.3744e+00,
         -6.4104e-01,  1.1680e+00, -2.6107e+00,  1.6885e+00, -1.5028e+00,
         -2.6845e+00, -3.6659e+00, -1.7394e+00,  1.1231e+00,  2.0104e+00,
         -1.4943e-01,  1.3057e+00,  1.2092e+00,  2.6647e+00, -1.7969e+00,
         -1.8525e+00,  1.5487e+00, -2.0861e+00, -2.3154e+00,  9.9215e-01,
         -3.7871e+00, -1.1176e+00,  9.0636e-01, -3.2947e-01, -3.4544e+00,
          2.0940e+00,  5.4371e-01,  6.0876e-01, -1.3066e-01,  7.9443e-01,
          7.9938e-01,  1.0587e+00, -1.8372e+00,  2.8466e-01, -1.1158e+00,
          8.0787e-01,  1.0870e+00,  8.9547e+00, -8.9419e-01, -9.3960e-01,
          1.0807e+00, -4.1462e-01, -1.7524e+00,  9.1854e-02,  1.8185e-01,
         -1.3849e+00,  8.8831e-01, -4.1253e-01, -7.7844e-01, -3.1265e+00,
         -3.8734e-01,  1.8115e-01, -2.2122e+00,  2.8848e+00,  4.5000e-01,
          1.4854e+00, -3.4138e+00,  1.4939e+00, -2.5266e+00, -2.9228e+00,
         -7.6507e-01,  2.8269e+00, -1.1918e+00, -6.2602e-01,  3.6187e+00,
          1.1527e+00,  1.1860e+00,  3.4149e+00,  9.2982e-01, -1.1376e+00,
          1.0391e+00,  1.8575e-01, -7.4427e-01, -2.9312e+00, -1.6815e-01,
          1.5624e+00, -4.5063e-01,  1.5997e+00,  1.0128e+00, -1.3146e+00,
         -1.8426e+00, -4.7445e-01,  5.8991e-01,  2.3850e+00,  5.2548e-01,
         -1.3760e+00, -2.3240e+00, -7.6861e-01,  1.2772e+00,  2.9579e+00,
         -2.7968e-01, -5.9378e-01, -2.4311e-02, -7.2352e-01, -5.9500e-02,
          2.7550e+00,  2.9499e-01, -1.1396e+00, -1.4785e+00, -4.3375e+00,
         -3.2104e-01, -3.2125e-01, -2.0806e+00,  3.7004e-01, -1.4368e+00,
         -6.1700e-01, -2.0341e+00, -8.6155e-01, -4.0387e-01, -3.2359e-01,
         -1.8287e+00, -1.7554e+00, -6.5640e-01,  6.7694e-01,  3.7156e+00,
          2.1207e+00,  4.0970e+00,  1.7257e+00,  8.5265e-01,  1.2722e+00,
          1.0563e+00,  1.3809e+00,  1.2871e+00, -7.5314e-01,  2.2593e+00,
          1.1952e-01, -7.3866e-01,  1.0060e+00,  8.5880e-01, -6.6744e-01,
         -3.2016e-01, -1.5605e+00,  2.0461e+00,  2.4740e+00,  2.2464e-01,
          7.4987e-01,  3.8843e-02, -1.7622e+00,  1.9534e+00,  4.5175e-01,
          1.2086e+00,  7.3219e-01, -1.0001e+00,  1.2820e-01, -3.7380e-01,
          9.6211e-02,  3.2060e+00,  6.5023e-01, -1.1252e-01,  8.9641e-01,
         -5.2856e-02, -1.1585e+00,  1.4922e-01,  3.7309e-01,  8.7084e-01,
         -1.9354e+00,  1.0733e-01, -1.5175e+00, -1.8582e+00, -3.8437e+00,
          1.8629e-01, -2.9438e+00,  5.4171e-01, -7.8057e-01, -2.6016e+00,
         -4.4594e+00,  5.5604e-01, -1.3140e+00, -3.8407e+00, -7.5988e-01,
         -5.7457e-01, -2.5448e+00,  2.3831e+00,  6.1368e-01,  4.8295e-01,
          2.8674e+00, -3.7442e+00,  1.5085e+00, -3.2500e+00, -2.4894e+00,
         -3.3541e-01,  1.2856e-01, -1.1355e+00,  3.3969e+00,  4.4584e+00]],
       grad_fn=<AddmmBackward0>)
In [27]:
batch_t = torch.unsqueeze(img_t, 0)
In [28]:
_, index = torch.max(out, 1)
In [29]:
index
Out[29]:
tensor([207])
In [30]:
percentage = torch.nn.functional.softmax(out, dim=1)[0] * 100
labels[index[0]], percentage[index[0]].item()
Out[30]:
('golden retriever', 96.29335021972656)
In [31]:
_, indices = torch.sort(out, descending=True)
[(labels[idx], percentage[idx].item()) for idx in indices[0][:5]]
Out[31]:
[('golden retriever', 96.29335021972656),
 ('Labrador retriever', 2.808121681213379),
 ('cocker spaniel, English cocker spaniel, cocker', 0.28267431259155273),
 ('redbone', 0.20863044261932373),
 ('tennis ball', 0.11621593683958054)]
In [32]:
img = Image.open("/home/scarl/Classes/CS415DL/Resources/DLwithPyTorch/data/p1ch2/horse.jpg")
In [33]:
img
Out[33]:
In [34]:
img_t = preprocess(img)
In [35]:
batch_t = torch.unsqueeze(img_t, 0)
In [36]:
resnet.eval()
Out[36]:
ResNet(
  (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
  (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (relu): ReLU(inplace=True)
  (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
  (layer1): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
        (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer2): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer3): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (3): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (4): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (5): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (6): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (7): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (8): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (9): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (10): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (11): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (12): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (13): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (14): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (15): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (16): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (17): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (18): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (19): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (20): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (21): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (22): Bottleneck(
      (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (layer4): Sequential(
    (0): Bottleneck(
      (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
      (downsample): Sequential(
        (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
        (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      )
    )
    (1): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
    (2): Bottleneck(
      (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
      (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
      (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
      (relu): ReLU(inplace=True)
    )
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
  (fc): Linear(in_features=2048, out_features=1000, bias=True)
)
In [37]:
out = resnet(batch_t)
In [38]:
out
Out[38]:
tensor([[-3.0750e+00, -1.5513e+00,  1.8620e+00,  5.0177e-02,  6.4220e-01,
         -1.0961e-01,  1.0240e+00,  2.9839e+00, -1.1159e+00,  1.8814e+00,
         -4.2087e+00, -3.6604e+00, -3.0206e+00, -5.5697e+00, -1.7747e+00,
         -3.0089e+00, -3.7074e+00, -1.0540e+00, -1.9684e+00, -4.5826e+00,
         -5.3042e+00,  1.0134e+00, -1.9531e+00,  1.4473e+00, -3.4471e+00,
         -2.6357e+00, -2.9163e+00, -2.4023e+00, -1.0720e+00, -3.1659e+00,
         -2.3239e+00, -2.8332e+00, -3.2967e+00, -7.6156e-01,  3.5111e-01,
         -3.0553e+00, -1.2116e+00, -2.0969e+00,  1.1910e-01,  2.0926e+00,
         -2.9356e+00,  1.3984e+00, -8.0890e-01,  8.0315e-03, -1.9727e+00,
         -1.6779e+00, -8.3520e-01, -2.0912e+00, -1.5653e+00, -2.1709e+00,
         -3.7323e+00,  4.1284e+00, -1.5014e+00, -2.6137e+00, -2.7159e+00,
         -1.0277e+00, -2.4023e+00, -2.5533e+00, -3.3681e+00, -1.1756e+00,
         -1.1515e+00, -1.4894e+00, -1.1030e+00, -2.7464e-01, -3.2600e-01,
         -8.5655e-01, -2.8436e-01, -1.7093e+00, -5.8457e-01, -1.5341e+00,
         -2.9875e+00,  3.9421e-01, -3.3114e+00, -1.3478e+00, -5.1481e-01,
         -2.0205e+00, -1.4381e+00, -1.7273e+00, -1.0198e+00,  2.9198e+00,
         -3.8886e-01, -4.0395e+00, -2.0980e+00, -1.1353e+00, -8.6785e-01,
         -3.9361e+00, -2.3260e+00, -5.9777e-01,  3.4348e+00, -6.3948e-01,
         -3.1372e+00, -1.8866e+00, -1.3329e+00, -2.4243e+00, -2.2202e+00,
         -3.8689e+00, -2.6816e+00, -3.4828e+00, -4.5295e+00, -3.8893e+00,
         -3.4818e+00, -3.6096e-01, -2.8020e+00, -1.6504e+00, -3.1293e+00,
         -1.3614e+00, -1.0350e+00,  2.4519e+00, -8.8725e-02, -1.2199e+00,
         -1.5925e+00,  2.1715e-01,  3.1667e+00, -2.0410e+00, -1.9258e+00,
         -5.3918e-01, -2.3997e+00, -3.6458e-01,  6.5779e-02, -8.8029e-02,
         -8.5191e-01,  7.7175e-01,  3.5242e+00,  1.2089e+00,  1.3336e+00,
          2.7358e-01,  1.3984e-02, -2.2161e+00, -3.5878e+00, -1.5578e+00,
          1.3404e+00, -2.4794e+00, -2.5471e+00, -3.6010e+00, -2.5262e+00,
         -1.8135e+00, -2.0481e+00, -3.4969e+00, -7.7189e-01, -2.9251e+00,
         -4.3594e+00, -3.3245e+00, -3.0636e+00, -3.1646e+00, -2.3336e+00,
         -3.1291e+00, -1.2650e+00,  4.9699e-01, -1.8190e+00, -4.8654e-01,
          2.6956e+00,  1.6119e-01, -1.0832e+00, -7.2186e-01, -1.7079e-01,
         -1.1810e+00,  9.2798e-01, -1.0906e+00, -1.3997e+00,  6.1357e+00,
          2.2050e+00,  2.6326e-01, -2.3821e+00,  1.7682e+00, -2.0726e+00,
         -1.0746e+00, -2.1874e-01, -1.2125e+00,  4.9753e+00, -2.5559e-02,
         -8.9830e-01,  1.9147e+00,  1.1626e+00,  2.5842e+00, -3.3994e+00,
         -2.4081e+00,  3.6312e+00,  2.6332e-01,  1.6199e+00, -5.3945e-01,
         -1.1913e+00, -1.7462e+00, -2.1878e+00, -7.5913e-02,  1.1508e+00,
          2.3227e+00,  3.9447e-01,  2.7888e-01, -3.4437e+00, -6.1372e-01,
         -3.7406e+00, -7.3368e-01, -1.3605e+00,  1.5639e+00, -4.5863e+00,
          4.8267e-01, -1.5303e+00, -9.2522e-01, -1.6569e+00, -2.0847e+00,
          9.1434e-01, -2.9495e-01,  5.9294e-02, -8.8672e-01,  1.3612e+00,
         -3.4944e-01, -2.1694e-01,  1.4501e+00, -5.2249e-01,  3.4245e-01,
         -1.1053e-01,  5.2656e+00, -3.7451e-01,  5.4699e+00, -9.8901e-02,
          1.6712e+00, -2.5142e+00, -2.6736e+00,  1.0728e+00, -2.9349e-01,
         -8.1180e-03, -5.2871e-01, -8.8389e-01, -1.6241e+00, -3.0416e-02,
         -1.6898e+00,  1.4535e-01,  1.7193e-01, -1.0854e+00, -8.6170e-01,
         -1.0957e+00, -9.0096e-01, -6.9100e-01, -2.0211e-01,  5.3522e-01,
         -1.5626e+00,  3.7701e-01,  1.2013e+00,  1.2662e+00, -1.4038e+00,
         -1.3377e+00, -1.5437e+00,  3.6889e+00,  1.0906e-01, -1.8560e-02,
         -2.3702e+00,  2.3577e+00, -1.2688e+00, -2.0292e+00, -1.8506e+00,
         -1.3019e+00,  9.6455e-01, -2.0126e+00,  1.7124e+00, -8.8646e-01,
         -1.7835e+00,  9.6510e-01, -1.8887e+00,  1.3338e-01, -9.2292e-01,
          2.8036e-01, -2.0238e+00, -1.7793e+00, -1.4091e+00, -1.5242e+00,
         -9.4466e-01, -1.6806e+00,  5.5001e-01,  3.0472e+00, -3.0593e+00,
         -4.4863e+00, -1.5339e+00, -3.5488e+00,  4.2532e-01, -4.8657e-01,
          4.4974e-01,  6.2478e-01, -3.6067e+00, -4.0159e+00, -4.6237e+00,
         -2.3436e+00, -7.7122e-01, -5.2093e-03, -2.1061e+00, -3.2589e+00,
         -9.3272e-01, -1.0939e-01, -4.1050e+00, -3.4493e-01, -3.8369e+00,
         -1.0618e+00, -1.9031e-01,  1.6646e-01, -9.8829e-01, -2.7577e+00,
         -4.5406e+00, -2.6407e+00, -3.0116e+00, -3.6406e+00, -2.3455e+00,
          1.1695e+00, -1.5896e+00,  2.1809e-01, -1.2481e+00, -1.2115e+00,
         -8.4251e-01,  7.0040e-01, -2.4268e+00, -1.3596e+00, -1.8975e+00,
          2.4237e+00, -8.9608e-01,  9.4217e-02,  1.0284e+00,  1.1809e+00,
         -4.9151e-02, -1.0243e+00, -1.6952e+00, -2.7461e+00,  1.9428e+00,
         -7.4414e-01, -1.9029e+00, -2.9699e+00, -7.6947e-01, -2.5906e+00,
         -5.6613e-01, -2.6049e+00,  1.4583e+00, -1.1266e+00, -1.0220e+00,
         -5.3992e+00, -4.3626e+00, -2.4492e+00, -2.5227e+00, -2.3142e+00,
         -3.2847e+00, -3.8041e+00, -3.5325e+00, -3.4042e+00,  1.5192e+01,
          4.9099e+00, -8.4690e-01, -2.2834e+00,  1.2908e+00,  7.0971e-01,
          3.8477e+00,  6.0061e-01,  2.7413e-01, -2.3175e-02,  5.2063e-01,
          5.1506e-02,  5.5243e+00,  2.5524e+00,  2.4408e+00,  1.0983e+01,
          4.1613e+00, -1.9235e+00, -2.6738e+00, -2.7022e+00, -9.1556e-01,
         -3.0842e+00, -2.1025e+00, -3.3759e+00, -2.2175e+00, -2.8203e+00,
          2.5552e+00, -1.7274e+00, -2.1645e+00, -3.1953e+00, -1.2565e+00,
         -3.8676e+00, -2.5297e+00, -9.9279e-01, -2.2395e+00, -1.8654e+00,
         -2.3220e+00, -3.2231e+00, -2.9501e+00, -2.5846e+00, -1.6061e+00,
         -3.2395e+00, -2.5925e+00, -3.1984e+00, -3.0638e+00, -2.8851e+00,
          1.1858e+00,  2.7632e+00, -2.9238e+00, -6.1821e+00,  1.1122e+00,
         -1.1659e+00, -1.9683e-01, -2.6951e+00, -1.0515e+00,  1.7568e-01,
         -9.6223e-01,  9.1245e-02, -1.6772e+00,  2.4359e+00,  9.8311e-01,
         -1.4479e+00,  1.0669e+00, -7.9831e-01, -1.8067e-01, -1.9093e-01,
          1.3752e+00, -2.3148e+00,  2.1029e-01,  5.8132e-01,  1.9322e+00,
          7.8975e-01, -2.9774e+00,  1.9257e+00,  1.3804e+00,  9.1614e-01,
          1.0445e+00,  7.6030e-02,  4.8830e+00, -1.9347e+00,  1.4989e+00,
         -3.0185e-01,  2.5539e+00,  2.5845e+00, -9.5934e-01, -7.6554e-01,
          5.3374e-01,  4.8749e-01,  7.2344e+00,  1.3764e+00,  3.0077e+00,
          1.8600e+00, -1.9697e+00, -3.6190e-01,  1.9939e+00,  1.0079e+00,
          4.8547e-01,  3.3023e-01,  3.6927e-01,  1.7372e+00,  2.7116e+00,
          1.5943e+00,  2.0828e+00,  4.9605e-02, -2.5994e-01,  2.1669e+00,
          5.6205e+00, -1.3834e+00,  6.2366e+00, -2.4156e+00, -2.9600e+00,
         -9.2810e-01,  7.2005e-02, -1.0152e+00,  2.2875e-01, -5.1365e-01,
         -1.4431e+00,  2.3879e+00,  1.1451e+00, -1.3737e-01,  3.2402e+00,
         -7.9574e-02,  4.4241e+00,  1.5413e+00,  5.2474e+00,  2.1874e+00,
          2.9592e-01, -2.8460e+00,  1.0732e+00, -9.1680e-02,  1.8944e-01,
          1.6858e+00,  1.1352e+00, -4.9349e-01, -1.0014e+00, -2.3213e-01,
          9.7594e-01,  7.0146e+00, -9.9239e-02, -9.7224e-03, -4.0847e-02,
          5.7326e-01, -1.4266e+00, -2.3005e+00, -6.9925e-01,  5.3698e-01,
         -2.4511e+00,  7.4426e-01,  1.5724e+00,  2.4352e+00,  2.4334e+00,
          2.6970e+00,  2.0699e+00, -1.5062e+00,  5.8687e-01, -3.7817e-01,
         -2.0911e-01, -1.6059e+00, -2.6941e+00,  1.2617e+00,  2.0815e-01,
          1.3054e+00,  2.2517e+00,  1.2663e+00,  2.3335e+00,  3.8308e+00,
          1.7731e-01,  2.5003e-01, -2.1123e+00, -5.0252e-01,  2.1800e+00,
          1.8855e+00,  8.0081e-01, -1.3612e+00,  1.8138e+00,  7.4010e+00,
          8.3886e+00, -1.5953e+00,  2.6224e+00,  4.0716e+00,  1.4029e+00,
         -2.5506e-01, -5.7980e-01,  1.5447e+00,  2.8967e+00,  4.8671e+00,
         -3.0649e-01,  3.6614e-01, -1.2653e+00, -1.0294e-01,  1.9058e+00,
          1.7223e+00, -6.3794e-01,  6.0246e-01,  1.2477e+00,  2.1230e-01,
         -3.2168e+00,  2.1306e+00, -1.0414e+00,  3.2032e-01, -6.9174e-01,
          1.2688e+00,  2.1939e+00,  1.0272e+00,  4.4735e+00, -1.4281e+00,
          8.0613e-01, -1.4580e+00, -2.2250e+00, -5.1685e-01, -8.5385e-01,
         -1.7215e+00, -1.1968e+00,  1.5952e+00, -4.8082e-01,  7.3523e-01,
          2.2725e+00, -7.5357e-01,  1.8556e+00, -5.4269e-01,  1.3727e+00,
          4.0489e+00, -1.9854e-01,  3.0434e+00, -3.2237e+00, -3.1463e+00,
          9.0915e-01,  6.8912e-01, -1.9772e+00,  1.9118e+00,  3.4416e-02,
          2.8775e+00,  2.5178e+00,  4.1408e+00, -9.4755e-01,  2.1666e-01,
          2.5224e-01, -7.1369e-01, -5.1859e-01, -5.6188e-02, -1.5330e+00,
         -9.4445e-01,  1.8515e+00,  7.5625e-01, -1.1524e+00,  2.6623e+00,
          3.0024e+00, -6.0677e-01,  5.5497e-01,  5.7069e-01,  2.0623e+00,
         -5.6235e-01,  2.3278e+00, -1.7420e+00,  6.1891e-01,  1.0272e+00,
          6.7515e-01,  1.8143e+00,  2.5875e+00,  1.6700e+00,  1.9951e+00,
          3.4296e+00,  4.4214e-01,  3.0352e+00,  6.4473e+00,  2.0444e+00,
          1.7176e-01, -8.8241e-01,  1.3754e+00,  5.5338e-01, -3.5221e-01,
          1.3465e+00, -4.6918e-01, -9.4055e-01, -1.3598e-01, -8.8630e-01,
          3.3203e+00,  2.9750e+00, -2.8820e-01, -1.0495e-01,  7.3694e-01,
         -1.3669e-01, -6.0076e-01, -7.4012e-01, -1.9961e+00, -7.8646e-02,
          3.1549e+00, -1.1565e+00, -2.0196e+00,  1.8609e+00, -1.6194e+00,
          2.2996e-02,  4.3261e+00,  1.1124e+00,  1.3879e+00, -4.4305e-02,
         -4.4671e-02,  1.6297e+00,  7.6040e-01,  7.0351e+00,  4.3387e+00,
         -3.8484e+00,  3.0385e+00,  4.5599e-01,  2.7613e+00, -7.5243e-01,
          6.9392e-01,  5.7654e-01,  1.9915e+00, -1.5713e+00,  7.2345e-01,
          1.4579e+00, -1.8761e+00,  2.3680e+00,  3.3101e+00,  3.2894e-02,
          3.1222e+00, -7.4757e-01, -4.1019e-01, -6.3982e-02,  1.5323e+00,
          1.1861e+00,  7.3263e-02, -3.3710e+00,  5.5556e-01, -1.8705e-01,
          2.1537e+00,  1.5427e+00, -7.6321e-01, -3.1494e+00, -9.9123e-01,
          1.7107e+00,  3.6151e+00,  1.9703e-01, -1.1572e+00, -4.4807e-01,
         -1.3285e+00,  4.8184e+00,  1.1757e+00,  1.9837e+00, -7.0967e-01,
          2.6796e+00,  2.5608e-01,  1.2107e-01, -8.0804e-01, -2.5190e-01,
         -2.3462e+00,  6.4577e-01, -2.9495e+00, -2.4907e+00, -1.5412e+00,
          1.6928e+00,  3.9888e+00,  4.0477e-01,  2.9625e+00, -7.6056e-01,
         -2.6578e-01,  5.5946e-01,  2.2658e+00,  2.7697e+00,  6.0591e-01,
          9.1674e-01,  5.6176e+00,  2.0617e+00,  8.8620e-01,  1.9682e+00,
         -5.9904e-01,  4.2919e-03,  7.9312e-01,  1.6939e+00, -9.3259e-01,
          8.5479e-01,  1.2919e+00,  1.0567e+00, -1.7262e+00, -1.2456e+00,
          9.0132e-01,  2.3885e+00,  2.0494e+00,  2.7574e+00,  2.8313e+00,
          2.6578e+00,  3.3781e-01,  1.1144e-02,  2.6258e+00,  1.4263e+00,
          1.6218e+00, -2.5966e+00,  1.4596e+00,  1.5618e+00,  5.0365e-01,
          3.8589e+00,  2.7506e+00, -4.1463e+00,  3.6608e+00, -1.5613e+00,
          3.5580e+00,  5.4558e-01,  4.8722e-01,  1.3343e+00,  3.8732e-01,
          2.9325e+00,  6.6885e-02, -3.2220e+00,  1.8568e+00,  1.8632e+00,
         -1.2192e+00,  2.3586e+00,  4.1371e+00, -1.1685e+00,  6.0892e-01,
          4.7988e-01,  5.2548e-01,  1.2528e+00,  7.1309e-01, -9.0775e-01,
          8.8670e-01,  5.6468e+00,  4.4055e-01, -9.7613e-01,  3.5916e-01,
          7.2978e-01, -1.1477e+00,  2.4725e-01, -5.3624e-01,  1.1936e+00,
         -1.8164e-01,  4.4913e-01, -1.1512e+00,  1.4731e+00,  2.0021e+00,
          2.3343e+00, -1.4432e+00,  9.8647e-01,  1.4186e+00,  1.3455e+00,
          3.0729e+00,  1.7749e+00,  4.5983e-01,  1.4486e+00, -1.8267e-01,
          3.7667e-01, -1.9178e-02,  7.0736e-01,  1.4271e+00,  9.1005e-01,
          1.1346e-01,  1.1097e-01,  5.8322e+00,  2.0595e-01, -4.7573e-01,
         -3.0609e-01,  4.1385e-01,  1.8722e+00, -2.1164e-02,  1.6529e-01,
          3.2811e+00,  4.6143e-01,  1.2704e+00, -2.6612e+00,  8.6243e-01,
         -1.5959e+00,  4.3128e+00, -2.9303e-01, -1.4848e-01,  2.8642e-01,
          4.5768e+00,  1.4004e+00,  1.4074e+00,  4.8198e+00, -1.4915e+00,
         -2.5037e+00, -9.0462e-01, -1.5153e+00,  1.3249e-01, -2.1861e-01,
         -1.0807e+00, -2.3508e-01,  1.7687e+00,  1.8585e+00,  1.6702e+00,
         -2.0829e+00, -1.9025e+00,  4.2789e+00,  8.3268e-01, -1.5365e+00,
         -6.3364e-01,  2.3217e+00, -2.7621e+00,  1.5995e+00, -2.2486e+00,
          2.5377e+00,  9.2816e-01, -7.3529e-02,  7.7825e-01,  6.4659e-02,
         -7.5901e-01,  3.0159e+00,  2.4554e+00,  5.8307e+00, -1.6824e+00,
         -3.2903e-01,  3.2652e+00,  5.6007e+00,  5.0335e+00, -1.3420e+00,
          6.7206e-01,  2.0732e-02,  4.5274e-01, -2.3044e+00,  3.0012e+00,
          3.9064e-01,  1.2119e+00,  5.3570e-01,  2.4730e-01,  1.2687e+00,
          7.2493e-01,  3.6507e+00, -3.1248e-01,  2.0225e+00, -1.1968e+00,
         -7.9175e-01,  1.7902e-01,  3.0972e+00,  2.1260e-01,  5.8845e-01,
          3.3056e+00,  3.1143e+00,  3.6509e-01,  2.0409e+00,  1.9007e+00,
         -5.7961e-02, -1.1439e+00,  1.7218e+00,  3.7281e-01, -3.8666e+00,
          1.0455e+00,  2.5631e+00,  1.4630e-01, -1.6742e+00,  1.0804e+00,
          3.2906e+00,  8.0825e-02, -1.4049e+00,  1.5020e+00, -6.5573e-01,
          4.5599e-01, -2.0197e+00, -3.4481e+00, -2.2627e+00, -3.7967e-01,
          5.9329e+00, -3.4863e+00,  1.9577e+00, -3.2115e-01, -1.1420e+00,
          2.6128e+00, -8.6592e-01,  1.4363e+00,  4.9180e+00,  3.6195e+00,
          1.8977e+00, -4.2550e-03,  1.4313e+00,  2.3247e+00, -2.6127e-01,
          8.2544e-01, -1.3406e+00,  3.5916e-01,  2.0049e+00, -3.1745e+00,
          5.3281e-01, -3.7463e-01,  3.1904e+00,  3.5361e+00, -7.4821e-01,
          8.3279e-01,  3.4904e+00,  3.6010e+00,  8.3358e-01,  2.5018e+00,
         -4.0404e-01,  2.2977e+00, -2.3045e+00, -9.0535e-01, -1.0839e-01,
         -2.1400e+00, -4.2126e+00,  5.7152e-01,  6.8873e-01,  2.5422e+00,
          1.4085e+00, -1.2299e+00,  2.8714e+00, -3.1171e+00, -4.0288e-01,
         -9.9689e-01, -6.9389e-01, -2.1643e+00,  3.5437e-02, -1.8121e+00,
         -1.9239e+00, -1.3584e+00, -1.4418e+00,  3.2409e-01, -1.7719e+00,
          1.4102e+00, -1.3120e+00,  1.8728e-01,  1.7307e-01, -7.8479e-01,
         -5.8426e-01, -1.0353e+00, -1.3336e+00, -1.5496e+00,  7.7492e-01,
         -2.4285e-01, -2.8524e+00, -8.1228e-01,  2.2731e+00, -2.3090e+00,
          3.3097e+00, -7.5211e-01, -2.8971e+00, -4.5026e+00, -3.4429e+00,
         -1.9211e+00,  1.3943e+00,  5.4338e-01,  2.3462e+00,  8.5777e-01,
          2.8477e+00,  3.5467e+00,  4.3124e+00,  2.3068e+00, -2.3933e-01,
          2.3755e+00,  4.0610e-01,  3.6122e+00,  5.9470e+00, -5.6418e-01,
          1.3033e-01,  3.7408e+00, -2.5032e+00,  3.6211e+00,  2.6381e+00,
         -2.2926e+00, -2.7876e+00,  1.0792e+00, -2.2052e+00, -1.2095e+00,
         -8.0939e-01, -1.6949e+00, -1.5961e+00,  1.2628e+00, -2.6245e+00,
         -7.1069e-01, -7.9061e-01, -3.6457e+00,  9.7368e-01,  2.7518e-01]],
       grad_fn=<AddmmBackward0>)
In [39]:
_, index = torch.max(out, 1)
In [40]:
percentage = torch.nn.functional.softmax(out, dim=1)[0] * 100
labels[index[0]], percentage[index[0]].item()
Out[40]:
('sorrel', 98.04431915283203)
In [41]:
_, indices = torch.sort(out, descending=True)
[(labels[idx], percentage[idx].item()) for idx in indices[0][:5]]
Out[41]:
[('sorrel', 98.04431915283203),
 ('Arabian camel, dromedary, Camelus dromedarius', 1.4567866325378418),
 ('cowboy hat, ten-gallon hat', 0.10884673148393631),
 ('cowboy boot', 0.04054175317287445),
 ('barrel, cask', 0.03432067111134529)]