import os
import numpy as np
import netron
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
class NaiveBlock(nn.Module):
    """Naive Inception-style block: parallel 1x1 / 3x3 / 5x5 convolutions
    plus a stride-1 max-pool branch, concatenated along the channel axis.

    All branches preserve the spatial size (via the conv paddings and the
    stride-1 pool), so the outputs can be concatenated. The result has
    ``sum(out_ch) + inp_ch`` channels — the pool branch has no projection
    and passes the input's channels through unchanged.

    Args:
        inp_ch: number of input channels.
        out_ch: output channels for the (1x1, 3x3, 5x5) conv branches.
        verbose: if True, print the concatenated feature-map shape on each
            forward pass. Off by default — the original unconditional print
            was a debug leftover that spammed stdout every training step.
    """

    def __init__(self, inp_ch: int = 256, out_ch: tuple = (128, 192, 96),
                 verbose: bool = False) -> None:
        super().__init__()
        self.cov1x1 = nn.Conv2d(in_channels=inp_ch, out_channels=out_ch[0], kernel_size=1)
        # padding=1 (3x3) and padding=2 (5x5) keep spatial dims equal to the
        # input so the channel-wise concatenation in forward() is valid.
        self.cov3x3 = nn.Conv2d(in_channels=inp_ch, out_channels=out_ch[1], kernel_size=3, padding=1)
        self.cov5x5 = nn.Conv2d(in_channels=inp_ch, out_channels=out_ch[2], kernel_size=5, padding=2)
        # stride=1 + padding=1 makes the 3x3 pool shape-preserving as well.
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
        self.verbose = verbose

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Run the four branches on ``x`` and concatenate along channels.

        Args:
            x: input of shape (N, inp_ch, H, W).

        Returns:
            Tensor of shape (N, sum(out_ch) + inp_ch, H, W).
        """
        branch1 = F.relu(self.cov1x1(x))
        branch2 = F.relu(self.cov3x3(x))
        branch3 = F.relu(self.cov5x5(x))
        # Pool branch intentionally has no conv/ReLU: channels pass through.
        branch4 = self.maxpool(x)
        out = torch.cat((branch1, branch2, branch3, branch4), dim=1)
        if self.verbose:  # debug aid only; forward is side-effect-free by default
            print(f'Total feature maps: {out.shape[1]} of size: {out.shape[2:]}')
        return out