#7743: Integrate yolov4 modules
keerthana-r-mcw committed Apr 30, 2024
1 parent 212cdfc commit 2125a68
Showing 26 changed files with 2,986 additions and 0 deletions.
73 changes: 73 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample1.py
@@ -0,0 +1,73 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn


class DownSample1(nn.Module):
def __init__(self):
super().__init__()
self.c1 = nn.Conv2d(3, 32, 3, 1, 1, bias=False)
self.b1 = nn.BatchNorm2d(32)
self.relu = nn.ReLU(inplace=True)

self.c2 = nn.Conv2d(32, 64, 3, 2, 1, bias=False)
self.b2 = nn.BatchNorm2d(64)

self.c3 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
self.b3 = nn.BatchNorm2d(64)

self.c4 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
self.b4 = nn.BatchNorm2d(64)

self.c5 = nn.Conv2d(64, 32, 1, 1, 0, bias=False)
self.b5 = nn.BatchNorm2d(32)

self.c6 = nn.Conv2d(32, 64, 3, 1, 1, bias=False)
self.b6 = nn.BatchNorm2d(64)

self.c7 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
self.b7 = nn.BatchNorm2d(64)

self.c8 = nn.Conv2d(128, 64, 1, 1, 0, bias=False)
self.b8 = nn.BatchNorm2d(64)

def forward(self, input: torch.Tensor):
x1 = self.c1(input)
x1_b = self.b1(x1)
x1_m = self.relu(x1_b)

x2 = self.c2(x1_m)
x2_b = self.b2(x2)
x2_m = self.relu(x2_b)

x3 = self.c3(x2_m)
x3_b = self.b3(x3)
x3_m = self.relu(x3_b)

x4 = self.c4(x2_m)
x4_b = self.b4(x4)
x4_m = self.relu(x4_b)

x5 = self.c5(x4_m)
x5_b = self.b5(x5)
x5_m = self.relu(x5_b)

x6 = self.c6(x5_m)
x6_b = self.b6(x6)
x6_m = self.relu(x6_b)
x6_m = x6_m + x4_m

x7 = self.c7(x6_m)
x7_b = self.b7(x7)
x7_m = self.relu(x7_b)
x7_m = torch.cat([x7_m, x3_m], dim=1)

x8 = self.c8(x7_m)
x8_b = self.b8(x8)
x8_m = self.relu(x8_b)

return x8_m
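
DownSample1 is the first stage of the YOLOv4 backbone: a stem convolution (c1), a stride-2 downsampling convolution (c2), a CSP-style split into two 1x1 branches (c3/c4), a 1x1/3x3 residual pair (c5/c6) on one branch, and a concatenation fused by c8. A minimal smoke test, assuming the common 608x608 YOLOv4 input resolution (an example choice, not fixed by this commit):

import torch

from models.experimental.functional_yolov4.reference.downsample1 import DownSample1

# Minimal smoke test; the 1x3x608x608 input is an assumed example resolution.
model = DownSample1().eval()
with torch.no_grad():
    out = model(torch.randn(1, 3, 608, 608))
print(out.shape)  # torch.Size([1, 64, 304, 304]) -- the single stride-2 conv halves H and W
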
57 changes: 57 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample2.py
@@ -0,0 +1,57 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock


class DownSample2(nn.Module):
def __init__(self):
super().__init__()
self.c1 = nn.Conv2d(64, 128, 3, 2, 1, bias=False)
self.b1 = nn.BatchNorm2d(128)
self.relu = nn.ReLU(inplace=True)

self.c2 = nn.Conv2d(128, 64, 1, 1, bias=False)
self.b2 = nn.BatchNorm2d(64)

self.c3 = nn.Conv2d(128, 64, 1, 1, bias=False)
self.b3 = nn.BatchNorm2d(64)

self.res = ResBlock(64, 2)

self.c4 = nn.Conv2d(64, 64, 1, 1, bias=False)
self.b4 = nn.BatchNorm2d(64)

self.c5 = nn.Conv2d(128, 128, 1, 1, bias=False)
self.b5 = nn.BatchNorm2d(128)

def forward(self, input: torch.Tensor):
x1 = self.c1(input)
x1_b = self.b1(x1)
x1_m = self.relu(x1_b)

x2 = self.c2(x1_m)
x2_b = self.b2(x2)
x2_m = self.relu(x2_b)

x3 = self.c3(x1_m)
x3_b = self.b3(x3)
x3_m = self.relu(x3_b)

r1 = self.res(x3_m)

x4 = self.c4(r1)
x4_b = self.b4(x4)
x4_m = self.relu(x4_b)

x4_m = torch.cat([x4_m, x2_m], dim=1)

x5 = self.c5(x4_m)
x5_b = self.b5(x5)
x5_m = self.relu(x5_b)

return x5_m
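
DownSample2 maps the (N, 64, H/2, W/2) output of DownSample1 to (N, 128, H/4, W/4): a stride-2 convolution, a CSP split into c2/c3, two ResBlock units on the c3 branch, and a concatenation fused by c5. The ResBlock it imports lives in resblock.py, which is part of this commit but not shown in this excerpt; the sketch below is only an assumption based on the standard YOLOv4 residual block (nblocks repetitions of a 1x1 then a 3x3 convolution, each with BatchNorm and the same ReLU used above, added back to the block input) and may differ from the actual file.

import torch
import torch.nn as nn


class ResBlock(nn.Module):
    """Assumed sketch of the imported ResBlock; see resblock.py in this commit for the real one."""

    def __init__(self, ch, nblocks=1):
        super().__init__()
        self.module_list = nn.ModuleList()
        for _ in range(nblocks):
            self.module_list.append(
                nn.Sequential(
                    nn.Conv2d(ch, ch, 1, 1, 0, bias=False),
                    nn.BatchNorm2d(ch),
                    nn.ReLU(inplace=True),
                    nn.Conv2d(ch, ch, 3, 1, 1, bias=False),
                    nn.BatchNorm2d(ch),
                    nn.ReLU(inplace=True),
                )
            )

    def forward(self, x):
        for block in self.module_list:
            x = x + block(x)  # residual connection around each 1x1/3x3 pair
        return x
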
56 changes: 56 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample3.py
@@ -0,0 +1,56 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock


class DownSample3(nn.Module):
def __init__(self):
super().__init__()
self.c1 = nn.Conv2d(128, 256, 3, 2, 1, bias=False)
self.b1 = nn.BatchNorm2d(256)
self.relu = nn.ReLU(inplace=True)

self.c2 = nn.Conv2d(256, 128, 1, 1, bias=False)
self.b2 = nn.BatchNorm2d(128)

self.c3 = nn.Conv2d(256, 128, 1, 1, bias=False)
self.b3 = nn.BatchNorm2d(128)

self.res = ResBlock(128, 8)

self.c4 = nn.Conv2d(128, 128, 1, 1, bias=False)
self.b4 = nn.BatchNorm2d(128)

self.c5 = nn.Conv2d(256, 256, 1, 1, bias=False)
self.b5 = nn.BatchNorm2d(256)

def forward(self, input: torch.Tensor):
x1 = self.c1(input)
x1_b = self.b1(x1)
x1_m = self.relu(x1_b)

x2 = self.c2(x1_m)
x2_b = self.b2(x2)
x2_m = self.relu(x2_b)

x3 = self.c3(x1_m)
x3_b = self.b3(x3)
x3_m = self.relu(x3_b)

r1 = self.res(x3_m)

x4 = self.c4(r1)
x4_b = self.b4(x4)
x4_m = self.relu(x4_b)

x4_m = torch.cat([x4_m, x2_m], dim=1)

x5 = self.c5(x4_m)
x5_b = self.b5(x5)
x5_m = self.relu(x5_b)
return x5_m
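
DownSample3 repeats the same CSP pattern with doubled widths and eight ResBlock units, taking (N, 128, H/4, W/4) to (N, 256, H/8, W/8). A hedged shape check, again assuming a 608x608 image:

import torch

from models.experimental.functional_yolov4.reference.downsample3 import DownSample3

# Assumed input: the DownSample2 output for a 608x608 image.
with torch.no_grad():
    out = DownSample3().eval()(torch.randn(1, 128, 152, 152))
assert out.shape == (1, 256, 76, 76)
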
58 changes: 58 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample4.py
@@ -0,0 +1,58 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock


class DownSample4(nn.Module):
def __init__(self):
super().__init__()
self.c1 = nn.Conv2d(256, 512, 3, 2, 1, bias=False)
self.b1 = nn.BatchNorm2d(512)
self.relu = nn.ReLU(inplace=True)

self.c2 = nn.Conv2d(512, 256, 1, 1, 0, bias=False)
self.b2 = nn.BatchNorm2d(256)

self.c3 = nn.Conv2d(512, 256, 1, 1, 0, bias=False)
self.b3 = nn.BatchNorm2d(256)

self.res = ResBlock(256, 8)

self.c4 = nn.Conv2d(256, 256, 1, 1, 0, bias=False)
self.b4 = nn.BatchNorm2d(256)

self.c5 = nn.Conv2d(512, 512, 1, 1, 0, bias=False)
self.b5 = nn.BatchNorm2d(512)

def forward(self, input: torch.Tensor):
x1 = self.c1(input)
x1_b = self.b1(x1)
x1_m = self.relu(x1_b)

x2 = self.c2(x1_m)
x2_b = self.b2(x2)
x2_m = self.relu(x2_b)

x3 = self.c3(x1_m)
x3_b = self.b3(x3)
x3_m = self.relu(x3_b)

# resblock
r = self.res(x3_m)

x4 = self.c4(r)
x4_b = self.b4(x4)
x4_m = self.relu(x4_b)

x4_m = torch.cat([x4_m, x2_m], dim=1)

x5 = self.c5(x4_m)
x5_b = self.b5(x5)
x5_m = self.relu(x5_b)

return x5_m
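
DownSample4 is the same structure again at 512 output channels, taking (N, 256, H/8, W/8) to (N, 512, H/16, W/16). Under the same assumed 608x608 input:

import torch

from models.experimental.functional_yolov4.reference.downsample4 import DownSample4

# Assumed input: the DownSample3 output for a 608x608 image.
with torch.no_grad():
    out = DownSample4().eval()(torch.randn(1, 256, 76, 76))
assert out.shape == (1, 512, 38, 38)
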
60 changes: 60 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample5.py
@@ -0,0 +1,60 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock


class DownSample5(nn.Module):
def __init__(self):
super().__init__()
self.c1 = nn.Conv2d(512, 1024, 3, 2, 1, bias=False)
self.b1 = nn.BatchNorm2d(1024)
self.relu = nn.ReLU(inplace=True)

self.c2 = nn.Conv2d(1024, 512, 1, 1, bias=False)
self.b2 = nn.BatchNorm2d(512)

self.c3 = nn.Conv2d(1024, 512, 1, 1, bias=False)
self.b3 = nn.BatchNorm2d(512)

self.res = ResBlock(512, 4)

self.c4 = nn.Conv2d(512, 512, 1, 1, bias=False)
self.b4 = nn.BatchNorm2d(512)

self.c5 = nn.Conv2d(1024, 1024, 1, 1, bias=False)
self.b5 = nn.BatchNorm2d(1024)

def forward(self, input: torch.Tensor):
x1 = self.c1(input)
x1_b = self.b1(x1)
x1_m = self.relu(x1_b)

x2 = self.c2(x1_m)
x2_b = self.b2(x2)
x2_m = self.relu(x2_b)

x3 = self.c3(x1_m)
x3_b = self.b3(x3)
x3_m = self.relu(x3_b)

# resblock
r = self.res(x3_m)

x4 = self.c4(r)
x4_b = self.b4(x4)
x4_m = self.relu(x4_b)

x4_m = torch.cat([x4_m, x2_m], dim=1)

x5 = self.c5(x4_m)
x5_b = self.b5(x5)
x5_m = self.relu(x5_b)

return x5_m
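
Chained together, the five modules form the downsampling path of the YOLOv4 backbone; the outputs of DownSample3, DownSample4, and DownSample5 are the three feature maps a YOLOv4 neck would typically consume. An end-to-end sketch, again assuming a 608x608 input:

import torch

from models.experimental.functional_yolov4.reference.downsample1 import DownSample1
from models.experimental.functional_yolov4.reference.downsample2 import DownSample2
from models.experimental.functional_yolov4.reference.downsample3 import DownSample3
from models.experimental.functional_yolov4.reference.downsample4 import DownSample4
from models.experimental.functional_yolov4.reference.downsample5 import DownSample5

x = torch.randn(1, 3, 608, 608)  # assumed example resolution
with torch.no_grad():
    d1 = DownSample1().eval()(x)   # (1,   64, 304, 304)
    d2 = DownSample2().eval()(d1)  # (1,  128, 152, 152)
    d3 = DownSample3().eval()(d2)  # (1,  256,  76,  76)
    d4 = DownSample4().eval()(d3)  # (1,  512,  38,  38)
    d5 = DownSample5().eval()(d4)  # (1, 1024,  19,  19)
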