#7743: yolov4 integration
keerthana-r-mcw committed May 14, 2024
1 parent 847a1c7 commit 4001928
Showing 26 changed files with 3,197 additions and 3 deletions.
81 changes: 81 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample1.py
@@ -0,0 +1,81 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn


class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x


class DownSample1(nn.Module):
    def __init__(self):
        super().__init__()
        self.c1 = nn.Conv2d(3, 32, 3, 1, 1, bias=False)
        self.b1 = nn.BatchNorm2d(32)
        # self.relu = nn.ReLU(inplace=True)
        # Note: the attribute keeps the name `relu`, but the activation is Mish.
        self.relu = Mish()

        self.c2 = nn.Conv2d(32, 64, 3, 2, 1, bias=False)  # stride-2 downsample
        self.b2 = nn.BatchNorm2d(64)

        # CSP split: c3 and c4 are parallel 1x1 branches off the downsampled map.
        self.c3 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
        self.b3 = nn.BatchNorm2d(64)

        self.c4 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
        self.b4 = nn.BatchNorm2d(64)

        self.c5 = nn.Conv2d(64, 32, 1, 1, 0, bias=False)
        self.b5 = nn.BatchNorm2d(32)

        self.c6 = nn.Conv2d(32, 64, 3, 1, 1, bias=False)
        self.b6 = nn.BatchNorm2d(64)

        self.c7 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
        self.b7 = nn.BatchNorm2d(64)

        # 1x1 fuse after concatenating the two 64-channel CSP branches (128 -> 64).
        self.c8 = nn.Conv2d(128, 64, 1, 1, 0, bias=False)
        self.b8 = nn.BatchNorm2d(64)

    def forward(self, input: torch.Tensor):
        x1 = self.c1(input)
        x1_b = self.b1(x1)
        x1_m = self.relu(x1_b)

        x2 = self.c2(x1_m)
        x2_b = self.b2(x2)
        x2_m = self.relu(x2_b)

        # CSP split: x3 and x4 both branch from x2_m.
        x3 = self.c3(x2_m)
        x3_b = self.b3(x3)
        x3_m = self.relu(x3_b)

        x4 = self.c4(x2_m)
        x4_b = self.b4(x4)
        x4_m = self.relu(x4_b)

        x5 = self.c5(x4_m)
        x5_b = self.b5(x5)
        x5_m = self.relu(x5_b)

        x6 = self.c6(x5_m)
        x6_b = self.b6(x6)
        x6_m = self.relu(x6_b)
        x6_m = x6_m + x4_m  # residual shortcut within the main branch

        x7 = self.c7(x6_m)
        x7_b = self.b7(x7)
        x7_m = self.relu(x7_b)
        x7_m = torch.cat([x7_m, x3_m], dim=1)  # merge the two CSP branches

        x8 = self.c8(x7_m)
        x8_b = self.b8(x8)
        x8_m = self.relu(x8_b)
        return x8_m
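
For reference, a minimal shape check of DownSample1, assuming a 320x320 input (an illustrative choice of ours, not stated in the diff; YOLOv4 is commonly run at 320, 416, or 608):

    import torch
    from models.experimental.functional_yolov4.reference.downsample1 import DownSample1

    model = DownSample1().eval()
    x = torch.randn(1, 3, 320, 320)  # assumed input resolution
    with torch.no_grad():
        out = model(x)
    print(out.shape)  # torch.Size([1, 64, 160, 160]); only c2 (stride 2) reduces H and W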
63 changes: 63 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample2.py
@@ -0,0 +1,63 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock

class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x


class DownSample2(nn.Module):
    def __init__(self):
        super().__init__()
        self.c1 = nn.Conv2d(64, 128, 3, 2, 1, bias=False)  # stride-2 downsample
        self.b1 = nn.BatchNorm2d(128)
        self.relu = Mish()  # named `relu`, but the activation is Mish

        # CSP split: c2 is the shortcut branch, c3 feeds the residual stack.
        self.c2 = nn.Conv2d(128, 64, 1, 1, 0, bias=False)
        self.b2 = nn.BatchNorm2d(64)

        self.c3 = nn.Conv2d(128, 64, 1, 1, 0, bias=False)
        self.b3 = nn.BatchNorm2d(64)

        self.res = ResBlock(ch=64, nblocks=2)

        self.c4 = nn.Conv2d(64, 64, 1, 1, 0, bias=False)
        self.b4 = nn.BatchNorm2d(64)

        # 1x1 fuse after concatenating the two CSP branches (64 + 64 -> 128).
        self.c5 = nn.Conv2d(128, 128, 1, 1, 0, bias=False)
        self.b5 = nn.BatchNorm2d(128)

    def forward(self, input: torch.Tensor):
        x1 = self.c1(input)
        x1_b = self.b1(x1)
        x1_m = self.relu(x1_b)

        x2 = self.c2(x1_m)
        x2_b = self.b2(x2)
        x2_m = self.relu(x2_b)

        x3 = self.c3(x1_m)
        x3_b = self.b3(x3)
        x3_m = self.relu(x3_b)

        r1 = self.res(x3_m)

        x4 = self.c4(r1)
        x4_b = self.b4(x4)
        x4_m = self.relu(x4_b)

        x4_m = torch.cat([x4_m, x2_m], dim=1)  # merge CSP branches

        x5 = self.c5(x4_m)
        x5_b = self.b5(x5)
        x5_m = self.relu(x5_b)
        return x5_m
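
ResBlock is imported from models/experimental/functional_yolov4/reference/resblock.py, which is among the 26 changed files but not shown in this excerpt. A plausible sketch of it, following the layout of the widely used pytorch-YOLOv4 reference implementation (nblocks repetitions of a 1x1 then 3x3 conv-BN-Mish pair, each wrapped in an identity shortcut); the actual file contents are an assumption here:

    import torch
    import torch.nn as nn


    class Mish(nn.Module):
        def forward(self, x):
            return x * torch.tanh(nn.functional.softplus(x))


    class ResBlock(nn.Module):
        # Hypothetical sketch of the imported ResBlock; the real file may differ.
        def __init__(self, ch, nblocks=1):
            super().__init__()
            self.module_list = nn.ModuleList()
            for _ in range(nblocks):
                self.module_list.append(
                    nn.Sequential(
                        nn.Conv2d(ch, ch, 1, 1, 0, bias=False),
                        nn.BatchNorm2d(ch),
                        Mish(),
                        nn.Conv2d(ch, ch, 3, 1, 1, bias=False),
                        nn.BatchNorm2d(ch),
                        Mish(),
                    )
                )

        def forward(self, x):
            for block in self.module_list:
                x = x + block(x)  # identity shortcut around each conv pair
            return x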
64 changes: 64 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample3.py
@@ -0,0 +1,64 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock


class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x


class DownSample3(nn.Module):
    def __init__(self):
        super().__init__()
        self.c1 = nn.Conv2d(128, 256, 3, 2, 1, bias=False)
        self.b1 = nn.BatchNorm2d(256)
        self.relu = Mish()

        self.c2 = nn.Conv2d(256, 128, 1, 1, 0, bias=False)
        self.b2 = nn.BatchNorm2d(128)

        self.c3 = nn.Conv2d(256, 128, 1, 1, 0, bias=False)
        self.b3 = nn.BatchNorm2d(128)

        self.res = ResBlock(ch=128, nblocks=8)

        self.c4 = nn.Conv2d(128, 128, 1, 1, 0, bias=False)
        self.b4 = nn.BatchNorm2d(128)

        self.c5 = nn.Conv2d(256, 256, 1, 1, 0, bias=False)
        self.b5 = nn.BatchNorm2d(256)

    def forward(self, input: torch.Tensor):
        x1 = self.c1(input)
        x1_b = self.b1(x1)
        x1_m = self.relu(x1_b)

        x2 = self.c2(x1_m)
        x2_b = self.b2(x2)
        x2_m = self.relu(x2_b)

        x3 = self.c3(x1_m)
        x3_b = self.b3(x3)
        x3_m = self.relu(x3_b)

        r1 = self.res(x3_m)

        x4 = self.c4(r1)
        x4_b = self.b4(x4)
        x4_m = self.relu(x4_b)

        x4_m = torch.cat([x4_m, x2_m], dim=1)

        x5 = self.c5(x4_m)
        x5_b = self.b5(x5)
        x5_m = self.relu(x5_b)
        return x5_m
64 changes: 64 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample4.py
@@ -0,0 +1,64 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock

class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x


class DownSample4(nn.Module):
    def __init__(self):
        super().__init__()
        self.c1 = nn.Conv2d(256, 512, 3, 2, 1, bias=False)
        self.b1 = nn.BatchNorm2d(512)
        self.relu = Mish()

        self.c2 = nn.Conv2d(512, 256, 1, 1, 0, bias=False)
        self.b2 = nn.BatchNorm2d(256)

        self.c3 = nn.Conv2d(512, 256, 1, 1, 0, bias=False)
        self.b3 = nn.BatchNorm2d(256)

        self.res = ResBlock(ch=256, nblocks=8)

        self.c4 = nn.Conv2d(256, 256, 1, 1, 0, bias=False)
        self.b4 = nn.BatchNorm2d(256)

        self.c5 = nn.Conv2d(512, 512, 1, 1, 0, bias=False)
        self.b5 = nn.BatchNorm2d(512)

    def forward(self, input: torch.Tensor):
        x1 = self.c1(input)
        x1_b = self.b1(x1)
        x1_m = self.relu(x1_b)

        x2 = self.c2(x1_m)
        x2_b = self.b2(x2)
        x2_m = self.relu(x2_b)

        x3 = self.c3(x1_m)
        x3_b = self.b3(x3)
        x3_m = self.relu(x3_b)

        # resblock
        r = self.res(x3_m)

        x4 = self.c4(r)
        x4_b = self.b4(x4)
        x4_m = self.relu(x4_b)

        x4_m = torch.cat([x4_m, x2_m], dim=1)

        x5 = self.c5(x4_m)
        x5_b = self.b5(x5)
        x5_m = self.relu(x5_b)
        return x5_m
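
DownSample2 through DownSample5 repeat one CSP pattern with doubled channel counts (64->128, 128->256, 256->512, 512->1024) and varying residual depths (nblocks = 2, 8, 8, 4). Purely as an illustration of that shared structure — the commit keeps the stages as separate explicit classes — the stage could be written parametrically; the helper and class names below are hypothetical:

    import torch
    import torch.nn as nn
    from models.experimental.functional_yolov4.reference.resblock import ResBlock


    class Mish(nn.Module):
        def forward(self, x):
            return x * torch.tanh(nn.functional.softplus(x))


    def conv_bn_mish(in_ch, out_ch, k, s, p):
        # illustrative helper, not part of the commit
        return nn.Sequential(
            nn.Conv2d(in_ch, out_ch, k, s, p, bias=False),
            nn.BatchNorm2d(out_ch),
            Mish(),
        )


    class CSPDownSample(nn.Module):
        # Hypothetical generalization: CSPDownSample(64, 2) matches DownSample2,
        # (128, 8) matches DownSample3, (256, 8) DownSample4, (512, 4) DownSample5.
        def __init__(self, in_ch, nblocks):
            super().__init__()
            out_ch = in_ch * 2
            self.c1 = conv_bn_mish(in_ch, out_ch, 3, 2, 1)   # stride-2 downsample
            self.c2 = conv_bn_mish(out_ch, in_ch, 1, 1, 0)   # CSP shortcut branch
            self.c3 = conv_bn_mish(out_ch, in_ch, 1, 1, 0)   # branch into residual stack
            self.res = ResBlock(ch=in_ch, nblocks=nblocks)
            self.c4 = conv_bn_mish(in_ch, in_ch, 1, 1, 0)
            self.c5 = conv_bn_mish(out_ch, out_ch, 1, 1, 0)  # fuse after concat

        def forward(self, x):
            x1 = self.c1(x)
            main = self.c4(self.res(self.c3(x1)))
            return self.c5(torch.cat([main, self.c2(x1)], dim=1))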
64 changes: 64 additions & 0 deletions models/experimental/functional_yolov4/reference/downsample5.py
@@ -0,0 +1,64 @@
# SPDX-FileCopyrightText: © 2023 Tenstorrent Inc.

# SPDX-License-Identifier: Apache-2.0


import torch
import torch.nn as nn
from models.experimental.functional_yolov4.reference.resblock import ResBlock

class Mish(torch.nn.Module):
    """Mish activation: x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x


class DownSample5(nn.Module):
    def __init__(self):
        super().__init__()
        self.c1 = nn.Conv2d(512, 1024, 3, 2, 1, bias=False)
        self.b1 = nn.BatchNorm2d(1024)
        self.relu = Mish()

        self.c2 = nn.Conv2d(1024, 512, 1, 1, 0, bias=False)
        self.b2 = nn.BatchNorm2d(512)

        self.c3 = nn.Conv2d(1024, 512, 1, 1, 0, bias=False)
        self.b3 = nn.BatchNorm2d(512)

        self.res = ResBlock(ch=512, nblocks=4)

        self.c4 = nn.Conv2d(512, 512, 1, 1, 0, bias=False)
        self.b4 = nn.BatchNorm2d(512)

        self.c5 = nn.Conv2d(1024, 1024, 1, 1, 0, bias=False)
        self.b5 = nn.BatchNorm2d(1024)

    def forward(self, input: torch.Tensor):
        x1 = self.c1(input)
        x1_b = self.b1(x1)
        x1_m = self.relu(x1_b)

        x2 = self.c2(x1_m)
        x2_b = self.b2(x2)
        x2_m = self.relu(x2_b)

        x3 = self.c3(x1_m)
        x3_b = self.b3(x3)
        x3_m = self.relu(x3_b)

        # resblock
        r = self.res(x3_m)

        x4 = self.c4(r)
        x4_b = self.b4(x4)
        x4_m = self.relu(x4_b)

        x4_m = torch.cat([x4_m, x2_m], dim=1)

        x5 = self.c5(x4_m)
        x5_b = self.b5(x5)
        x5_m = self.relu(x5_b)
        return x5_m
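
Chained in order, the five stages form the CSPDarknet53 backbone, and the outputs of the last three are the feature maps a YOLOv4 neck would consume. A sketch assuming a 608x608 input (the resolution and tap points follow the standard YOLOv4 layout, not anything stated in this diff):

    import torch
    from models.experimental.functional_yolov4.reference.downsample1 import DownSample1
    from models.experimental.functional_yolov4.reference.downsample2 import DownSample2
    from models.experimental.functional_yolov4.reference.downsample3 import DownSample3
    from models.experimental.functional_yolov4.reference.downsample4 import DownSample4
    from models.experimental.functional_yolov4.reference.downsample5 import DownSample5

    d1, d2, d3, d4, d5 = (m.eval() for m in
        (DownSample1(), DownSample2(), DownSample3(), DownSample4(), DownSample5()))
    x = torch.randn(1, 3, 608, 608)  # assumed input resolution
    with torch.no_grad():
        x1 = d1(x)   # [1, 64, 304, 304]
        x2 = d2(x1)  # [1, 128, 152, 152]
        x3 = d3(x2)  # [1, 256, 76, 76]   -> backbone tap for the neck
        x4 = d4(x3)  # [1, 512, 38, 38]   -> backbone tap for the neck
        x5 = d5(x4)  # [1, 1024, 19, 19]  -> backbone tap for the neck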