import math

import torch.nn as nn

            m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))
            if m.bias is not None:
                m.bias.data.zero_()
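
# The fragment above is truncated: in the common SegFormer-style _init_weights
# (an assumption, since the enclosing method is missing here), fan_out for a
# Conv2d is derived from its kernel size, output channels, and groups:
#
#     m = nn.Conv2d(64, 128, kernel_size=3, padding=1)
#     fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels // m.groups
#     m.weight.data.normal_(0, math.sqrt(2.0 / fan_out))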
    def forward_features(self, x):
        B = x.shape[0]
        outs = []

        # stage 1
        x, H, W = self.patch_embed1(x)
        for i, blk in enumerate(self.block1):
            x = blk(x, H, W)
        x = self.norm1(x)
        # fold the token sequence (B, N, C) back into a feature map (B, C, H, W)
        x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous()
        outs.append(x)

        # stage 2
        x, H, W = self.patch_embed2(x)
        for i, blk in enumerate(self.block2):
            x = blk(x, H, W)
        x = self.norm2(x)
        x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous()
        outs.append(x)

        # stage 3
        x, H, W = self.patch_embed3(x)
        for i, blk in enumerate(self.block3):
            x = blk(x, H, W)
        x = self.norm3(x)
        x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous()
        outs.append(x)

        # stage 4
        x, H, W = self.patch_embed4(x)
        for i, blk in enumerate(self.block4):
            x = blk(x, H, W)
        x = self.norm4(x)
        x = x.reshape(B, H, W, -1).permute(0, 3, 1, 2).contiguous()
        outs.append(x)

        # return the stage-4 feature map plus the stage 2-4 outputs (stage 1 is dropped)
        return x, outs[1:]

    def forward(self, x):
        x, outs = self.forward_features(x)
        return x, outs
# Depthwise 3x3 convolution applied to a (B, N, C) token sequence by
# temporarily restoring its 2-D layout.
class DWConv(nn.Module):
    def __init__(self, dim=768):
        super(DWConv, self).__init__()
        self.dwconv = nn.Conv2d(dim, dim, 3, 1, 1, bias=True, groups=dim)

    def forward(self, x, H, W):
        B, N, C = x.shape
        x = x.transpose(1, 2).view(B, C, H, W)  # (B, N, C) -> (B, C, H, W)
        x = self.dwconv(x)
        x = x.flatten(2).transpose(1, 2)        # (B, C, H, W) -> (B, N, C)
        return x
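
# Quick sanity check for DWConv (a sketch; the dimensions below are made up):
# a (B, N, C) token sequence goes in and an identically shaped one comes out.
import torch

dw = DWConv(dim=64)
H, W = 16, 16
tokens = torch.randn(2, H * W, 64)  # (B, N, C) with N = H * W
out = dw(tokens, H, W)
print(out.shape)  # torch.Size([2, 256, 64])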
import torch
import torch.nn as nn
import torch.nn.functional as F
class UpsampleConcatConv(nn.Module):
    def __init__(self):
        super(UpsampleConcatConv, self).__init__()
        # 2x upsampling: 128 -> 64 channels
        self.upsamples2 = nn.ConvTranspose2d(128, 64, kernel_size=4, stride=2, padding=1)
        # 4x upsampling: 384 -> 96 channels
        self.upsamplec3 = nn.Sequential(
            nn.ConvTranspose2d(384, 192, kernel_size=4, stride=2, padding=1),
            nn.ConvTranspose2d(192, 96, kernel_size=4, stride=2, padding=1)
        )
        # 4x upsampling: 320 -> 64 channels
        self.upsamples3 = nn.Sequential(
            nn.ConvTranspose2d(320, 128, kernel_size=4, stride=2, padding=1),
            nn.ConvTranspose2d(128, 64, kernel_size=4, stride=2, padding=1)
        )
        # 8x upsampling: 512 -> 64 channels
        self.upsamples4 = nn.Sequential(
            nn.ConvTranspose2d(512, 320, kernel_size=4, stride=2, padding=1),
            nn.ConvTranspose2d(320, 128, kernel_size=4, stride=2, padding=1),
            nn.ConvTranspose2d(128, 64, kernel_size=4, stride=2, padding=1)
        )
    def forward(self, inputs):
        # upsample everything to the spatial size of c1
        c1, c3, s2, s3, s4 = inputs
        # c2 = self.upsamplec2(c2)
        c3 = self.upsamplec3(c3)
        # c4 = self.upsamplec4(c4)
        s2 = self.upsamples2(s2)
        s3 = self.upsamples3(s3)
        s4 = self.upsamples4(s4)

        # concatenate the five feature maps along the channel dimension
        x = torch.cat([c1, c3, s2, s3, s4], dim=1)
        features = [c1, c3, s2, s3, s4]
        # shortcut = x
        # x = x.permute(0, 2, 3, 1)
        return x, features
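
# Minimal smoke test for UpsampleConcatConv (a sketch). The channel counts of
# c3/s2/s3/s4 are fixed by the layers above; the 96 channels assumed for c1 are
# a guess, since this module never touches c1. Spatial sizes follow a 512x512
# input at 1/4, 1/8, 1/16, and 1/32 resolution.
fuse = UpsampleConcatConv()
c1 = torch.randn(1, 96, 128, 128)
c3 = torch.randn(1, 384, 32, 32)
s2 = torch.randn(1, 128, 64, 64)
s3 = torch.randn(1, 320, 32, 32)
s4 = torch.randn(1, 512, 16, 16)
x, features = fuse([c1, c3, s2, s3, s4])
print(x.shape)  # torch.Size([1, 384, 128, 128]) -- 96 + 96 + 64 + 64 + 64 channels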