git init testdir
cd testdir/
git config core.sparsecheckout true
echo 'model_zoo/official/cv/mobilenetv2' >> .git/info/sparse-checkout
git remote add -f origin git@gitee.com:mindspore/mindspore.git
#git remote add -f origin https://github.com/mindspore-ai/mindspore.git
git pull origin master
mv model_zoo/official/cv/mobilenetv2 ../mobilenetv2/
cd ..
rm -rf testdir
Competition data (data): https://s3.cn-north-1.amazonaws.com.cn/files.datafountain.cn/uploads/images/competitions/557/%E8%B5%9B%E9%A2%98%E6%95%B0%E6%8D%AE%EF%BC%88data%29-430493.zip
Reference model (pytorch_reference_model): https://s3.cn-north-1.amazonaws.com.cn/files.datafountain.cn/uploads/images/competitions/557/%E5%BB%BA%E6%A8%A1%E6%A0%B7%E4%BE%8B%EF%BC%88pytorch_reference_model%EF%BC%89-577358.zip
Evaluation and inference code (Pytorch_evaluation_inference): https://s3.cn-north-1.amazonaws.com.cn/files.datafountain.cn/uploads/images/competitions/557/%E8%B5%9B%E9%A2%98%E4%BB%A3%E7%A0%81%EF%BC%88Pytorch_evaluation_inference%EF%BC%89-532530.zip
import torch
import torch.nn as nn
import torch.nn.functional as F


class DownBlock(nn.Module):
    """Strided-convolution block: halves the spatial size, then LeakyReLU and Dropout."""

    def __init__(self, in_features, out_features):
        super(DownBlock, self).__init__()
        self.down = nn.Sequential(
            nn.Conv2d(in_features, out_features, 3, 2, padding=1),
            nn.LeakyReLU(),
            nn.Dropout(p=0.25)
        )

    def forward(self, x):
        return self.down(x)


class Discriminator(nn.Module):
    def __init__(self, num_blocks=4):
        super(Discriminator, self).__init__()
        zero_padding = nn.Sequential(
            nn.ZeroPad2d((32, 32, 0, 0)),
        )
        down_blocks = []
        for i in range(num_blocks):
            # Channels grow 1 -> 8 -> 16 -> 24 -> 32 across the default four blocks.
            down_blocks.append(
                DownBlock(1 if i == 0 else i * 8, (i + 1) * 8))
        linear_block = nn.Sequential(
            nn.Linear(64, 1),
        )
        self.zero_padding = zero_padding
        self.conv_block = nn.ModuleList(down_blocks)
        self.linear_block = linear_block

    def forward(self, input):
        output = self.zero_padding(input)
        # nn.ModuleList is not callable, so apply the down blocks one by one.
        for down in self.conv_block:
            output = down(output)
        output = output.view(-1, 64)
        output = self.linear_block(output)
        return output
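As a quick sanity check, the discriminator can be run on a random tensor. The single-channel 64x64 input below is only an assumed placeholder; the actual sample shape comes from the competition data.

# Minimal smoke test with an assumed (batch, 1, 64, 64) input.
disc = Discriminator(num_blocks=4)
disc.eval()
x = torch.randn(2, 1, 64, 64)
scores = disc(x)
# With this input size, view(-1, 64) folds spatial positions into the batch
# dimension, so more than one score is produced per sample.
print(scores.shape)  # torch.Size([32, 1])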
class UpSamplingBlock(nn.Module):
    """Nearest-neighbour 2x upsampling followed by a 3x3 convolution."""

    def __init__(self, in_features, out_features, v):
        super(UpSamplingBlock, self).__init__()
        self.cov = nn.Conv2d(in_features, out_features, 3, 1, padding=1)
        # The input is a 4-D feature map, so BatchNorm2d is required here
        # (the original BatchNorm1d would reject the conv output).
        self.bn = nn.BatchNorm2d(out_features)
        self.v = v
        # v == 0 selects LeakyReLU for intermediate blocks; otherwise Tanh for the output block.
        self.act = nn.LeakyReLU(negative_slope=0.2, inplace=True) if self.v == 0 else nn.Tanh()

    def forward(self, x):
        x = F.interpolate(x, scale_factor=2)
        x = self.cov(x)
        x = self.bn(x)
        x = self.act(x)
        return x
class Generator(nn.Module):
    def __init__(self, num_blocks=4, crop_size=32):
        super(Generator, self).__init__()
        linear_block = nn.Sequential(
            nn.Linear(128, 8192),
            nn.BatchNorm1d(8192),
            nn.LeakyReLU(),
        )
        up_blocks = []
        # m and v hold enough entries for five up-sampling blocks; with
        # num_blocks=5 the last block maps to 2 channels through Tanh.
        m = [1024, 1024, 512, 256, 128, 2]
        v = [0, 0, 0, 0, 1]
        for i in range(num_blocks):
            up_blocks.append(
                UpSamplingBlock(m[i], m[i + 1], v[i]))
        self.linear_block = linear_block
        self.conv_block = nn.ModuleList(up_blocks)
        # crop_size was not defined in the original snippet; it is exposed here
        # as a constructor argument (the default of 32 is an assumption).
        self.crop_size = crop_size

    def forward(self, input):
        output = self.linear_block(input)
        # 8192 = 1024 * 4 * 2, matching the 1024 input channels of the first up block.
        output = output.view(-1, 1024, 4, 2)
        for up in self.conv_block:
            output = up(output)
        # Crop along the height dimension.
        output = output[:, :, self.crop_size:-self.crop_size]
        return output
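A similar sketch for the generator, assuming num_blocks=5 (to use all entries of m and v and end in the 2-channel Tanh block) and crop_size=32; the latent size of 128 comes from the first Linear layer.

# Sketch of a generator forward pass; num_blocks=5 and crop_size=32 are assumptions.
gen = Generator(num_blocks=5, crop_size=32)
gen.eval()                   # eval mode so BatchNorm1d accepts a small batch
z = torch.randn(2, 128)      # latent vectors, sized to match nn.Linear(128, 8192)
fake = gen(z)
print(fake.shape)            # torch.Size([2, 2, 64, 64]) for these settings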