-
Notifications
You must be signed in to change notification settings - Fork 1
/
expanding_block.py
37 lines (26 loc) · 997 Bytes
/
expanding_block.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 3 11:30:03 2022
@author: ahmedemam576
one of the building blocks to the Gen and Disc
"""
import torch
from torch import nn
class Expanding_Block(nn.Module):
    """Upsampling block shared by the Generator and Discriminator.

    Applies a stride-2 transposed convolution that doubles the spatial
    resolution and halves the channel count, optionally followed by
    instance normalization and a ReLU / LeakyReLU activation.

    Parameters
    ----------
    input_channels : int
        Number of channels in the input feature map; the output has
        ``input_channels // 2`` channels.
    use_inorm : bool, optional
        If True (default), apply ``InstanceNorm2d`` after the transposed
        convolution.
    kernel_size : int, optional
        Kernel size of the transposed convolution (default 3).
    activation : str, optional
        ``'relu'`` (default) or ``'leakyrelu'`` (negative slope 0.2).
        Any other value skips the activation entirely (kept from the
        original behavior).
    """

    def __init__(self, input_channels, use_inorm=True, kernel_size=3, activation='relu'):
        super(Expanding_Block, self).__init__()
        # stride=2 + output_padding=1 exactly doubles H and W for odd kernels
        self.tconv = nn.ConvTranspose2d(
            input_channels, input_channels // 2, kernel_size,
            stride=2, padding=1, output_padding=1,
        )
        self.activation = activation
        self.use_inorm = use_inorm
        if use_inorm:
            self.inorm = nn.InstanceNorm2d(input_channels // 2)

    def forward(self, x):
        """Upsample ``x``; returns a tensor with half the channels and 2x the spatial size."""
        x = self.tconv(x)
        if self.use_inorm:
            x = self.inorm(x)
        if self.activation == 'relu':
            x = nn.functional.relu(x)
        elif self.activation == 'leakyrelu':
            # BUG FIX: the function is `leaky_relu`, not `leakyrelu`;
            # the original raised AttributeError on this branch.
            x = nn.functional.leaky_relu(x, 0.2)
        return x