import torch
import torch.nn as nn
import torch.nn.functional as F


class model_DNN(nn.Module):
    """Baseline: embed the tokens, flatten the whole sequence, and classify with one linear layer."""

    def __init__(self, vocab_num, embedding_dim, max_seq_length, classify_num):
        super().__init__()
        self.emb = nn.Embedding(vocab_num, embedding_dim)
        self.fc = nn.Linear(max_seq_length * embedding_dim, classify_num)

    def forward(self, x):
        x = self.emb(x)              # (batch, seq_len, embedding_dim)
        x = x.view(x.shape[0], -1)   # flatten to (batch, seq_len * embedding_dim)
        x = self.fc(x)
        x = F.softmax(x, dim=1)      # class probabilities
        return x
class model_LSTM(nn.Module):
    """Bidirectional LSTM over the embedded sequence; classify from the final hidden states."""

    def __init__(self, vocab_num, embedding_dim, max_seq_length, classify_num, hidden_size, num_layers):
        super().__init__()
        self.emb = nn.Embedding(vocab_num, embedding_dim)
        # input_size must equal the embedding dimension: the LSTM consumes the
        # embedded sequence of shape (batch, seq_len, embedding_dim).
        self.lstm = nn.LSTM(
            input_size=embedding_dim,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=True,
            bidirectional=True,
            dropout=0.5,
        )
        self.fc1 = nn.Linear(hidden_size * 2, hidden_size)
        self.fc2 = nn.Linear(hidden_size, classify_num)

    def forward(self, x):
        x = self.emb(x)                                         # (batch, seq_len, embedding_dim)
        x, (h_n, c_n) = self.lstm(x)
        # Concatenate the last layer's forward and backward final hidden states.
        x = torch.cat((h_n[-1, :, :], h_n[-2, :, :]), dim=-1)   # (batch, hidden_size * 2)
        x = F.relu(self.fc1(x))
        x = F.softmax(self.fc2(x), dim=1)
        return x
class IMDB_TextCNN(nn.Module):
    """TextCNN: parallel convolutions with kernel heights 11, 7, 5, 3, each followed by 1-max pooling."""

    def __init__(self, vocab_num):
        super().__init__()
        self.emb = nn.Embedding(vocab_num, 256)
        # Each conv slides over k consecutive token embeddings across the full 256-dim width.
        self.convs = nn.ModuleList([nn.Conv2d(1, 256, (k, 256)) for k in (11, 7, 5, 3)])
        self.linear = nn.Sequential(
            nn.Dropout(0.5),
            nn.Linear(256 * 4, 2),
            nn.Softmax(dim=-1),
        )

    def cal_conv_pool(self, x, conv):
        x = F.relu(conv(x)).squeeze(3)               # (batch, 256, seq_len - k + 1)
        x = F.max_pool1d(x, x.shape[2]).squeeze(2)   # max over time -> (batch, 256)
        return x

    def forward(self, x):
        x = self.emb(x)      # (batch, seq_len, 256)
        x = x.unsqueeze(1)   # add channel dim -> (batch, 1, seq_len, 256)
        x = torch.cat([self.cal_conv_pool(x, conv) for conv in self.convs], -1)
        x = self.linear(x)
        return x
class IMDB_Transformer(nn.Module):
    """Single-layer Transformer encoder over the embedded sequence, then a flat linear classifier."""

    def __init__(self, vocab_num):
        super().__init__()
        self.emb = nn.Embedding(vocab_num, 256)
        self.transformer = nn.TransformerEncoder(
            nn.TransformerEncoderLayer(
                d_model=256,
                nhead=8,
                batch_first=True,  # inputs arrive as (batch, seq_len, d_model)
            ),
            num_layers=1,
        )
        # Flattening assumes a fixed sequence length of 256 (256 tokens * 256 dims).
        self.fc = nn.Sequential(
            nn.Flatten(),
            nn.Linear(256 * 256, 2),
            nn.Softmax(dim=-1),
        )

    def forward(self, x):
        x = self.emb(x)          # (batch, seq_len, 256)
        x = self.transformer(x)  # (batch, seq_len, 256)
        x = self.fc(x)           # (batch, 2)
        return x
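
# A minimal shape sanity check, not part of the original models: the vocabulary
# size, sequence length, and layer sizes below are assumptions chosen only so the
# four classifiers can be instantiated and run on dummy token indices.
if __name__ == "__main__":
    vocab_num, max_seq_length = 20000, 256                      # assumed values for illustration
    dummy = torch.randint(0, vocab_num, (4, max_seq_length))    # batch of 4 fake sequences

    models = [
        model_DNN(vocab_num, embedding_dim=128, max_seq_length=max_seq_length, classify_num=2),
        model_LSTM(vocab_num, embedding_dim=128, max_seq_length=max_seq_length,
                   classify_num=2, hidden_size=64, num_layers=2),
        IMDB_TextCNN(vocab_num),
        IMDB_Transformer(vocab_num),
    ]
    for m in models:
        out = m(dummy)
        print(type(m).__name__, tuple(out.shape))  # each should print (4, 2)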