| import torch.nn as nn |
| from typing import Optional |
| from torch import nn, Tensor |
| import pdb |
class CategoryValueEncoder(nn.Module):
    """Look up an embedding for each integer category token and layer-normalize it.

    Args:
        num_embeddings: vocabulary size of the embedding table.
        embedding_dim: dimension of each embedding vector.
        padding_idx: optional index whose embedding stays zero and receives no gradient.
    """

    def __init__(
        self,
        num_embeddings: int,
        embedding_dim: int,
        padding_idx: Optional[int] = None,
    ):
        super().__init__()
        self.embedding = nn.Embedding(
            num_embeddings, embedding_dim, padding_idx=padding_idx
        )
        self.enc_norm = nn.LayerNorm(embedding_dim)

    def forward(self, x: Tensor) -> Tensor:
        """Return layer-normalized embeddings of shape ``(*x.shape, embedding_dim)``."""
        # Inputs may arrive as floats; cast to int64 for the table lookup.
        embedded = self.embedding(x.long())
        return self.enc_norm(embedded)
| |
class GeneEncoder(nn.Module):
    """Embed integer gene ids and layer-normalize the resulting vectors.

    Args:
        num_embeddings: vocabulary size of the embedding table.
        embedding_dim: dimension of each embedding vector.
        padding_idx: optional index whose embedding stays zero and receives no gradient.
    """

    def __init__(
        self,
        num_embeddings: int,
        embedding_dim: int,
        padding_idx: Optional[int] = None,
    ):
        super().__init__()
        self.embedding = nn.Embedding(
            num_embeddings, embedding_dim, padding_idx=padding_idx
        )
        self.enc_norm = nn.LayerNorm(embedding_dim)

    def forward(self, x: Tensor) -> Tensor:
        """Return layer-normalized embeddings of shape ``(*x.shape, embedding_dim)``.

        Note: unlike CategoryValueEncoder there is no cast here, so ``x`` must
        already be an integer tensor.
        """
        return self.enc_norm(self.embedding(x))
|
|
class PerturbationEmbedding(nn.Module):
    """Embed a fixed-length combination of perturbation ids and fuse them into
    a single vector, or a square matrix when ``output_matrix`` is True.

    Args:
        num_perturbations: vocabulary size of the perturbation embedding table.
        emb_dim: dimension of each individual perturbation embedding.
        max_comb_len: number of ids per sample ('mlp' fusion concatenates
            exactly this many embeddings).
        fusion_method: 'mlp' (concatenate then two-layer MLP) or 'sum'
            (element-wise sum of the embeddings).
        output_matrix: if True, ``forward`` returns a (B, emb_dim, emb_dim)
            matrix per sample instead of a vector.

    Raises:
        ValueError: if ``fusion_method`` is neither 'mlp' nor 'sum'.
    """

    def __init__(self, num_perturbations, emb_dim, max_comb_len=2, fusion_method='mlp', output_matrix=False):
        super().__init__()
        self.embedding = nn.Embedding(num_perturbations, emb_dim)
        self.fusion_method = fusion_method
        self.max_comb_len = max_comb_len
        self.output_matrix = output_matrix
        # Keep emb_dim so forward() can reshape to (B, emb_dim, emb_dim)
        # directly instead of recovering the side length via a float sqrt
        # of output_dim, which is indirect and fragile.
        self.emb_dim = emb_dim
        self.output_dim = emb_dim if not output_matrix else emb_dim * emb_dim

        if fusion_method == 'mlp':
            self.fusion = nn.Sequential(
                nn.Linear(emb_dim * max_comb_len, emb_dim * 2),
                nn.ReLU(),
                nn.Linear(emb_dim * 2, self.output_dim),
            )
        elif fusion_method == 'sum':
            self.fusion = None
        else:
            raise ValueError(f"Unsupported fusion method: {fusion_method}")

    def forward(self, ids):
        """Fuse the embeddings of ``ids`` (shape (B, max_comb_len)).

        Returns a (B, output_dim) tensor, or (B, emb_dim, emb_dim) when
        ``output_matrix`` is set.
        """
        emb = self.embedding(ids)  # (B, comb_len, emb_dim)

        if self.fusion_method == 'mlp':
            # Flatten the per-id embeddings into one row per sample;
            # reshape (not view) tolerates non-contiguous inputs.
            fused = self.fusion(emb.reshape(emb.size(0), -1))
            if self.output_matrix:
                # Side length comes straight from emb_dim (was
                # int(output_dim ** 0.5) — equivalent but obscure).
                return fused.view(fused.size(0), self.emb_dim, self.emb_dim)
            return fused

        # 'sum' fusion — the only other method __init__ accepts.
        out = emb.sum(dim=1)  # (B, emb_dim)
        if self.output_matrix:
            # Broadcast the summed vector across columns to form the matrix.
            # NOTE(review): this repeats the vector (a non-contiguous expand
            # view), not an outer product — presumably intentional; confirm
            # downstream consumers expect column-identical matrices.
            return out.unsqueeze(-1).expand(-1, -1, out.size(1))
        return out
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| |
| |
| |
| |
| |
| |