begin work on fine-tuning from base enformer
1 parent 2a706e6 · commit 41f107c
Showing 3 changed files with 93 additions and 2 deletions.
import torch
from contextlib import contextmanager
from torch import nn, einsum
from einops import rearrange
from enformer_pytorch.enformer_pytorch import Enformer, poisson_loss

def exists(val):
    return val is not None

@contextmanager
def null_context():
    yield

class ContextAdapterWrapper(nn.Module):
    def __init__(
        self,
        *,
        enformer,
        enformer_dim,
        context_dim
    ):
        super().__init__()
        assert isinstance(enformer, Enformer)
        self.enformer = enformer

        # projects a per-track context embedding to the weights and bias of a
        # per-track linear head over the enformer embeddings (width enformer_dim * 2)
        self.to_context_weights = nn.Parameter(torch.randn(context_dim, enformer_dim * 2))
        self.to_context_bias = nn.Parameter(torch.randn(context_dim))

    def forward(
        self,
        seq,
        *,
        context,
        target = None,
        freeze_enformer = False
    ):
        # run the base enformer without gradients when frozen, in a normal context otherwise
        enformer_context = torch.no_grad if freeze_enformer else null_context

        with enformer_context():
            _, embeddings = self.enformer(seq, return_embeddings = True)

        if freeze_enformer:
            embeddings.detach_()

        # derive one linear head per target track from its context embedding
        weights = einsum('t d, d e -> t e', context, self.to_context_weights)
        bias = einsum('t d, d -> t', context, self.to_context_bias)

        # (batch, sequence bins, tracks) predictions
        pred = einsum('b n d, t d -> b n t', embeddings, weights) + bias

        if not exists(target):
            return pred

        return poisson_loss(pred, target)
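
A minimal usage sketch of the new wrapper follows. The constructor arguments, dimensions, and tensor shapes are illustrative assumptions, not values taken from this commit: a 196,608 bp input pooled to 896 output bins, 4 target tracks, and a 256-dimensional context embedding per track.

import torch
from enformer_pytorch.enformer_pytorch import Enformer
# ContextAdapterWrapper comes from the new file added in this commit

# assumed constructor arguments, for illustration only
enformer = Enformer(dim = 1536, depth = 11, heads = 8)

wrapper = ContextAdapterWrapper(
    enformer = enformer,
    enformer_dim = 1536,   # embeddings come out at 2 * 1536 = 3072 wide
    context_dim = 256      # assumed width of the per-track context embedding
)

seq = torch.randint(0, 5, (1, 196_608))   # index-encoded DNA, assumed input format
context = torch.randn(4, 256)             # one context embedding per target track
target = torch.rand(1, 896, 4)            # assumed (batch, bins, tracks) target

# without a target, the wrapper returns (batch, bins, tracks) predictions
pred = wrapper(seq, context = context)

# passing a target returns the poisson loss; note the head output is unconstrained
# here, so the loss is only well-defined once predictions become positive.
# with freeze_enformer = True, only the adapter parameters receive gradients
loss = wrapper(seq, context = context, target = target, freeze_enformer = True)
loss.backward()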