Skip to content

Commit

Permalink
resolves issue #238: remove custom_encoder option
Browse files Browse the repository at this point in the history
  • Loading branch information
Isha Gokhale committed Oct 17, 2023
1 parent 86630e3 commit b7c2ff8
Show file tree
Hide file tree
Showing 4 changed files with 9 additions and 19 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),

## [Unreleased]

### Removed

- Removed the `custom_encoder` option from `config.yaml` and the model runner.
### Added

- Checkpoints include model parameters, allowing for mismatches with the provided configuration file.
Expand Down
3 changes: 0 additions & 3 deletions casanovo/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,6 @@ dropout: 0.0
# Number of dimensions to use for encoding peak intensity
# Projected up to ``dim_model`` by default and summed with the peak m/z encoding
dim_intensity:
# Option to provide a pre-trained spectrum encoder when training
# Trained from scratch by default
custom_encoder:
# Max decoded peptide length
max_length: 100
# Number of warmup iterations for learning rate scheduler
Expand Down
22 changes: 7 additions & 15 deletions casanovo/denovo/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,6 @@ class Spec2Pep(pl.LightningModule, ModelMixin):
(``dim_model - dim_intensity``) are reserved for encoding the m/z value.
If ``None``, the intensity will be projected up to ``dim_model`` using a
linear layer, then summed with the m/z encoding for each peak.
custom_encoder : Optional[Union[SpectrumEncoder, PairedSpectrumEncoder]]
A pretrained encoder to use. The ``dim_model`` of the encoder must be
the same as that specified by the ``dim_model`` parameter here.
max_length : int
The maximum peptide length to decode.
residues: Union[Dict[str, float], str]
Expand Down Expand Up @@ -97,7 +94,6 @@ def __init__(
n_layers: int = 9,
dropout: float = 0.0,
dim_intensity: Optional[int] = None,
custom_encoder: Optional[SpectrumEncoder] = None,
max_length: int = 100,
residues: Union[Dict[str, float], str] = "canonical",
max_charge: int = 5,
Expand All @@ -119,17 +115,13 @@ def __init__(
super().__init__()
self.save_hyperparameters()

# Build the model.
if custom_encoder is not None:
self.encoder = custom_encoder
else:
self.encoder = SpectrumEncoder(
dim_model=dim_model,
n_head=n_head,
dim_feedforward=dim_feedforward,
n_layers=n_layers,
dropout=dropout,
dim_intensity=dim_intensity,
self.encoder = SpectrumEncoder(
dim_model=dim_model,
n_head=n_head,
dim_feedforward=dim_feedforward,
n_layers=n_layers,
dropout=dropout,
dim_intensity=dim_intensity,
)
self.decoder = PeptideDecoder(
dim_model=dim_model,
Expand Down
1 change: 0 additions & 1 deletion casanovo/denovo/model_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,6 @@ def initialize_model(self, train: bool) -> None:
n_layers=self.config.n_layers,
dropout=self.config.dropout,
dim_intensity=self.config.dim_intensity,
custom_encoder=self.config.custom_encoder,
max_length=self.config.max_length,
residues=self.config.residues,
max_charge=self.config.max_charge,
Expand Down

0 comments on commit b7c2ff8

Please sign in to comment.