Autoencoder

LitAutoEncoder

Bases: LightningModule

A simple autoencoder model.

Parameters:

Name    | Type       | Description                                                   | Default
------- | ---------- | ------------------------------------------------------------- | --------
encoder | Sequential | The encoder component, responsible for encoding input data.   | required
decoder | Sequential | The decoder component, responsible for decoding encoded data. | required
Source code in packages/lit-auto-encoder/src/lit_auto_encoder/auto_encoder.py
class LitAutoEncoder(L.LightningModule):
    """A simple autoencoder model.

    Args:
        encoder: The encoder component, responsible for encoding input data.
        decoder: The decoder component, responsible for decoding encoded data.
    """

    def __init__(self, encoder: nn.Sequential, decoder: nn.Sequential) -> None:
        super().__init__()
        self.encoder = encoder
        self.decoder = decoder

    def training_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> Tensor:
        """Performs a single training step for the model.

        Args:
            batch (Tuple[Tensor, Tensor]): A tuple containing the input data (x) and
                the corresponding labels (y).
            batch_idx (int): The index of the current batch.

        Returns:
            Tensor: The computed loss for the current training step.
        """

        x, y = batch
        x = x.view(x.size(0), -1)
        z = self.encoder(x)
        x_hat = self.decoder(z)
        loss = nn.functional.mse_loss(x_hat, x)
        # Logging to TensorBoard (if installed) by default
        # self.log("train_loss", loss)
        return loss

    def configure_optimizers(self) -> optim.Adam:
        """Configure the Adam optimizer."""
        optimizer = optim.Adam(self.parameters(), lr=1e-3)
        return optimizer
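
A minimal usage sketch, assuming torch and lightning are installed and 28×28 inputs (e.g. MNIST); the latent sizes are illustrative and the import path is inferred from the source layout shown above:

import lightning as L
import torch
from torch import nn
from torch.utils.data import DataLoader, TensorDataset

from lit_auto_encoder.auto_encoder import LitAutoEncoder

# Illustrative encoder/decoder pair: 784 -> 3 -> 784.
encoder = nn.Sequential(nn.Linear(28 * 28, 64), nn.ReLU(), nn.Linear(64, 3))
decoder = nn.Sequential(nn.Linear(3, 64), nn.ReLU(), nn.Linear(64, 28 * 28))
model = LitAutoEncoder(encoder, decoder)

# Fake dataset standing in for real image data; labels are unused by the model.
data = TensorDataset(torch.randn(256, 1, 28, 28), torch.zeros(256, dtype=torch.long))
trainer = L.Trainer(max_epochs=1)
trainer.fit(model, DataLoader(data, batch_size=32))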

configure_optimizers()

Configure the Adam optimizer.

Source code in packages/lit-auto-encoder/src/lit_auto_encoder/auto_encoder.py
def configure_optimizers(self) -> optim.Adam:
    """Configure the Adam optimizer."""
    optimizer = optim.Adam(self.parameters(), lr=1e-3)
    return optimizer
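
The optimizer and learning rate are hard-coded here. To change them, overriding configure_optimizers in a subclass is the usual Lightning pattern; a hypothetical sketch (the subclass and its hyperparameters are not part of the package):

from torch import optim

class LitAutoEncoderSGD(LitAutoEncoder):
    def configure_optimizers(self) -> optim.SGD:
        # Swap Adam for SGD with momentum; values are illustrative.
        return optim.SGD(self.parameters(), lr=1e-2, momentum=0.9)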

training_step(batch, batch_idx)

Performs a single training step for the model.

Parameters:

Name      | Type                  | Description                                                              | Default
--------- | --------------------- | ------------------------------------------------------------------------ | --------
batch     | Tuple[Tensor, Tensor] | A tuple containing the input data (x) and the corresponding labels (y).  | required
batch_idx | int                   | The index of the current batch.                                           | required

Returns:

Name   | Type   | Description
------ | ------ | -------------------------------------------------
Tensor | Tensor | The computed loss for the current training step.

Source code in packages/lit-auto-encoder/src/lit_auto_encoder/auto_encoder.py
def training_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> Tensor:
    """Performs a single training step for the model.

    Args:
        batch (Tuple[Tensor, Tensor]): A tuple containing the input data (x) and
            the corresponding labels (y).
        batch_idx (int): The index of the current batch.

    Returns:
        Tensor: The computed loss for the current training step.
    """

    x, y = batch
    x = x.view(x.size(0), -1)
    z = self.encoder(x)
    x_hat = self.decoder(z)
    loss = nn.functional.mse_loss(x_hat, x)
    # Logging to TensorBoard (if installed) by default
    # self.log("train_loss", loss)
    return loss
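
The step can also be called directly, which helps when sanity-checking shapes before a full fit. A sketch assuming the 784-dimensional encoder from the usage example above (names and sizes are illustrative):

import torch
from torch import nn

from lit_auto_encoder.auto_encoder import LitAutoEncoder

encoder = nn.Sequential(nn.Linear(28 * 28, 64), nn.ReLU(), nn.Linear(64, 3))
decoder = nn.Sequential(nn.Linear(3, 64), nn.ReLU(), nn.Linear(64, 28 * 28))
model = LitAutoEncoder(encoder, decoder)

x = torch.randn(8, 1, 28, 28)           # fake batch of 28x28 images
y = torch.zeros(8, dtype=torch.long)    # labels are accepted but unused
loss = model.training_step((x, y), batch_idx=0)
print(loss.item())                      # reconstruction MSE for the batch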