
lightning

LightningLearner(module, num_workers=None, batch_size=32, max_epochs=100)

Bases: SupervisedLearner

A learner that uses PyTorch Lightning.

Initialize the learner.

Parameters:

    module (LightningModule): The PyTorch Lightning module. Required.
    num_workers (int | None): The number of workers to use for the DataLoader. Default: None.
    batch_size (int): The batch size to use for training. Default: 32.
    max_epochs (int): The maximum number of epochs to train for. Default: 100.
Source code in src/flowcean/learners/lightning.py
def __init__(
    self,
    module: lightning.LightningModule,
    num_workers: int | None = None,
    batch_size: int = 32,
    max_epochs: int = 100,
) -> None:
    """Initialize the learner.

    Args:
        module: The PyTorch Lightning module.
        num_workers: The number of workers to use for the DataLoader.
        batch_size: The batch size to use for training.
        max_epochs: The maximum number of epochs to train for.
    """
    self.module = module
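    # Fall back to os.cpu_count() when num_workers is not given, and to 0 if that is unavailable.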
    self.num_workers = num_workers or os.cpu_count() or 0
    self.max_epochs = max_epochs
    self.batch_size = batch_size
    self.optimizer = None
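
The snippet below is a minimal construction sketch, not an excerpt from the library: the import path is assumed from the source file location above, TinyModule is a placeholder LightningModule, and the training entry point inherited from SupervisedLearner is not shown on this page.

# Hypothetical usage sketch; the import path and TinyModule are assumptions.
import lightning
import torch

from flowcean.learners.lightning import LightningLearner


class TinyModule(lightning.LightningModule):
    """Placeholder LightningModule used only for illustration."""

    def __init__(self) -> None:
        super().__init__()
        self.model = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        inputs, targets = batch
        return torch.nn.functional.mse_loss(self.model(inputs), targets)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)


learner = LightningLearner(
    module=TinyModule(),
    num_workers=0,  # None would fall back to os.cpu_count()
    batch_size=64,
    max_epochs=10,
)
# Training is driven by the SupervisedLearner interface, not shown on this page.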

MultilayerPerceptron(learning_rate, input_size, output_size, hidden_dimensions=None)

Bases: LightningModule

A multilayer perceptron.

Initialize the model.

Parameters:

    learning_rate (float): The learning rate. Required.
    input_size (int): The size of the input. Required.
    output_size (int): The size of the output. Required.
    hidden_dimensions (list[int] | None): The dimensions of the hidden layers. Default: None (no hidden layers).
Source code in src/flowcean/learners/lightning.py
def __init__(
    self,
    learning_rate: float,
    input_size: int,
    output_size: int,
    hidden_dimensions: list[int] | None = None,
) -> None:
    """Initialize the model.

    Args:
        learning_rate: The learning rate.
        input_size: The size of the input.
        output_size: The size of the output.
        hidden_dimensions: The dimensions of the hidden layers.
    """
    super().__init__()
    if hidden_dimensions is None:
        hidden_dimensions = []
    self.save_hyperparameters()
    self.learning_rate = learning_rate

    layers: list[Module] = []
    hidden_size = input_size
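    # Stack a Linear + LeakyReLU pair for each hidden dimension, then a final Linear output layer.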
    for dimension in hidden_dimensions:
        layers.extend(
            (
                torch.nn.Linear(hidden_size, dimension),
                torch.nn.LeakyReLU(),
            ),
        )
        hidden_size = dimension
    layers.append(torch.nn.Linear(hidden_size, output_size))
    self.model = torch.nn.Sequential(*layers)
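
The following sketch shows how hidden_dimensions shapes the network and how the resulting model plugs into LightningLearner; the import path is again an assumption based on the source file location.

# Hypothetical usage sketch; the import path is assumed, not documented here.
from flowcean.learners.lightning import LightningLearner, MultilayerPerceptron

# hidden_dimensions=[16, 8] yields Linear(3, 16) -> LeakyReLU ->
# Linear(16, 8) -> LeakyReLU -> Linear(8, 1).
# With hidden_dimensions=None the model is a single Linear(3, 1) layer.
mlp = MultilayerPerceptron(
    learning_rate=1e-3,
    input_size=3,
    output_size=1,
    hidden_dimensions=[16, 8],
)

learner = LightningLearner(mlp, batch_size=32, max_epochs=100)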