import torch.nn as nn
from models.clustering_module.kernel_width import get_kernel_width_module


class HPDDC(nn.Module):
    """Single-hidden-layer DDC clustering head.

    Maps an input feature vector through one fully connected hidden layer
    (ReLU, optionally batch-normalized) and a softmax output layer that
    produces soft cluster assignments. A kernel-width module is also built
    from the hidden-layer size; it is stored on the instance but not used
    inside ``forward`` (presumably consumed by an external DDC loss —
    TODO confirm against the caller).

    Args:
        cfg: Configuration object. Fields read here: ``n_hidden``,
            ``use_bn``, ``n_clusters``, ``kernel_width_config``.
        input_size: Sequence whose first element is the input feature
            dimension.
    """

    def __init__(self, cfg, input_size):
        super().__init__()

        # Hidden projection: Linear -> ReLU, with BatchNorm appended
        # only when the config asks for it.
        layers = [nn.Linear(input_size[0], cfg.n_hidden), nn.ReLU()]
        if cfg.use_bn:
            layers += [nn.BatchNorm1d(num_features=cfg.n_hidden)]
        self.hidden = nn.Sequential(*layers)

        # Soft cluster assignments over cfg.n_clusters classes.
        self.output = nn.Sequential(
            nn.Linear(cfg.n_hidden, cfg.n_clusters),
            nn.Softmax(dim=1),
        )

        # Kernel-width module sized to the hidden representation;
        # kept as an attribute for use outside this module.
        self.kernel_width = get_kernel_width_module(
            cfg.kernel_width_config, input_size=[cfg.n_hidden]
        )

    def forward(self, x):
        """Return ``(hidden, output)``: the hidden representation and the
        softmax cluster-assignment probabilities for input batch ``x``."""
        embedding = self.hidden(x)
        return embedding, self.output(embedding)