from typing import Any, Dict, List
from aeon.classification.deep_learning import FCNClassifier
import keras
from ml_grid.pipeline.data import pipe
from ml_grid.util.param_space import ParamSpace
class FCNClassifier_class:
    """A wrapper for the aeon FCNClassifier time-series classifier.

    Bundles the estimator instance, a human-readable method name, and a
    hyperparameter search grid, for consumption by the ml_grid pipeline.
    """

    def __init__(self, ml_grid_object: pipe):
        """Initializes the FCNClassifier_class.

        Args:
            ml_grid_object (pipe): The main data pipeline object, which contains
                data and global parameters.
        """
        random_state_val = ml_grid_object.global_params.random_state_val
        verbose_param = ml_grid_object.verbose
        # Resolve the configured search-space size into concrete settings;
        # "log_epoch" supplies the epoch count candidate for the grid below.
        param_space = ParamSpace(
            ml_grid_object.local_param_dict.get("param_space_size")
        )
        log_epoch = param_space.param_dict.get("log_epoch")

        # The aeon estimator to be tuned/fit by the pipeline.
        self.algorithm_implementation: FCNClassifier = FCNClassifier()
        # Name used to identify this method elsewhere in the grid results.
        self.method_name: str = "FCNClassifier"
        # Candidate values per hyperparameter; each key maps to the list of
        # values the search will try for that FCNClassifier argument.
        # NOTE(review): "n_filters" [128, 256, 128] and "kernel_size" [8, 5, 3]
        # look like per-layer lists (matching n_layers=3) accidentally supplied
        # as three separate grid candidates — if so they should be wrapped as
        # [[128, 256, 128]] and [[8, 5, 3]]. Verify against how the grid is
        # consumed before changing.
        # NOTE(review): the two keras optimizer instances are stateful and
        # shared across every candidate fit — consider factories/fresh
        # instances per fit to avoid carrying optimizer state between runs.
        self.parameter_space: Dict[str, List[Any]] = {
            "n_layers": [3],
            "n_filters": [128, 256, 128],
            "kernel_size": [8, 5, 3],
            "dilation_rate": [1],
            "strides": [1],
            "padding": ["same"],
            "activation": ["relu"],
            "use_bias": [True],
            "n_epochs": [log_epoch],
            "batch_size": [16],
            "use_mini_batch_size": [True],
            "random_state": [random_state_val],
            "verbose": [verbose_param],
            "loss": ["categorical_crossentropy"],
            "metrics": [None],
            "optimizer": [keras.optimizers.Adam(0.01), keras.optimizers.SGD(0.01)],
            # Deliberately excluded: n_jobs (not an FCNClassifier parameter),
            # and checkpointing options (file_path, save_best_model,
            # save_last_model, best_file_name, last_file_name, callbacks).
        }