base

class thefittest.base.EphemeralNode(generator: Callable)
class thefittest.base.FunctionalNode(value: Any, sign: str | None = None)
class thefittest.base.Net(inputs: Set | None = None, hidden_layers: List | None = None, outputs: Set | None = None, connects: ndarray[tuple[Any, ...], dtype[int64]] | None = None, weights: Tensor | None = None, activs: Dict[int, int] | None = None)
   compile_torch(device: 'torch.device' | str | None = None) → Net
   copy() → Net
   cpu() → Net
   cuda() → Net
   ensure_compiled() → None
   forward(X: torch.Tensor, weights: 'torch.Tensor' | None = None, keep_weight_dim: bool = False, autocast_input: bool = True) → torch.Tensor
   get_graph() → Dict
   plot(ax=None) → None
   signature() → str
   to(device: str | 'torch.device', dtype: 'torch.dtype' | None = None) → Net
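Example (a usage sketch). It assumes a Net instance obtained elsewhere, e.g. from one of the library's trained neuroevolution models; constructing a consistent net by hand from inputs, connects, weights and activs is not shown. Only the methods documented above are used, and the device-selection logic is illustrative.

   import torch

   from thefittest.base import Net

   def predict_with_net(net: Net, X: torch.Tensor) -> torch.Tensor:
       # Move the network to the best available device, make sure the torch
       # graph is compiled, then run a forward pass on the same device.
       device = "cuda" if torch.cuda.is_available() else "cpu"
       net = net.to(device)
       net.ensure_compiled()
       return net.forward(X.to(device))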
class thefittest.base.TerminalNode(value: Any, name: str)
class thefittest.base.Tree(nodes: List[FunctionalNode | TerminalNode | EphemeralNode], n_args: List[int] | ndarray[tuple[Any, ...], dtype[int64]] | None = None)
   concat(index: int, other_tree: Tree) → Tree
   copy() → Tree
   classmethod full_growing_method(uniset: UniversalSet, max_level: int) → Tree
   get_args_id(index: int) → ndarray[tuple[Any, ...], dtype[int64]]
   get_common_region(other_trees: List | ndarray[tuple[Any, ...], dtype[_ScalarT]]) → Tuple
   get_graph(keep_id: bool = False) → Dict
   get_levels(index: int) → ndarray[tuple[Any, ...], dtype[int64]]
   get_max_level() → int64
   classmethod growing_method(uniset: UniversalSet, max_level: int) → Tree
   plot(ax: Any = None) → None
   classmethod random_tree(uniset: UniversalSet, max_level: int) → Tree
   set_terminals(**kwargs: Any) → Tree
   signature() → str
   subtree(index: int) → Tree
   subtree_id(index: int) → Tuple[int, int]
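Example (a usage sketch). The universal set is built with init_symbolic_regression_uniset, documented at the end of this section; the random data and the chosen depth are illustrative only.

   import numpy as np

   from thefittest.base import Tree, init_symbolic_regression_uniset

   X = np.random.rand(30, 2)
   uniset = init_symbolic_regression_uniset(X)

   # Grow a full tree of the requested depth, then inspect and slice it.
   tree = Tree.full_growing_method(uniset, max_level=4)
   print(tree.signature())      # string form of the expression
   print(tree.get_max_level())  # depth actually reached

   clone = tree.copy()          # independent copy of the tree
   branch = tree.subtree(1)     # subtree rooted at node index 1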
class thefittest.base.UniversalSet(functional_set: Tuple[FunctionalNode, ...], terminal_set: Tuple[TerminalNode | EphemeralNode, ...])
thefittest.base.init_symbolic_regression_uniset(X: ndarray[tuple[Any, ...], dtype[float64]], functional_set_names: Tuple[str, ...] = ('cos', 'sin', 'add', 'sub', 'mul', 'div'), ephemeral_node_generators: Tuple[Callable, ...] | None = None)
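Example (a usage sketch). The functional set names are taken from the documented defaults; treating each ephemeral generator as a zero-argument callable that returns a random constant is an assumption.

   import numpy as np

   from thefittest.base import init_symbolic_regression_uniset

   X = np.random.rand(100, 3)

   # Arithmetic-only functional set plus a random-constant ephemeral node.
   uniset = init_symbolic_regression_uniset(
       X,
       functional_set_names=("add", "sub", "mul", "div"),
       ephemeral_node_generators=(lambda: np.round(np.random.uniform(-1.0, 1.0), 2),),
   )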