"""Approximation and plot of a 1D parametric function using a neural network. This example compares natural gradient preconditioning on three different parametric functions. """ import matplotlib.pyplot as plt import torch from scimba_torch.approximation_space.nn_space import ( NNxSpace, ) from scimba_torch.domain.meshless_domain.domain_1d import Segment1D from scimba_torch.integration.monte_carlo import DomainSampler, TensorizedSampler from scimba_torch.integration.monte_carlo_parameters import UniformParametricSampler from scimba_torch.neural_nets.coordinates_based_nets.mlp import GenericMLP from scimba_torch.numerical_solvers.collocation_projector import ( NaturalGradientProjector, ) from scimba_torch.plots.plots_nd import plot_abstract_approx_spaces from scimba_torch.utils.scimba_tensors import LabelTensor def func_test(x: LabelTensor, mu: LabelTensor): x1 = x.get_components() mu1 = mu.get_components() return mu1 * torch.sin(x1 * 2 * torch.pi) def func_test_no_params(x: LabelTensor, mu: LabelTensor): x1 = x.get_components() # mu1 = mu.get_components() return torch.sin(x1 * 2 * torch.pi) def func_test_2_params(x: LabelTensor, mu: LabelTensor): x1 = x.get_components() mu1, mu2 = mu.get_components() return mu1 * mu2 * torch.sin(x1 * 2 * torch.pi) domain_x = Segment1D((-1.0, 1.0), is_main_domain=True) sampler = TensorizedSampler( [DomainSampler(domain_x), UniformParametricSampler([(1.0, 2.0)])] ) domain_mu_empty = [] sampler_no_params = TensorizedSampler( [DomainSampler(domain_x), UniformParametricSampler(domain_mu_empty)] ) domain_mu_2 = [(1.0, 2.0), (1.0, 2.0)] sampler_2_params = TensorizedSampler( [DomainSampler(domain_x), UniformParametricSampler(domain_mu_2)] ) space = NNxSpace(1, 1, GenericMLP, domain_x, sampler, layer_sizes=[24] * 3) space_no_params = NNxSpace( 1, 0, GenericMLP, domain_x, sampler_no_params, layer_sizes=[24] * 3 ) space_2_params = NNxSpace( 1, 2, GenericMLP, domain_x, sampler_2_params, layer_sizes=[24] * 3 ) p = NaturalGradientProjector( space, func_test, type_linesearch="logarithmic_grid", data_linesearch={"M": 10, "interval": [0.0, 2.0]}, ) p_no_params = NaturalGradientProjector( space_no_params, func_test_no_params, type_linesearch="logarithmic_grid", data_linesearch={"M": 10, "interval": [0.0, 2.0]}, ) p_2_params = NaturalGradientProjector( space_2_params, func_test_2_params, type_linesearch="logarithmic_grid", data_linesearch={"M": 10, "interval": [0.0, 2.0]}, ) p.solve(epochs=90, n_collocation=2000, verbose=True) p.save("proj1d_p1") p_no_params.solve(epochs=90, n_collocation=2000, verbose=True) p_no_params.save("proj1d_no_params") p_2_params.solve(epochs=90, n_collocation=2000, verbose=True) p_2_params.save("proj1d_2_params") # p.load("proj1d_p1") # p.space.load_from_best_approx() # p_no_params.load("proj1d_no_params") # p_no_params.space.load_from_best_approx() # p_2_params.load("proj1d_2_params") # p_2_params.space.load_from_best_approx() plot_abstract_approx_spaces( p_no_params.space, domain_x, loss=p_no_params.losses, solution=func_test_no_params, error=func_test_no_params, ) plt.show() plot_abstract_approx_spaces( (p_no_params.space, p.space, p_2_params.space), domain_x, ([], [(1.0, 2.0)], domain_mu_2), loss=(p_no_params.losses, p.losses, p_2_params.losses), solution=(func_test_no_params, func_test, func_test_2_params), error=(func_test_no_params, func_test, func_test_2_params), parameters_values=([], [1.0], [1.0, 1.0]), ) plt.show() plot_abstract_approx_spaces( (p_2_params.space,), domain_x, (domain_mu_2,), loss=(p_2_params.losses,), 
    solution=(func_test_2_params,),
    error=(func_test_2_params,),
    parameters_values=([1.0, 1.0], [1.5, 1.5]),
)
plt.show()