r"""Solves a 3D Poisson PDE with Dirichlet boundary conditions using PINNs.

.. math::

    -\Delta u = f \quad \text{in } \Omega

where :math:`x = (x_1, x_2, x_3) \in \Omega = (-1, 1) \times (-1, 1) \times (-1, 1)`
and :math:`f(x_1, x_2, x_3) = 3 \pi^2 \sin(\pi x_1) \sin(\pi x_2) \sin(\pi x_3)`,
so that the exact solution is
:math:`u(x_1, x_2, x_3) = \sin(\pi x_1) \sin(\pi x_2) \sin(\pi x_3)`,
with :math:`g = 0` on the boundary.

The boundary conditions are homogeneous Dirichlet conditions enforced strongly
(via a multiplicative post-processing that vanishes on the boundary).

The neural network is a simple MLP (Multilayer Perceptron).

The optimization is done using Natural Gradient Descent, Sketchy Natural
Gradient Descent, or Nystrom Natural Gradient Descent, with boundary
conditions handled strongly.
"""

# %%
import torch

from scimba_torch.approximation_space.nn_space import NNxSpace
from scimba_torch.domain.meshless_domain.domain_3d import Cube3D
from scimba_torch.integration.monte_carlo import DomainSampler, TensorizedSampler
from scimba_torch.integration.monte_carlo_parameters import UniformParametricSampler
from scimba_torch.neural_nets.coordinates_based_nets.mlp import GenericMLP
from scimba_torch.numerical_solvers.elliptic_pde.pinns import (
    NaturalGradientPinnsElliptic,
    NystromNaturalGradientPinnsElliptic,
    SketchyNaturalGradientPinnsElliptic,
)
from scimba_torch.physical_models.elliptic_pde.laplacians import (
    Laplacian2DDirichletStrongForm,
)
from scimba_torch.utils.scimba_tensors import LabelTensor


def f_rhs(x: LabelTensor, mu: LabelTensor):
    """Right-hand side f = 3*pi^2 * sin(pi x1) sin(pi x2) sin(pi x3)."""
    x1, x2, x3 = x.get_components()
    return (
        3
        * torch.pi**2
        * torch.sin(torch.pi * x1)
        * torch.sin(torch.pi * x2)
        * torch.sin(torch.pi * x3)
    )


def f_bc(x: LabelTensor, mu: LabelTensor):
    """Homogeneous Dirichlet boundary data g = 0.

    NOTE: the domain is 3D, so ``get_components()`` yields three components;
    the original two-target unpacking (``x1, _ = ...``) would raise ValueError.
    """
    x1, _, _ = x.get_components()
    return x1 * 0.0


def exact_sol(x: LabelTensor, mu: LabelTensor):
    """Exact solution u = sin(pi x1) sin(pi x2) sin(pi x3)."""
    x1, x2, x3 = x.get_components()
    return (
        torch.sin(torch.pi * x1) * torch.sin(torch.pi * x2) * torch.sin(torch.pi * x3)
    )


class Laplacian3DDirichletStrongFormNoParam(Laplacian2DDirichletStrongForm):
    """3D Laplacian PDE, Dirichlet BCs enforced strongly, no extra parameters."""

    def operator(self, w, x, mu):
        """Return -Δu computed with nested autograd gradients."""
        u = w.get_components()
        u_x, u_y, u_z = self.grad(u, x)
        u_xx, _, _ = self.grad(u_x, x)
        _, u_yy, _ = self.grad(u_y, x)
        _, _, u_zz = self.grad(u_z, x)
        return -(u_xx + u_yy + u_zz)

    def functional_operator(self, func, x, mu, theta):
        """Return -Δu as minus the trace of the Hessian via torch.func.jacrev."""
        grad_u = torch.func.jacrev(func, 0)
        hessian_u = torch.func.jacrev(grad_u, 0, chunk_size=None)(x, mu, theta)
        return -(hessian_u[..., 0, 0] + hessian_u[..., 1, 1] + hessian_u[..., 2, 2])


domain_x = Cube3D([(-1, 1), (-1, 1), (-1, 1)], is_main_domain=True)
sampler = TensorizedSampler([DomainSampler(domain_x), UniformParametricSampler([])])


# %%
def post_processing(inputs: torch.Tensor, x: LabelTensor, mu: LabelTensor):
    """Multiply the raw network output by a factor vanishing on ∂Ω.

    This enforces the homogeneous Dirichlet condition strongly: the factor
    (1 - xi^2) is zero whenever any coordinate hits ±1.
    """
    x1, x2, x3 = x.get_components()
    return inputs * (x1 + 1) * (1 - x1) * (x2 + 1) * (1 - x2) * (x3 + 1) * (1 - x3)


def functional_post_processing(u, xy, mu, theta):
    """Functional (torch.func-compatible) version of :func:`post_processing`."""
    return (
        u(xy, mu, theta)
        * (xy[0] + 1)
        * (1 - xy[0])
        * (xy[1] + 1)
        * (1 - xy[1])
        * (xy[2] + 1)
        * (1 - xy[2])
    )


def run_solver(pinns, solving_mode: str, post_fix: str) -> None:
    """Solve, resume, or load a PINN and report training time and best loss.

    Args:
        pinns: an elliptic-PINN solver exposing load/solve/save and the
            ``training_time`` / ``best_loss`` attributes.
        solving_mode: "new" (solve from scratch), "resume" (load then keep
            solving), or "load" (load only; solve as fallback if load fails).
        post_fix: file post-fix used for both loading and saving.
    """
    new_solving = solving_mode == "new"
    resume_solving = solving_mode == "resume"
    try_load = resume_solving or (not new_solving)
    load_ok = try_load and pinns.load(__file__, post_fix)
    if (not load_ok) or resume_solving or new_solving:
        pinns.solve(epochs=100, n_collocation=1000, verbose=False)
        pinns.save(__file__, post_fix)
    print(f"Training time: {pinns.training_time:.2f} seconds")
    print(f"Best Loss : {pinns.best_loss:.2e}")


print("@@@@@@@@@@@@@@@ ENG @@@@@@@@@@@@@@@@@@@")
torch.manual_seed(3)
space = NNxSpace(
    1,
    0,
    GenericMLP,
    domain_x,
    sampler,
    layer_sizes=[32, 32],
    post_processing=post_processing,
)
pde = Laplacian3DDirichletStrongFormNoParam(space, f=f_rhs, g=f_bc)
pinns = NaturalGradientPinnsElliptic(
    pde, bc_type="strong", functional_post_processing=functional_post_processing
)
run_solver(pinns, solving_mode="load", post_fix="pinns_ENG")


print("@@@@@@@@@@@@@@@ SNG @@@@@@@@@@@@@@@@@@@")
torch.manual_seed(3)
space2 = NNxSpace(
    1,
    0,
    GenericMLP,
    domain_x,
    sampler,
    layer_sizes=[32, 32],
    post_processing=post_processing,
)
pde2 = Laplacian3DDirichletStrongFormNoParam(space2, f=f_rhs, g=f_bc)
pinns2 = SketchyNaturalGradientPinnsElliptic(
    pde2,
    bc_type="strong",
    functional_post_processing=functional_post_processing,
    tol=1e-10,
)
run_solver(pinns2, solving_mode="load", post_fix="pinns_SNG")


print("@@@@@@@@@@@@@@@ NNG @@@@@@@@@@@@@@@@@@@")
torch.manual_seed(3)
space3 = NNxSpace(
    1,
    0,
    GenericMLP,
    domain_x,
    sampler,
    layer_sizes=[32, 32],
    post_processing=post_processing,
)
pde3 = Laplacian3DDirichletStrongFormNoParam(space3, f=f_rhs, g=f_bc)
pinns3 = NystromNaturalGradientPinnsElliptic(
    pde3,
    bc_type="strong",
    functional_post_processing=functional_post_processing,
)
run_solver(pinns3, solving_mode="new", post_fix="pinns_NNG")


# plot_abstract_approx_spaces(
#     (
#         pinns.space,
#         pinns2.space,
#     ),
#     domain_x,
#     loss=(
#         pinns.losses,
#         pinns2.losses,
#     ),
#     residual=(
#         pinns.pde,
#         pinns2.pde,
#     ),
#     error=exact_sol,
#     draw_contours=True,
#     n_drawn_contours=20,
# )
#
# plt.show()