r"""Solves a 1D advection equation with a time-dependent velocity. ..math:: \partial_t u(x, t) + a(t) \partial_x u(x, t) = 0 The initial condition is a sine function, and periodic boundary conditions are applied. The equation is solved using the neural semi-Lagrangian scheme and a discrete PINN. """ # %% import matplotlib.pyplot as plt import torch from scimba_torch.approximation_space.nn_space import NNxSpace from scimba_torch.domain.mesh_based_domain.cuboid import Cuboid from scimba_torch.domain.meshless_domain.domain_1d import Segment1D from scimba_torch.integration.mesh_based_quadrature import RectangleMethod from scimba_torch.integration.monte_carlo import DomainSampler, TensorizedSampler from scimba_torch.integration.monte_carlo_parameters import UniformParametricSampler from scimba_torch.neural_nets.coordinates_based_nets.features import PeriodicMLP from scimba_torch.neural_nets.coordinates_based_nets.mlp import GenericMLP from scimba_torch.numerical_solvers.temporal_pde.discrete_pinn import DiscretePINN from scimba_torch.numerical_solvers.temporal_pde.neural_semilagrangian import ( Characteristic, NeuralSemiLagrangian, ) from scimba_torch.numerical_solvers.temporal_pde.time_discrete import ( TimeDiscreteCollocationProjector, TimeDiscreteNaturalGradientProjector, ) from scimba_torch.physical_models.temporal_pde.advection_diffusion_equation import ( AdvectionReactionDiffusionDirichletStrongForm, ) from scimba_torch.physical_models.temporal_pde.transport_equation import Transport1D from scimba_torch.plots.plot_time_discrete_scheme import plot_time_discrete_scheme from scimba_torch.utils.scimba_tensors import LabelTensor def f_rhs(w, t, x, mu): x_ = x.get_components() return 0 * x_ def f_bc(w, t, x, n, mu): x_ = x.get_components() return 0 * x_ def f_ini(x, mu): return exact(LabelTensor(torch.zeros_like(x.x)), x, mu) def exact(t, x, mu): x_ = x.get_components() t_ = t.get_components() x_ = (x_ - (t_ + 1) * torch.log(t_ + 1)) % 1 return torch.sin(2 * torch.pi * x_) def exact_foot( t: torch.Tensor, x: LabelTensor, mu: LabelTensor, dt: float ) -> torch.Tensor: x_ = x.get_components() x_n = (1 + t) * torch.log(1 + t) x_np1 = (1 + t + dt) * torch.log(1 + t + dt) return x_ + x_n - x_np1 def a(t, x, mu): return 1 + torch.log(1 + t) # %% def solve_with_discrete_pinn(T: float, dt: float, scheme: str): torch.random.manual_seed(0) domain_x = Segment1D((0, 1), is_main_domain=True) domain_mu = [] sampler = TensorizedSampler( [ DomainSampler(domain_x), UniformParametricSampler(domain_mu), ] ) space = NNxSpace(1, 0, PeriodicMLP, domain_x, sampler, layer_sizes=[40, 40, 40]) pde = Transport1D(space, init=f_ini, f=f_rhs, g=f_bc) projector = TimeDiscreteCollocationProjector(pde, rhs=f_ini) scheme = DiscretePINN(pde, projector, scheme=scheme) scheme.initialization(epochs=1500, verbose=True, n_collocation=3000) scheme.projector.save("ini_transport") scheme.projector = TimeDiscreteCollocationProjector(pde, rhs=f_ini) scheme.projector.load("ini_transport") scheme.projector.space.load_from_best_approx() scheme.solve(dt=dt, T=T, epochs=1000, n_collocation=3000, verbose=False) # print(">>>> Begin natural gradient >>>>>") # space2 = NNxSpace(1, 0, PeriodicMLP, domain_x, sampler, layer_sizes=[16, 8]) # pde2 = Transport1D(space2, init=f_ini, f=f_rhs, g=f_bc) # projector2 = NaturalGradientProjector(space2, rhs=f_ini) # scheme2 = DiscretePINN(pde2, projector2, scheme="rk4") # scheme2.initialization(epochs=500, verbose=True, n_collocation=3000) # scheme2.solve(dt=dt, T=T, epochs=15, n_collocation=3000, verbose=False) 
    return scheme  # scheme2


# %%
def solve_with_neural_sl(
    T: float,
    dt: float,
    with_natural_gradient: bool = True,
    with_classical_projector: bool = False,
    use_rectangle_method: bool = False,
):
    torch.random.manual_seed(0)

    if use_rectangle_method:
        domain_x = Cuboid([(0, 1)], is_main_domain=True)
        x_sampler = RectangleMethod
    else:
        domain_x = Segment1D((0, 1), is_main_domain=True)
        x_sampler = DomainSampler

    domain_mu = []
    sampler = TensorizedSampler(
        [
            x_sampler(domain_x),
            UniformParametricSampler(domain_mu),
        ]
    )

    if with_classical_projector:
        space = NNxSpace(1, 0, GenericMLP, domain_x, sampler, layer_sizes=[20, 40, 20])
        pde = AdvectionReactionDiffusionDirichletStrongForm(
            space, u0=f_ini, a=a, zero_diffusion=True
        )
        characteristic = Characteristic(pde, periodic=True)
        projector = TimeDiscreteCollocationProjector(pde, rhs=f_ini)
        scheme = NeuralSemiLagrangian(characteristic, projector)

        scheme.initialization(epochs=750, verbose=True, n_collocation=3000)
        scheme.projector.save("ini_transport_SL")
        scheme.projector = TimeDiscreteCollocationProjector(pde, rhs=f_ini)
        scheme.projector.load("ini_transport_SL")
        scheme.projector.space.load_from_best_approx()

        scheme.solve(
            dt=dt,
            final_time=T,
            epochs=250,
            n_collocation=2000,
            verbose=True,
            n_sub_steps=5,
        )

    if with_natural_gradient:
        space = NNxSpace(1, 0, PeriodicMLP, domain_x, sampler, layer_sizes=[16, 16, 16])
        pde = AdvectionReactionDiffusionDirichletStrongForm(
            space, u0=f_ini, a=a, zero_diffusion=True
        )
        characteristic = Characteristic(pde, periodic=True)
        projector = TimeDiscreteNaturalGradientProjector(pde, rhs=f_ini)
        scheme = NeuralSemiLagrangian(characteristic, projector)

        scheme.initialization(epochs=100, verbose=True, n_collocation=3000)
        scheme.projector.save("ini_transport_SL")

        scheme.solve(
            dt=dt,
            final_time=T,
            epochs=100,
            n_collocation=2000,
            verbose=True,
            n_sub_steps=5,
        )

    return scheme


# %%
if __name__ == "__main__":
    scheme = solve_with_discrete_pinn(1.0, 0.025, "rk4")
    plot_time_discrete_scheme(scheme, solution=exact, error=exact)
    plt.show()

# %%
if __name__ == "__main__":
    scheme = solve_with_neural_sl(
        0.1,
        0.1,
        with_natural_gradient=True,
        with_classical_projector=False,
        use_rectangle_method=True,
    )
    plot_time_discrete_scheme(scheme, solution=exact, error=exact)
    plt.show()

# %%
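# Optional sanity check (a standalone sketch, not part of the solvers above):
# the analytic characteristic feet returned by exact_foot,
#     x - int_t^{t+dt} a(s) ds
#       = x + (1 + t) * log(1 + t) - (1 + t + dt) * log(1 + t + dt),
# should agree with a numerical quadrature of the velocity a. The check uses
# only torch and the helpers defined above; mu is passed as None because
# exact_foot never reads it.
if __name__ == "__main__":
    t_n, dt_check = torch.tensor(0.3), 0.025
    x_check = LabelTensor(torch.linspace(0.0, 1.0, 11).reshape(-1, 1))
    feet_analytic = exact_foot(t_n, x_check, None, dt_check)

    # Trapezoidal quadrature of a(s) on [t_n, t_n + dt_check].
    s = torch.linspace(float(t_n), float(t_n) + dt_check, 2001)
    displacement = torch.trapezoid(a(s, None, None), s)
    feet_quadrature = x_check.get_components() - displacement

    err = (feet_analytic - feet_quadrature).abs().max().item()
    print(f"max |analytic foot - quadrature foot| = {err:.2e}")

# %%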