PDEs with Dependent Variables on Heterogeneous Domains
A differential equation is said to have heterogeneous domains when its dependent variables depend on different independent variables:
\[u(x) + w(x, v) = \frac{\partial w(x, v)}{\partial v}\]
Here, we write an arbitrary heterogeneous system:
using NeuralPDE, Lux, ModelingToolkit, Optimization, OptimizationOptimJL
import DomainSets: Interval

@parameters x y
@variables p(..) q(..) r(..) s(..)
Dx = Differential(x)
Dy = Differential(y)

# 2D PDE with heterogeneous domains: each dependent variable lives on a
# different set of independent variables (p on x, q on y, r on (x, y),
# s on (y, x)).
eq = p(x) + q(y) + Dx(r(x, y)) + Dy(s(y, x)) ~ 0

# Boundary conditions.
# NOTE(review): the conditions imposed at -1 lie outside the declared
# domains [0, 1] × [0, 1] below — confirm against the intended problem.
bcs = [p(1) ~ 0.0f0, q(-1) ~ 0.0f0,
    r(x, -1) ~ 0.0f0, r(1, y) ~ 0.0f0,
    s(y, 1) ~ 0.0f0, s(-1, x) ~ 0.0f0]

# Space domains
domains = [x ∈ Interval(0.0, 1.0),
    y ∈ Interval(0.0, 1.0)]

# One neural network per dependent variable: 1-input chains for p(x) and
# q(y), 2-input chains for r(x, y) and s(y, x).
numhid = 3
chains = [[Chain(Dense(1, numhid, σ), Dense(numhid, numhid, σ), Dense(numhid, 1))
           for i in 1:2]
          [Chain(Dense(2, numhid, σ), Dense(numhid, numhid, σ), Dense(numhid, 1))
           for i in 1:2]]

discretization = PhysicsInformedNN(chains, QuadratureTraining())
@named pde_system = PDESystem(eq, bcs, domains, [x, y], [p(x), q(y), r(x, y), s(y, x)])
prob = SciMLBase.discretize(pde_system, discretization)

# Print the loss at every optimizer iteration; returning `false` lets the
# optimization continue.
callback = function (p, l)
    println("Current loss is: $l")
    return false
end

# Pass the callback to solve — it was previously defined but never used,
# so the per-iteration loss was never printed.
res = Optimization.solve(prob, BFGS(); callback = callback, maxiters = 100)
Sample output (values will differ between runs due to random network initialization; output truncated):

```
retcode: Failure
u: ComponentVector{Float64}(depvar = (p = (layer_1 = (weight = ..., bias = ...),
                                           layer_2 = ..., layer_3 = ...),
                                      q = (layer_1 = ..., layer_2 = ..., layer_3 = ...),
                                      r = (layer_1 = ..., layer_2 = ..., layer_3 = ...),
                                      s = (layer_1 = ..., layer_2 = ..., layer_3 = ...)))
```