PDEs with Dependent Variables on Heterogeneous Domains
A differential equation is said to have heterogeneous domains when its dependent variables depend on different independent variables:
\[u(x) + w(x, v) = \frac{\partial w(x, v)}{\partial v}\]
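In symbolic form, each dependent variable simply carries its own argument list. As a minimal standalone sketch of the equation above using ModelingToolkit (illustrative only, separate from the worked example below):

using ModelingToolkit
@parameters x v
@variables u(..) w(..)
Dv = Differential(v)
# u(x) depends only on x, while w(x, v) depends on both x and v
eq = u(x) + w(x, v) ~ Dv(w(x, v))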
Here, we set up and solve an arbitrary heterogeneous system:
using NeuralPDE, Lux, ModelingToolkit, Optimization, OptimizationOptimJL
import ModelingToolkit: Interval
@parameters x y
@variables p(..) q(..) r(..) s(..)
Dx = Differential(x)
Dy = Differential(y)
# 2D PDE
eq = p(x) + q(y) + Dx(r(x, y)) + Dy(s(y, x)) ~ 0
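# Note the signatures: r is a function of (x, y) while s is a function of (y, x);
# each dependent variable declares its own set of independent variables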
# Boundary conditions
bcs = [p(1) ~ 0.0f0, q(-1) ~ 0.0f0,
       r(x, -1) ~ 0.0f0, r(1, y) ~ 0.0f0,
       s(y, 1) ~ 0.0f0, s(-1, x) ~ 0.0f0]
# Domains of the independent variables
domains = [x ∈ Interval(0.0, 1.0),
           y ∈ Interval(0.0, 1.0)]
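# One network per dependent variable: 1-input chains for p(x) and q(y),
# 2-input chains for r(x, y) and s(y, x)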
numhid = 3
chains = [[Chain(Dense(1, numhid, σ), Dense(numhid, numhid, σ), Dense(numhid, 1))
           for i in 1:2]
          [Chain(Dense(2, numhid, σ), Dense(numhid, numhid, σ), Dense(numhid, 1))
           for i in 1:2]]
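# The i-th network is paired with the i-th dependent variable listed in the
# PDESystem; QuadratureTraining evaluates the loss via adaptive quadrature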
discretization = PhysicsInformedNN(chains, QuadratureTraining())
@named pde_system = PDESystem(eq, bcs, domains, [x, y], [p(x), q(y), r(x, y), s(y, x)])
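# Build the OptimizationProblem whose objective is the physics-informed loss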
prob = SciMLBase.discretize(pde_system, discretization)
callback = function (p, l)
    println("Current loss is: $l")
    return false
end
res = Optimization.solve(prob, BFGS(); callback = callback, maxiters = 100)
Training prints the loss at each iteration and returns the trained parameters as a ComponentVector, with one block of weights and biases per dependent variable (values truncated here):

retcode: Failure
u: ComponentVector{Float64}(depvar = (p = (layer_1 = (weight = …, bias = …), layer_2 = …, layer_3 = …), q = (layer_1 = …, …), r = (layer_1 = …, …), s = (layer_1 = …, …)))

Note that BFGS stops here at the maxiters = 100 cap without meeting its convergence criteria, which Optimization.jl reports as a Failure return code; the parameters are still the result of 100 optimization steps and can be evaluated as shown below.
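To inspect the learned functions, each network can be evaluated through discretization.phi. The following is a hedged sketch: the phi[i] call pattern and the res.u.depvar property access follow NeuralPDE's other system tutorials and may vary between versions.

# Hedged sketch: API pattern from NeuralPDE's system tutorials; adjust if your
# version differs. phi[i] wraps the i-th network; res.u.depvar holds its weights.
phi = discretization.phi
xs = ys = 0.0:0.1:1.0
p_predict = [phi[1]([x], res.u.depvar.p)[1] for x in xs]              # p(x)
q_predict = [phi[2]([y], res.u.depvar.q)[1] for y in ys]              # q(y)
r_predict = [phi[3]([x, y], res.u.depvar.r)[1] for x in xs, y in ys]  # r(x, y)
s_predict = [phi[4]([y, x], res.u.depvar.s)[1] for y in ys, x in xs]  # s(y, x)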