Skip to content

Commit

Permalink
Update talks and publications.
Browse files Browse the repository at this point in the history
  • Loading branch information
searhein committed Jul 16, 2024
1 parent dd19370 commit d56a6d0
Show file tree
Hide file tree
Showing 6 changed files with 36 additions and 6 deletions.
12 changes: 12 additions & 0 deletions _bibliography/papers.bib
Original file line number Diff line number Diff line change
Expand Up @@ -1002,6 +1002,18 @@ @techreport{Heinlein:2024:MDD

# submitted
@techreport{Yamazaki:2024:PCB,
  author      = {Ichitaro Yamazaki and Alexander Heinlein and Sivasankaran Rajamanickam},
  title       = {Predicting Coarse Basis Functions for Two-Level Domain Decomposition Methods Using Graph Neural Networks},
  year        = {2024},
  month       = jul,
  abstract    = {For the robustness and numerical scalability of domain decomposition-based linear solvers, the incorporation of a coarse level, which provides global transport of information, is crucial. State-of-the-art spectral, or adaptive, methods can generate the basis functions of the coarse space, which are adapted to the specific properties of the target problem, and yield provably robust convergence for certain classes of problems.
However, their construction is computationally expensive and requires non-algebraic information. To improve the practicability of the solver, in this paper, we design a hierarchical math-informed local Graph Neural Network (GNN) to generate effective coarse-space basis functions. Our GNN uses only the local subdomain matrices available as the input to the algebraic linear solvers. This approach has several advantages including: 1) it is algebraic; 2) it is local and therefore as scalable as the classical domain decomposition solvers; and 3) the cost for training, inference, and generating data sets is much lower than that needed for approaches relying on the global matrix.
To study the potential of our GNN architecture, we present numerical results with homogeneous and heterogeneous problems.},
  keywords    = {submitted, reviewed, recent},
  bibtex_show = {true}
}

@techreport{Howard:2024:FBK,
author = {Amanda A. Howard and Bruno Jacob and Sarah H. Murphy and Alexander Heinlein and Panos Stinis},
title = {Finite basis Kolmogorov-Arnold networks: domain decomposition for data-driven and physics-informed problems},
Expand Down
6 changes: 3 additions & 3 deletions _bibliography/talks.bib
Original file line number Diff line number Diff line change
Expand Up @@ -56,12 +56,12 @@ @misc{Heinlein:2024:IGHASC:TBA
}

@misc{Heinlein:2024:NHR2024:TBA,
  title    = {Domain decomposition for neural networks},
  author   = {Alexander Heinlein},
  year     = {2024},
  abbr     = {NHR2024},
  note     = {Invited talk. NHR Conference 2024, Darmstadt, Germany, September 9-12},
  abstract = {Scientific machine learning (SciML) is a rapidly evolving research field that combines techniques from scientific computing and machine learning. This talk specifically addresses the application of domain decomposition methods to design neural network architectures and enhance neural network training. The discussion will explore the use of these techniques in neural network-based discretizations for solving partial differential equations with physics-informed neural networks (PINNs) and operator learning, as well as in classical machine learning tasks like semantic image segmentation using convolutional neural networks (CNNs). Computational results show that domain decomposition methods can improve efficiency — both in terms of time and memory — as well as enhance accuracy and robustness.},
  url      = {https://www.nhr4ces.de/2024/01/26/nhr-conference-2024/},
  keywords = {}
}
Expand All @@ -72,7 +72,7 @@ @misc{Heinlein:2024:IRMA2024:DDN
year = {2024},
abbr = {SU},
note = {Invited talk. Workshop on Scientific Machine Learning, Strasbourg University, Strasbourg, France, July 8-12},
abstract = {TBA},
abstract = {Scientific machine learning (SciML) is a rapidly evolving field of research that combines techniques from scientific computing and machine learning. In this context, this talk focuses on the enhancement of machine learning using classical numerical methods, in particular, on improving neural networks using domain decomposition-inspired architectures. In the first part of this talk, the domain decomposition paradigm is applied to the approximation of the solutions of partial differential equations (PDEs) using physics-informed neural networks (PINNs). It is observed that network architectures inspired by multi-level Schwarz domain decomposition methods can improve the performance for certain challenging problems, such as multiscale problems. Moreover, a classical machine learning task is considered, that is, image segmentation using convolutional neural networks (CNNs). Domain decomposition techniques offer a way of scaling up common CNN architectures, such as the U-Net. In particular, local subdomain networks learn local features and are coupled via a coarse network which incorporates global features.},
url = {https://irma.math.unistra.fr/~micheldansac/SciML2024/participants.html},
slides = {2024/2024-heinlein-irma2024-ddn/2024-heinlein-dd_nns.pdf},
keywords = {}
Expand Down
18 changes: 18 additions & 0 deletions _teaching/msc-thesis-2024-digital-twins-conveyor-belts.markdown
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
---
# Jekyll frontmatter for a thesis-project page (rendered via the page_thesis layout).
layout: page_thesis
university: tud
collaboration:
# NOTE(review): title is very generic; the filename suggests "Digital Twins for
# Conveyor Belts" — confirm the intended page title.
title: Digital Twins
co-supervisor:
student: Hangyu Xia
# Sequential thesis index; sibling pages in this commit renumber to 26-28,
# so 29 is the next free slot.
runningindex: 29
nolink: false
# Empty fields below are placeholders to be filled as the project progresses.
redirect:
project_description:
interim_thesis:
interim_presentation:
final_thesis:
final_presentation:
category: master_thesis
status: ongoing
---
2 changes: 1 addition & 1 deletion _teaching/msc-thesis-2024-geometry-learning-cells.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ collaboration: Leiden University
title: Geometry Learning for Complex Shaped Cells
co-supervisor: Qiyao Peng (Leiden University)
student:
runningindex: 25
runningindex: 26
nolink: false
redirect: /assets/pdf/thesis_projects/2024/2024-heinlein_peng-geometry_learning_cells/project_description.pdf
project_description: /assets/pdf/thesis_projects/2024/2024-heinlein_peng-geometry_learning_cells/project_description.pdf
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ collaboration: TU Berlin
title: Machine Learning-Accelerated Solvers for Computational Fluid Dynamics Simulations
co-supervisor: Merten Stender (TU Berlin)
student:
runningindex: 25
runningindex: 27
nolink: false
redirect:
project_description: /assets/pdf/thesis_projects/2024/2024-heinlein_stender-nn-convergence_acceleration/project_description.pdf
Expand Down
2 changes: 1 addition & 1 deletion _teaching/msc-thesis-2024-stabiliation-pinns.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ collaboration: German Aerospace Center (DLR)
title: Stabilization Methods for Physics-Informed Neural Networks
co-supervisor: Franziska Griese, Philipp Knechtges (DLR)
student:
runningindex: 26
runningindex: 28
nolink: false
redirect: /assets/pdf/thesis_projects/2024/2024-griese_heinlein_knetchges-stabilization_pinns/project_description.pdf
project_description: /assets/pdf/thesis_projects/2024/2024-griese_heinlein_knetchges-stabilization_pinns/project_description.pdf
Expand Down

0 comments on commit d56a6d0

Please sign in to comment.