diff --git a/_bibliography/papers.bib b/_bibliography/papers.bib index 7d0cbd31162a..8e06af6e3d7b 100644 --- a/_bibliography/papers.bib +++ b/_bibliography/papers.bib @@ -1002,6 +1002,18 @@ @techreport{Heinlein:2024:MDD # submitted +@techreport{Yamazaki:2024:PCB, + author = {Ichitaro Yamazaki and Alexander Heinlein and Sivasankaran Rajamanickam}, + title = {Predicting Coarse Basis Functions for Two-Level Domain Decomposition Methods Using Graph Neural Networks}, + year = {2024}, + month = jul, + abstract = {For the robustness and numerical scalability of domain decomposition-based linear solvers, the incorporation of a coarse level, which provides global transport of information, is crucial. State-of-the-art spectral, or adaptive, methods can generate the basis functions of the coarse space, which are adapted to the specific properties of the target problem, and yield provably robust convergence for certain classes of problems. +However, their construction is computationally expensive and requires non-algebraic information. To improve the practicability of the solver, in this paper, we design a hierarchical math-informed local Graph Neural Network (GNN) to generate effective coarse-space basis functions. Our GNN uses only the local subdomain matrices available as the input to the algebraic linear solvers. This approach has several advantages including: 1) it is algebraic; 2) it is local and therefore as scalable as the classical domain decomposition solvers; and 3) the cost for training, inference, and generating data sets is much lower than that needed for approaches relying on the global matrix. +To study the potential of our GNN architecture, we present numerical results with homogeneous and heterogeneous problems.}, + keywords = {submitted, reviewed, recent}, + bibtex_show = {true} +} + @techreport{Howard:2024:FBK, + author = {Amanda A. Howard and Bruno Jacob and Sarah H.
Murphy and Alexander Heinlein and Panos Stinis}, title = {Finite basis Kolmogorov-Arnold networks: domain decomposition for data-driven and physics-informed problems}, diff --git a/_bibliography/talks.bib b/_bibliography/talks.bib index 34be9f61dd60..5dc46fd2a357 100644 --- a/_bibliography/talks.bib +++ b/_bibliography/talks.bib @@ -56,12 +56,12 @@ @misc{Heinlein:2024:IGHASC:TBA } @misc{Heinlein:2024:NHR2024:TBA, - title = {TBA}, + title = {Domain decomposition for neural networks}, author = {Alexander Heinlein}, year = {2024}, abbr = {NHR2024}, note = {Invited talk. NHR Conference 2024, Darmstadt, Germany, September 9-12}, - abstract = {TBA}, + abstract = {Scientific machine learning (SciML) is a rapidly evolving research field that combines techniques from scientific computing and machine learning. This talk specifically addresses the application of domain decomposition methods to design neural network architectures and enhance neural network training. The discussion will explore the use of these techniques in neural network-based discretizations for solving partial differential equations with physics-informed neural networks (PINNs) and operator learning, as well as in classical machine learning tasks like semantic image segmentation using convolutional neural networks (CNNs). Computational results show that domain decomposition methods can improve efficiency — both in terms of time and memory — as well as enhance accuracy and robustness.}, url = {https://www.nhr4ces.de/2024/01/26/nhr-conference-2024/}, keywords = {} } @@ -72,7 +72,7 @@ @misc{Heinlein:2024:IRMA2024:DDN year = {2024}, abbr = {SU}, note = {Invited talk. Workshop on Scientific Machine Learning, Strasbourg University, Strasbourg, France, July 8-12}, - abstract = {TBA}, + abstract = {Scientific machine learning (SciML) is a rapidly evolving field of research that combines techniques from scientific computing and machine learning. 
In this context, this talk focuses on the enhancement of machine learning using classical numerical methods, in particular, on improving neural networks using domain decomposition-inspired architectures. In the first part of this talk, the domain decomposition paradigm is applied to the approximation of the solutions of partial differential equations (PDEs) using physics-informed neural networks (PINNs). It is observed that network architectures inspired by multi-level Schwarz domain decomposition methods can improve the performance for certain challenging problems, such as multiscale problems. Moreover, a classical machine learning task is considered, that is, image segmentation using convolutional neural networks (CNNs). Domain decomposition techniques offer a way of scaling up common CNN architectures, such as the U-Net. In particular, local subdomain networks learn local features and are coupled via a coarse network which incorporates global features.}, url = {https://irma.math.unistra.fr/~micheldansac/SciML2024/participants.html}, slides = {2024/2024-heinlein-irma2024-ddn/2024-heinlein-dd_nns.pdf}, keywords = {} diff --git a/_teaching/msc-thesis-2024-digital-twins-conveyor-belts.markdown b/_teaching/msc-thesis-2024-digital-twins-conveyor-belts.markdown new file mode 100644 index 000000000000..4b74e3f5d606 --- /dev/null +++ b/_teaching/msc-thesis-2024-digital-twins-conveyor-belts.markdown @@ -0,0 +1,18 @@ +--- +layout: page_thesis +university: tud +collaboration: +title: Digital Twins +co-supervisor: +student: Hangyu Xia +runningindex: 29 +nolink: false +redirect: +project_description: +interim_thesis: +interim_presentation: +final_thesis: +final_presentation: +category: master_thesis +status: ongoing +--- diff --git a/_teaching/msc-thesis-2024-geometry-learning-cells.markdown b/_teaching/msc-thesis-2024-geometry-learning-cells.markdown index eb3a512bdfac..4888b1280e0b 100644 --- a/_teaching/msc-thesis-2024-geometry-learning-cells.markdown +++
b/_teaching/msc-thesis-2024-geometry-learning-cells.markdown @@ -5,7 +5,7 @@ collaboration: Leiden University title: Geometry Learning for Complex Shaped Cells co-supervisor: Qiyao Peng (Leiden University) student: -runningindex: 25 +runningindex: 26 nolink: false redirect: /assets/pdf/thesis_projects/2024/2024-heinlein_peng-geometry_learning_cells/project_description.pdf project_description: /assets/pdf/thesis_projects/2024/2024-heinlein_peng-geometry_learning_cells/project_description.pdf diff --git a/_teaching/msc-thesis-2024-nn-convergence-acceleration.markdown b/_teaching/msc-thesis-2024-nn-convergence-acceleration.markdown index dbd9a569da8f..ef1a2dc2b862 100644 --- a/_teaching/msc-thesis-2024-nn-convergence-acceleration.markdown +++ b/_teaching/msc-thesis-2024-nn-convergence-acceleration.markdown @@ -5,7 +5,7 @@ collaboration: TU Berlin title: Machine Learning-Accelerated Solvers for Computational Fluid Dynamics Simulations co-supervisor: Merten Stender (TU Berlin) student: -runningindex: 25 +runningindex: 27 nolink: false redirect: project_description: /assets/pdf/thesis_projects/2024/2024-heinlein_stender-nn-convergence_acceleration/project_description.pdf diff --git a/_teaching/msc-thesis-2024-stabiliation-pinns.markdown b/_teaching/msc-thesis-2024-stabiliation-pinns.markdown index cd11cce0a57c..4d1ac996d4aa 100644 --- a/_teaching/msc-thesis-2024-stabiliation-pinns.markdown +++ b/_teaching/msc-thesis-2024-stabiliation-pinns.markdown @@ -5,7 +5,7 @@ collaboration: German Aerospace Center (DLR) title: Stabilization Methods for Physics-Informed Neural Networks co-supervisor: Franziska Griese, Philipp Knechtges (DLR) student: -runningindex: 26 +runningindex: 28 nolink: false redirect: /assets/pdf/thesis_projects/2024/2024-griese_heinlein_knetchges-stabilization_pinns/project_description.pdf project_description: /assets/pdf/thesis_projects/2024/2024-griese_heinlein_knetchges-stabilization_pinns/project_description.pdf