@phdthesis{Alexander, author = {Alexander, Anne}, title = {Quantitative Erfassung von Risiken und Simulation ihrer Auswirkungen auf den Verlauf eines Bauprojektes}, doi = {10.25643/bauhaus-universitaet.2051}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130927-20514}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {171}, abstract = {Das Bauwesen hat sich in den letzten Jahren durch die Globalisierung des Marktes verbunden mit einer verst{\"a}rkten Nutzung moderner Technologien stark gewandelt. Die Planung und die Durchf{\"u}hrung von Bauvorhaben werden zunehmend komplexer und sind mit erh{\"o}hten Risiken verbunden. Geld- und Zeitressourcen werden bei einem immer h{\"a}rter werdenden Konkurrenzkampf knapper. Das Projektmanagement stellt L{\"o}sungsans{\"a}tze bereit, um Bauvorhaben auch unter erschwerten Bedingungen und erh{\"o}hten Risiken erfolgreich zum Abschluss zu bringen. Dabei hat ein systematisches Risikomanagement beginnend bei der Projektentwicklung bis zum Projektabschluss eine f{\"u}r den Projekterfolg entscheidende Bedeutung. Ziel der Arbeit ist es, eine quantitative Risikoerfassung f{\"u}r Projektmanager als professionelle Bauherrenvertretung und die Simulation der Risikoauswirkungen auf den Verlauf eines Projektes w{\"a}hrend der Planungs- und Bauphase zu erm{\"o}glichen. Mit Hilfe eines abstrakten Modells soll eine differenzierte, praxisnahe Simulation durchf{\"u}hrbar sein, die die verschiedenen Arten der Leistungs- und Kostenentstehung widerspiegelt. Parallel dazu soll die Beschreibung von Risiken so abstrahiert werden, dass beliebige Risiken quantitativ erfassbar und anschließend ihre Auswirkungen inklusive m{\"o}glicher Gegenmaßnahmen in das Modell integrierbar sind. Anhand zweier Beispiele werden die unterschiedlichen Einsatzm{\"o}glichkeiten der quantitativen Erfassung von Projektrisiken und der anschließenden Simulation ihrer Auswirkungen aufgezeigt. Bei dem ersten Beispiel, einem realen, bereits abgeschlossenen Schieneninfrastrukturprojekt, wird die Wirksamkeit einer vorbeugenden Maßnahme gegen ein Projektrisiko untersucht. Im zweiten Beispiel wird ein Planspielansatz zur praxisnahen Aus- und Weiterbildung von Projektmanagern entwickelt. Inhalt des Planspiels ist die Planung und Errichtung eines privatfinanzierten, {\"o}ffentlichen Repr{\"a}sentationsbaus mit teilweiser Fremdnutzung.}, subject = {Risiko}, language = {de} } @phdthesis{Budarapu, author = {Budarapu, Pattabhi Ramaiah}, title = {Adaptive multiscale methods for fracture}, doi = {10.25643/bauhaus-universitaet.2391}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150507-23918}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {One major research focus in the Material Science and Engineering Community in the past decade has been to obtain a more fundamental understanding of the phenomenon 'material failure'. Such an understanding is critical for engineers and scientists developing new materials with higher strength and toughness, developing robust designs against failure, or for those concerned with an accurate estimate of a component's design life. Defects like cracks and dislocations evolve at nano scales and influence the macroscopic properties such as strength, toughness and ductility of a material. In engineering applications, the global response of the system is often governed by the behaviour at the smaller length scales. Hence, the sub-scale behaviour must be computed accurately for good predictions of the full scale behaviour.
Molecular Dynamics (MD) simulations promise to reveal the fundamental mechanics of material failure by modeling the atom-to-atom interactions. Since the atomistic dimensions are of the order of Angstroms, approximately 85 billion atoms are required to model a 1 micro-m^3 volume of Copper. Therefore, pure atomistic models are prohibitively expensive for everyday engineering computations involving macroscopic cracks and shear bands, which are much larger than the atomistic length and time scales. To reduce the computational effort, multiscale methods are required, which are able to couple a continuum description of the structure with an atomistic description. In such paradigms, cracks and dislocations are explicitly modeled at the atomistic scale, whilst a self-consistent continuum model is used elsewhere. Many multiscale methods for fracture are developed for "fictitious" materials based on "simple" potentials such as the Lennard-Jones potential. Moreover, multiscale methods for evolving cracks are rare, and efficient methods to coarse-grain the fine scale defects are missing. Furthermore, the existing multiscale methods for fracture do not adaptively adjust the fine scale domain as the crack propagates. Most methods therefore only "enlarge" the fine scale domain, which drastically increases the computational cost. Adaptive adjustment requires the fine scale domain to be refined and coarsened. One of the major difficulties in multiscale methods for fracture is to up-scale fracture related material information from the fine scale to the coarse scale, in particular for complex crack problems. Most of the existing approaches were therefore applied to examples with comparatively few macroscopic cracks. Key contributions: The bridging scale method is enhanced using the phantom node method so that cracks can be modeled at the coarse scale. To ensure self-consistency in the bulk, a virtual atom cluster is devised providing the response of the intact material at the coarse scale. A molecular statics model is employed in the fine scale, where crack propagation is modeled by naturally breaking the bonds. The fine scale and coarse scale models are coupled by enforcing the displacement boundary conditions on the ghost atoms. An energy criterion is used to detect the crack tip location. Adaptive refinement and coarsening schemes are developed and implemented during the crack propagation. The results were observed to be in excellent agreement with the pure atomistic simulations. The developed multiscale method is one of the first adaptive multiscale methods for fracture. A robust and simple three-dimensional coarse graining technique to convert a given atomistic region into an equivalent coarse region, in the context of multiscale fracture, has been developed. The developed method is the first of its kind. The developed coarse graining technique can be applied to identify and upscale defects such as cracks, dislocations and shear bands. The current method has been applied to estimate the equivalent coarse scale models of several complex fracture patterns obtained from the pure atomistic simulations. The upscaled fracture patterns agree well with the actual fracture patterns. The error in the potential energy of the pure atomistic and the coarse grained model was observed to be acceptable. A first novel meshless adaptive multiscale method for fracture has been developed. The phantom node method is replaced by a meshless differential reproducing kernel particle method.
The differential reproducing kernel particle method is comparatively more expensive but allows for a more "natural" coupling between the two scales due to the meshless interpolation functions. The higher-order continuity is also beneficial. The centro-symmetry parameter is used to detect the crack tip location. The developed multiscale method is employed to study complex crack propagation. Results based on the meshless adaptive multiscale method were observed to be in excellent agreement with the pure atomistic simulations. The developed multiscale methods are applied to study fracture in practical materials such as Graphene and Graphene on a Silicon surface. The bond stretching and the bond reorientation were observed to be the main mechanisms of crack growth in Graphene. The influence of the time step on the crack propagation was studied using two different time steps. Pure atomistic simulations of fracture in Graphene on a Silicon surface are presented. Details of the three-dimensional multiscale method to study fracture in Graphene on a Silicon surface are discussed.}, subject = {Material}, language = {en} } @phdthesis{Flohr, author = {Flohr, Alexander}, title = {Der Einfluss von Polymermodifikationen, unterschiedlichen Gesteinsk{\"o}rnungen und Gesteinsk{\"o}rnungssubstitutionsmaterial auf das Verformungs- und Bruchverhalten von Beton}, doi = {10.25643/bauhaus-universitaet.2003}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130806-20035}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {148}, abstract = {In dieser Arbeit werden die Ergebnisse von experimentellen Untersuchungen an unbewehrten und bewehrten modifizierten Betonen unter monoton steigender Belastung bis zum Bruch, einfacher Kurzzeitbelastung im Grenzbereich der Tragf{\"a}higkeit und mehrfach wiederholter Belastung mit kontinuierlicher Be- und Entlastungsgeschwindigkeit vorgestellt und ausgewertet. F{\"u}r die Modifizierung der Betone wurden zwei grunds{\"a}tzliche Vorgehensweisen angewendet: die Variation der Gesteinsk{\"o}rnung und die Modifizierung der Bindemittelphase mit thermoplastischen Polymeren. Die Auswirkungen der Modifikationen auf die Festigkeitseigenschaften und das Form{\"a}nderungsverhalten des Betons bei Kurzzeitbelastung waren dabei von besonderem Interesse. Die beobachteten Ver{\"a}nderungen der Festbetoneigenschaften sowie der nichtlineare Zusammenhang zwischen den elastischen und nichtelastischen Verformungsanteilen signalisieren, dass derartige Modifizierungen das Verformungs- und Bruchverhalten von Beton signifikant beeinflussen und somit beim Nachweis der Tragf{\"a}higkeit und Gebrauchstauglichkeit ber{\"u}cksichtigt werden m{\"u}ssen. Neben der Evaluierung des beanspruchungsabh{\"a}ngigen Form{\"a}nderungsverhaltens werden die etablierten Ans{\"a}tze zur Beschreibung der Gef{\"u}gezustandsbereiche bei Druckbelastung weiterentwickelt, so dass die {\"U}berg{\"a}nge zwischen den Bereichen exakt ermittelt und die Auspr{\"a}gung der Bereiche quantifiziert werden k{\"o}nnen.
Damit ist ein genauerer Vergleich der durch die Modifizierungen hervorgerufenen Ver{\"a}nderungen m{\"o}glich.}, subject = {Beton}, language = {de} } @phdthesis{Froebel, author = {Fr{\"o}bel, Toni}, title = {Data coupled civil engineering applications: Modeling and quality assessment methods}, publisher = {Verlag der Bauhaus-Universit{\"a}t Weimar 2013}, address = {Weimar}, isbn = {978-3-86068-486-3}, doi = {10.25643/bauhaus-universitaet.1836}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130128-18366}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {153}, abstract = {The planning process in civil engineering is highly complex and not manageable in its entirety. The state of the art decomposes complex tasks into smaller, manageable sub-tasks. Due to the close interrelatedness of the sub-tasks, it is essential to couple them. However, from a software engineering point of view, this is quite challenging to do because of the numerous incompatible software applications on the market. This study is concerned with two main objectives: The first is the generic formulation of coupling strategies in order to support engineers in the implementation and selection of adequate coupling strategies. This has been achieved by the use of a coupling pattern language combined with a four-layered metamodel architecture, whose applicability has been demonstrated on a real coupling scenario. The second one is the quality assessment of coupled software. This has been developed based on the evaluated schema mapping. This approach has been described using mathematical expressions derived from set theory and graph theory by taking the various mapping patterns into account. Moreover, the coupling quality has been evaluated within the formalization process by considering the uncertainties that arise during mapping and has resulted in global quality values, which can be used by the user to assess the exchange. Finally, the applicability of the proposed approach has been shown using an engineering case study.}, subject = {Data exchange, Schema mapping, Quality assessment, Uncertainty, Coupling, BIM, Design patterns, Metamodel architecture}, language = {en} } @phdthesis{Giebson, author = {Giebson, Colin}, title = {Die Alkali-Kiesels{\"a}ure-Reaktion in Beton f{\"u}r Fahrbahndecken und Flugbetriebsfl{\"a}chen unter Einwirkung alkalihaltiger Enteisungsmittel}, isbn = {978-3-00-044366-4 (Druckversion)}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20131217-20916}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {208}, abstract = {Das Hauptziel der Arbeit war es zu kl{\"a}ren, ob alkalihaltige Enteisungsmittel eine Alkali-Kiesels{\"a}ure-Reaktion (AKR) ausl{\"o}sen und/oder beschleunigen k{\"o}nnen und was die dabei ggf. zugrunde liegenden Mechanismen sind. Die Untersuchungen dazu ergaben, dass die auf Verkehrsfl{\"a}chen eingesetzten alkalihaltigen Enteisungsmittel auf Basis von Natriumchlorid (Fahrbahndecken) bzw. auf Basis der Alkaliacetate und -formiate (Flugbetriebsfl{\"a}chen) den Ablauf einer AKR in Betonen mit alkalireaktiven Gesteinsk{\"o}rnungen ausl{\"o}sen und mitunter stark beschleunigen k{\"o}nnen. Dabei nimmt die AKR-f{\"o}rdernde Wirkung der Enteisungsmittel in der Reihenfolge Natriumchlorid - Alkaliacetate - Alkaliformiate erheblich zu.
Es zeigte sich, dass im Fall der Alkaliacetate und -formiate nicht allein die Zufuhr von Alkalien von Bedeutung ist, sondern dass es außerdem zu einer Freisetzung von OH-Ionen aus dem Portlandit und folglich zu einem Anstieg des pH-Wertes in der Porenl{\"o}sung kommt. Dadurch wird der Angriff auf alkalireaktives SiO2 in Gesteinsk{\"o}rnungen verst{\"a}rkt und der Ablauf einer AKR beschleunigt. Unter {\"a}ußerer NaCl-Zufuhr kommt es hingegen nicht zu einem Anstieg des pH-Wertes, was der Grund f{\"u}r die weniger stark AKR-f{\"o}rdernde Wirkung von NaCl ist. Von Bedeutung sind hier die zugef{\"u}hrten Na-Ionen und offenbar ein sich andeutender, direkter Einfluss von NaCl auf das SiO2-L{\"o}severhalten. Sind pH-Wert und Na-Konzentration in der Porenl{\"o}sung ausreichend hoch, wird sich thermodynamisch bedingt AKR-Gel bilden. Die Bildung von FRIEDEL'schem Salz ist dabei nur eine Begleiterscheinung, aber keine Voraussetzung f{\"u}r den Ablauf einer AKR unter {\"a}ußerer NaCl-Zufuhr. Es zeigte sich weiter, dass sich mit der FIB-Klimawechsellagerung als Performance-Pr{\"u}fung das AKR-Sch{\"a}digungspotential von Betonen f{\"u}r Fahrbahndecken und Flugbetriebsfl{\"a}chen zuverl{\"a}ssig beurteilen l{\"a}sst. Die Vorteile der FIB-Klimawechsellagerung liegen in der Pr{\"u}fung kompletter, projektspezifischer Betonzusammensetzungen unter Beachtung aller praxisrelevanten klimatischen Einwirkungen und vor allem in der Ber{\"u}cksichtigung einer {\"a}ußeren Alkalizufuhr. Innerhalb von 36 Wochen kann das AKR-Sch{\"a}digungspotential einer Betonzusammensetzung f{\"u}r eine Nutzungsdauer von 20-30 Jahren in der Praxis sicher beurteilt werden.}, subject = {Beton}, language = {de} } @phdthesis{Hamdia, author = {Hamdia, Khader}, title = {On the fracture toughness of polymeric nanocomposites: Comprehensive stochastic and numerical studies}, doi = {10.25643/bauhaus-universitaet.3765}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20180712-37652}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {Polymeric nanocomposites (PNCs) are considered for numerous nanotechnology applications such as nano-biotechnology, nano-systems, nanoelectronics, and nano-structured materials. Commonly, they are formed by a polymer (epoxy) matrix reinforced with a nanosized filler. The addition of rigid nanofillers to the epoxy matrix has offered great improvements in the fracture toughness without sacrificing other important thermo-mechanical properties. The physics of the fracture in PNCs is rather complicated and is influenced by different parameters. The presence of uncertainty in the predicted output is expected as a result of stochastic variance in the factors affecting the fracture mechanism. Consequently, evaluating the improved fracture toughness in PNCs is a challenging problem. An artificial neural network (ANN) and an adaptive neuro-fuzzy inference system (ANFIS) have been employed to predict the fracture energy of polymer/particle nanocomposites. The ANN and ANFIS models were constructed, trained, and tested based on a collection of 115 experimental datasets gathered from the literature. The performance evaluation indices of the developed ANN and ANFIS showed relatively small error, with high coefficients of determination (R2), and low root mean square error and mean absolute percentage error. In the framework for uncertainty quantification of PNCs, a sensitivity analysis (SA) has been conducted to examine the influence of uncertain input parameters on the fracture toughness of polymer/clay nanocomposites.
The phase-field approach is employed to predict the macroscopic properties of the composite considering six uncertain input parameters. The efficiency, robustness, and repeatability are compared and evaluated comprehensively for five different SA methods. The Bayesian method is applied to develop a methodology in order to evaluate the performance of different analytical models used in predicting the fracture toughness of polymer/particle nanocomposites. The developed method considers the model and parameter uncertainties based on different reference data (experimental measurements) gathered from the literature. Three analytical models differing in theory and assumptions were examined. The coefficients of variation of the model predictions relative to the measurements are calculated using the approximated optimal parameter sets. Then, the model selection probability is obtained with respect to the different reference data. Stochastic finite element modeling is implemented to predict the fracture toughness of polymer/particle nanocomposites. For this purpose, a 2D finite element model containing an epoxy matrix and rigid nanoparticles surrounded by an interphase zone is generated. The crack propagation is simulated by the cohesive segments method and phantom nodes. Considering the uncertainties in the input parameters, a polynomial chaos expansion (PCE) surrogate model is constructed, followed by a sensitivity analysis.}, subject = {Bruch}, language = {en} } @phdthesis{Hanna, author = {Hanna, John}, title = {Computational Fracture Modeling and Design of Encapsulation-Based Self-Healing Concrete Using XFEM and Cohesive Surface Technique}, doi = {10.25643/bauhaus-universitaet.4746}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20221124-47467}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {125}, abstract = {Encapsulation-based self-healing concrete (SHC) is the most promising technique for providing a self-healing mechanism to concrete. This is due to its capacity to heal fractures effectively without human intervention, extending the operational life and lowering maintenance costs. The healing mechanism is created by embedding capsules containing the healing agent inside the concrete. The healing agent will be released once the capsules are fractured, and the healing occurs in the vicinity of the damaged part. The healing efficiency of the SHC is still not clear and depends on several factors; in the case of microcapsule-based SHC, the fracture of the microcapsules is the most important aspect for releasing the healing agents and hence healing the cracks. This study contributes to verifying the healing efficiency of SHC and the fracture mechanism of the microcapsules. The extended finite element method (XFEM) is a flexible and powerful discrete crack method that allows crack propagation without the requirement for re-meshing and has shown high accuracy in modeling fracture in concrete. In this thesis, a computational fracture modeling approach for encapsulation-based SHC is proposed based on the XFEM and the cohesive surface technique (CS) to study the healing efficiency as well as the potential for fracture and debonding of the microcapsules or the solidified healing agents from the concrete matrix. The concrete matrix and the microcapsule shell are both modeled by the XFEM and coupled by the CS. The effects of the healed-crack length, the interfacial fracture properties, and the microcapsule size on the load-carrying capability and fracture pattern of the SHC have been studied.
The obtained results are compared to those obtained from the zero-thickness cohesive element approach to demonstrate the high accuracy and the validity of the proposed simulation. The present fracture simulation is developed to study the influence of the capsular clustering on the fracture mechanism by varying the contact surface area of the CS between the microcapsule shell and the concrete matrix. The proposed fracture simulation is expanded to 3D simulations to validate the 2D computational simulations and to estimate the accuracy difference ratio between 2D and 3D simulations. In addition, a design method is proposed for determining the size of the microcapsules, taking into consideration a sufficient volume of healing agent to heal the expected crack width. This method is based on the configuration of the unit cell (UC), the Representative Volume Element (RVE) and Periodic Boundary Conditions (PBC), and relates them to the volume fraction (Vf) and the crack width as variables. The proposed microcapsule design is verified through computational fracture simulations.}, subject = {Beton}, language = {en} } @phdthesis{JeanBaptiste, author = {Jean-Baptiste, Nathalie}, title = {People centered approach towards food waste management in the urban environment of Mexico}, address = {Weimar}, doi = {10.25643/bauhaus-universitaet.2063}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20131024-20633}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {297}, abstract = {A more careful consideration of food waste is needed for planning the urban environment. The research signals links between the organization of individuals, the built environment and food waste management through a study conducted in Mexico. It recognizes the different scales within which solid waste management operates, explores food waste production at household levels, and investigates the urban circumstances that influence its management. This is based on the idea that sustainable food waste management in cities requires a constellation of processes through which a 'people centered' approach offers added value to technical and biological facts. This distinction addresses how urban systems react to waste and what behavioral and structural factors affect current sanitary practices in Mexico. Food waste is a resource-demanding item, which makes for a considerable amount of refuse being disposed of in landfills in developing cities. The existing data shortage on waste generation at household levels debilitates implementation strategies, and there is a need for more contextual knowledge associated with waste. The evidence-based study includes an explorative phase on the culture of waste management and a more in-depth examination of domestic waste composition. Mixed data collection tools including a household-based survey, a food waste diary and a weighing and recording system were developed to enquire into the daily practices of waste disposal in households. The contrasting urban environment of the Mexico City Metropolitan Area has indistinct boundaries between the core and the periphery, which hinder the implementation of integrated environmental plans. External determinants are different modes of urban transformation, and internal determinants are building features and their consolidation processes. At the household level, less and more affluent groups responded differently to external environmental stressors. A targeted planning proposition is required for each group.
Local alternative waste management is more likely to be implemented in less affluent contexts. Further, more effective demand-driven service delivery implies better integration between the formal and informal sectors. The results show that efforts toward securing long-term changes in Mexico and other cities with similar circumstances require creating synergy between education, building consolidation, local infrastructure and social engagement.}, subject = {Food Waste Management}, language = {en} } @phdthesis{Jia, author = {Jia, Yue}, title = {Methods based on B-splines for model representation, numerical analysis and image registration}, doi = {10.25643/bauhaus-universitaet.2484}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20151210-24849}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {200}, abstract = {The thesis consists of inter-connected parts for modeling and analysis using newly developed isogeometric methods. The main parts are reproducing kernel triangular B-splines, extended isogeometric analysis for solving weakly discontinuous problems, collocation methods using superconvergent points, and the B-spline basis in image registration applications. Each topic is oriented towards the application of isogeometric analysis basis functions to ease the process of integrating the modeling and analysis phases of simulation. First, we develop a reproducing kernel triangular B-spline-based FEM for solving PDEs. We review the triangular B-splines and their properties. By definition, the triangular basis function is very flexible in modeling complicated domains. However, instability results when it is applied for analysis. We modify the triangular B-spline by a reproducing kernel technique, calculating a correction term for the triangular kernel function from the chosen surrounding basis. The improved triangular basis is capable of obtaining results with higher accuracy and almost optimal convergence rates. Second, we propose an extended isogeometric analysis for dealing with weakly discontinuous problems such as material interfaces. The original IGA is combined with XFEM-like enrichments which are continuous functions themselves but have discontinuous derivatives. Consequently, the resulting solution space can approximate solutions with weak discontinuities. The method is also applied to curved material interfaces, where the inverse mapping and the curved triangular elements are considered. Third, we develop an IGA collocation method using superconvergent points. The collocation methods are efficient because no numerical integration is needed. In particular, when a higher-order polynomial basis is applied, the method has a lower computational cost than Galerkin methods. However, the positions of the collocation points are crucial for the accuracy of the method, as they affect the convergence rate significantly. The proposed IGA collocation method uses superconvergent points instead of the traditional Greville abscissae points. The numerical results show that the proposed method can achieve better accuracy and optimal convergence rates, while the traditional IGA collocation has optimal convergence only for even polynomial degrees. Lastly, we propose a novel dynamic multilevel technique for handling image registration. It is an application of B-spline functions in image processing. The procedure considered aims to align a target image with a reference image by a spatial transformation. The method starts with an energy function which is the same as in FEM-based image registration.
However, we simplify the solving procedure, working on the energy function directly. We dynamically solve for the control points, which are the coefficients of the B-spline basis functions. The new approach is simpler and faster. Moreover, it is also enhanced by a multilevel technique in order to prevent instabilities. The numerical testing consists of two artificial images and four real bio-medical MRI brain and CT heart images; the results show that our registration method is accurate, fast and efficient, especially for large deformation problems.}, subject = {Finite-Elemente-Methode}, language = {en} } @phdthesis{Kessler2018, author = {Keßler, Andrea}, title = {Matrix-free voxel-based finite element method for materials with heterogeneous microstructures}, doi = {10.25643/bauhaus-universitaet.3844}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20190116-38448}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {113}, year = {2018}, abstract = {Modern image detection techniques such as micro computed tomography (μCT), magnetic resonance imaging (MRI) and scanning electron microscopy (SEM) provide us with high-resolution images of the microstructure of materials in a non-invasive and convenient way. They form the basis for the geometrical models of high-resolution analysis, so-called image-based analysis. However, especially in 3D, discretizations of these models easily reach 100 million degrees of freedom and require extensive hardware resources in terms of main memory and computing power to solve the numerical model. Consequently, the focus of this work is to combine and adapt numerical solution methods to reduce the memory demand first and then the computation time, and thereby enable the execution of image-based analyses on modern desktop computers. Hence, the numerical model is a straightforward grid discretization of the voxel-based (pixels with a third dimension) geometry which omits the boundary detection algorithms and allows reduced storage of the finite element data structure and a matrix-free solution algorithm. This in turn reduces the effort of almost all applied grid-based solution techniques and results in memory-efficient and numerically stable algorithms for the microstructural models. Two variants of the matrix-free algorithm are presented. The efficient iterative solution method of conjugate gradients is used with preconditioners that can be applied matrix-free, such as the Jacobi method and the especially well-suited multigrid method. The jagged material boundaries of the voxel-based mesh are smoothed through embedded boundary elements which contain different material information at the integration point and are integrated sub-cell-wise, though without additional boundary detection. The efficiency of the matrix-free methods can be retained.}, subject = {Dissertation}, language = {en} }