@article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Die Stadt der Agenten und Automaten}, series = {FORUM - Architektur \& Bauforum}, journal = {FORUM - Architektur \& Bauforum}, doi = {10.25643/bauhaus-universitaet.2608}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26083}, abstract = {PLANUNGSUNTERST{\"U}TZUNG DURCH DIE ANALYSE R{\"A}UMLICHER PROZESSE MITTELS COMPUTERSIMULATIONEN. Erst wenn man - zumindest im Prinzip - versteht, wie eine Stadt mit ihren komplexen, verwobenen Vorg{\"a}ngen im Wesentlichen funktioniert, ist eine sinnvolle Stadtplanung m{\"o}glich. Denn jede Planung bedeutet einen Eingriff in den komplexen Organismus einer Stadt. Findet dieser Eingriff ohne Wissen {\"u}ber die Funktionsweise des Organismus statt, k{\"o}nnen auch die Auswirkungen nicht abgesch{\"a}tzt werden. Dieser Beitrag stellt dar, wie urbane Prozesse mittels Computersimulationen unter Zuhilfenahme so genannter Multi-Agenten-Systeme und Zellul{\"a}rer Automaten verstanden werden k{\"o}nnen.}, subject = {CAD}, language = {de} } @phdthesis{Schwedler, author = {Schwedler, Michael}, title = {Integrated structural analysis using isogeometric finite element methods}, doi = {10.25643/bauhaus-universitaet.2737}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170130-27372}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {209}, abstract = {The gradual digitization in the architecture, engineering, and construction industry over the past fifty years led to an extremely heterogeneous software environment, which today is embodied by the multitude of different digital tools and proprietary data formats used by the many specialists contributing to the design process in a construction project. Though these projects become increasingly complex, the demands on financial efficiency and the completion within a tight schedule grow at the same time. 
The digital collaboration of project partners has been identified as one key issue in successfully dealing with these challenges. Yet currently, the numerous software applications and their respective individual views on the design process severely impede that collaboration. An approach to establish a unified basis for the digital collaboration, regardless of the existing software heterogeneity, is a comprehensive digital building model contributed to by all projects partners. This type of data management known as building information modeling (BIM) has many benefits, yet its adoption is associated with many difficulties and thus, proceeds only slowly. One aspect in the field of conflicting requirements on such a digital model is the cooperation of architects and structural engineers. Traditionally, these two disciplines use different abstractions of reality for their models that in consequence lead to incompatible digital representations thereof. The onset of isogeometric analysis (IGA) promised to ease the discrepancy in design and analysis model representations. Yet, that initial focus quickly shifted towards using these methods as a more powerful basis for numerical simulations. Furthermore, the isogeometric representation alone is not capable of solving the model abstraction problem. It is thus the intention of this work to contribute to an improved digital collaboration of architects and engineers by exploring an integrated analysis approach on the basis of an unified digital model and solid geometry expressed by splines. In the course of this work, an analysis framework is developed that utilizes such models to automatically conduct numerical simulations commonly required in construction projects. In essence, this allows to retrieve structural analysis results from BIM models in a fast and simple manner, thereby facilitating rapid design iterations and profound design feedback. 
The BIM implementation Industry Foundation Classes (IFC) is reviewed with regard to its capabilities of representing the unified model. The current IFC schema strongly supports the use of redundant model data, a major pitfall in digital collaboration. Additionally, it does not allow to describe the geometry by volumetric splines. As the pursued approach builds upon a unique model for both, architectural and structural design, and furthermore requires solid geometry, necessary schema modifications are suggested. Structural entities are modeled by volumetric NURBS patches, each of which constitutes an individual subdomain that, with regard to the analysis, is incompatible with the remaining full model. The resulting consequences for numerical simulation are elaborated in this work. The individual subdomains have to be weakly coupled, for which the mortar method is used. Different approaches to discretize the interface traction fields are implemented and their respective impact on the analysis results is evaluated. All necessary coupling conditions are automatically derived from the related geometry model. The weak coupling procedure leads to a linear system of equations in saddle point form, which, owed to the volumetric modeling, is large in size and, the associated coefficient matrix has, due to the use of higher degree basis functions, a high bandwidth. The peculiarities of the system require adapted solution methods that generally cause higher numerical costs than the standard procedures for symmetric, positive-definite systems do. Different methods to solve the specific system are investigated and an efficient parallel algorithm is finally proposed. When the structural analysis model is derived from the unified model in the BIM data, it does in general initially not meet the requirements on the discretization that are necessary to obtain sufficiently accurate analysis results. 
The consequently necessary patch refinements must be controlled automatically to allow for an entirely automatic analysis procedure. For that purpose, an empirical refinement scheme based on the geometrical and possibly mechanical properties of the specific entities is proposed. The level of refinement may be selectively manipulated by the structural engineer in charge. Furthermore, a Zienkiewicz-Zhu type error estimator is adapted for the use with isogeometric analysis results. It is shown that also this estimator can be used to steer an adaptive refinement procedure.}, subject = {Finite-Elemente-Methode}, language = {en} } @phdthesis{Amiri, author = {Amiri, Fatemeh}, title = {Computational modelling of fracture with local maximum entropy approximations}, doi = {10.25643/bauhaus-universitaet.2631}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160719-26310}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {130}, abstract = {The key objective of this research is to study fracture with a meshfree method, local maximum entropy approximations, and model fracture in thin shell structures with complex geometry and topology. This topic is of high relevance for real-world applications, for example in the automotive industry and in aerospace engineering. The shell structure can be described efficiently by meshless methods which are capable of describing complex shapes as a collection of points instead of a structured mesh. In order to find the appropriate numerical method to achieve this goal, the first part of the work was development of a method based on local maximum entropy (LME) shape functions together with enrichment functions used in partition of unity methods to discretize problems in linear elastic fracture mechanics. We obtain improved accuracy relative to the standard extended finite element method (XFEM) at a comparable computational cost. In addition, we keep the advantages of the LME shape functions, such as smoothness and non-negativity. 
We show numerically that optimal convergence (same as in FEM) for energy norm and stress intensity factors can be obtained through the use of geometric (fixed area) enrichment with no special treatment of the nodes near the crack such as blending or shifting. As extension of this method to three dimensional problems and complex thin shell structures with arbitrary crack growth is cumbersome, we developed a phase field model for fracture using LME. Phase field models provide a powerful tool to tackle moving interface problems, and have been extensively used in physics and materials science. Phase methods are gaining popularity in a wide set of applications in applied science and engineering, recently a second order phase field approximation for brittle fracture has gathered significant interest in computational fracture such that sharp cracks discontinuities are modeled by a diffusive crack. By minimizing the system energy with respect to the mechanical displacements and the phase-field, subject to an irreversibility condition to avoid crack healing, this model can describe crack nucleation, propagation, branching and merging. One of the main advantages of the phase field modeling of fractures is the unified treatment of the interfacial tracking and mechanics, which potentially leads to simple, robust, scalable computer codes applicable to complex systems. In other words, this approximation reduces considerably the implementation complexity because the numerical tracking of the fracture is not needed, at the expense of a high computational cost. We present a fourth-order phase field model for fracture based on local maximum entropy (LME) approximations. The higher order continuity of the meshfree LME approximation allows to directly solve the fourth-order phase field equations without splitting the fourth-order differential equation into two second order differential equations. 
Notably, in contrast to previous discretizations that use at least a quadratic basis, only linear completeness is needed in the LME approximation. We show that the crack surface can be captured more accurately in the fourth-order model than the second-order model. Furthermore, less nodes are needed for the fourth-order model to resolve the crack path. Finally, we demonstrate the performance of the proposed meshfree fourth order phase-field formulation for 5 representative numerical examples. Computational results will be compared to analytical solutions within linear elastic fracture mechanics and experimental data for three-dimensional crack propagation. In the last part of this research, we present a phase-field model for fracture in Kirchhoff-Love thin shells using the local maximum-entropy (LME) meshfree method. Since the crack is a natural outcome of the analysis it does not require an explicit representation and tracking, which is advantageous over techniques as the extended finite element method that requires tracking of the crack paths. The geometric description of the shell is based on statistical learning techniques that allow dealing with general point set surfaces avoiding a global parametrization, which can be applied to tackle surfaces of complex geometry and topology. 
We show the flexibility and robustness of the present methodology for two examples: plate in tension and a set of open connected pipes.}, language = {en} } @inproceedings{KoenigSchmitt, author = {K{\"o}nig, Reinhard and Schmitt, Gerhard}, title = {Backcasting and a new way of command in computational design : Proceedings}, series = {CAADence in Architecture Conference}, booktitle = {CAADence in Architecture Conference}, editor = {Szoboszlai, Mih{\'a}ly}, address = {Budapest}, doi = {10.25643/bauhaus-universitaet.2599}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-25996}, pages = {15--25}, abstract = {It's not uncommon that analysis and simulation methods are used mainly to evaluate finished designs and to proof their quality. Whereas the potential of such methods is to lead or control a design process from the beginning on. Therefore, we introduce a design method that move away from a "what-if" forecasting philosophy and increase the focus on backcasting approaches. We use the power of computation by combining sophisticated methods to generate design with analysis methods to close the gap between analysis and synthesis of designs. For the development of a future-oriented computational design support we need to be aware of the human designer's role. A productive combination of the excellence of human cognition with the power of modern computing technology is needed. We call this approach "cognitive design computing". The computational part aim to mimic the way a designer's brain works by combining state-of-the-art optimization and machine learning approaches with available simulation methods. The cognition part respects the complex nature of design problems by the provision of models for human-computation interaction. This means that a design problem is distributed between computer and designer. In the context of the conference slogan "back to command", we ask how we may imagine the command over a cognitive design computing system. 
We expect that designers will need to let go control of some parts of the design process to machines, but in exchange they will get a new powerful command on complex computing processes. This means that designers have to explore the potentials of their role as commanders of partially automated design processes. In this contribution we describe an approach for the development of a future cognitive design computing system with the focus on urban design issues. The aim of this system is to enable an urban planner to treat a planning problem as a backcasting problem by defining what performance a design solution should achieve and to automatically query or generate a set of best possible solutions. This kind of computational planning process offers proof that the designer meets the original explicitly defined design requirements. A key way in which digital tools can support designers is by generating design proposals. Evolutionary multi-criteria optimization methods allow us to explore a multi-dimensional design space and provide a basis for the designer to evaluate contradicting requirements: a task urban planners are faced with frequently. We also reflect why designers will give more and more control to machines. Therefore, we investigate first approaches learn how designers use computational design support systems in combination with manual design strategies to deal with urban design problems by employing machine learning methods. 
By observing how designers work, it is possible to derive more complex artificial solution strategies that can help computers make better suggestions in the future.}, subject = {CAD}, language = {en} } @article{KoenigBauriedel, author = {K{\"o}nig, Reinhard and Bauriedel, Christian}, title = {Generating settlement structures: a method for urban planning and analysis supported by cellular automata}, series = {Environment and Planning B: Planning and Design}, journal = {Environment and Planning B: Planning and Design}, doi = {10.25643/bauhaus-universitaet.2605}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160624-26054}, pages = {602 -- 624}, abstract = {Previous models for the explanation of settlement processes pay little attention to the interactions between settlement spreading and road networks. On the basis of a dielectric breakdown model in combination with cellular automata, we present a method to steer precisely the generation of settlement structures with regard to their global and local density as well as the size and number of forming clusters. The resulting structures depend on the logic of how the dependence of the settlements and the road network is implemented to the simulation model. After analysing the state of the art we begin with a discussion of the mutual dependence of roads and land development. Next, we elaborate a model that permits the precise control of permeability in the developing structure as well as the settlement density, using the fewest necessary control parameters. On the basis of different characteristic values, possible settlement structures are analysed and compared with each other. 
Finally, we reflect on the theoretical contribution of the model with regard to the context of urban dynamics.}, language = {en} } @inproceedings{KoenigVaroudis, author = {K{\"o}nig, Reinhard and Varoudis, Tasos}, title = {Spatial Optimizations: Merging depthmapX, spatial graph networks and evolutionary design in Grasshopper}, series = {Proceedings of ecaade 34: Complexity \& Simplicity}, booktitle = {Proceedings of ecaade 34: Complexity \& Simplicity}, address = {Oulu, Finland}, doi = {10.25643/bauhaus-universitaet.2604}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26040}, pages = {1--6}, abstract = {In the Space Syntax community, the standard tool for computing all kinds of spatial graph network measures is depthmapX (Turner, 2004; Varoudis, 2012). The process of evaluating many design variants of networks is relatively complicated, since they need to be drawn in a separated CAD system, exported and imported in depthmapX via dxf file format. This procedure disables a continuous integration into a design process. Furthermore, the standalone character of depthmapX makes it impossible to use its network centrality calculation for optimization processes. To overcome this limitations, we present in this paper the first steps of experimenting with a Grasshopper component (reference omitted until final version) that can access the functions of depthmapX and integrate them into Grasshopper/Rhino3D. 
Here the component is implemented in a way that it can be used directly for an evolutionary algorithm (EA) implemented in a Python scripting component in Grasshopper}, language = {en} } @phdthesis{Ehrhardt, author = {Ehrhardt, Dirk}, title = {Zum Einfluss der Nachbehandlung auf die Gef{\"u}geausbildung und den Frost-Taumittelwiderstand der Betonrandzone}, doi = {10.25643/bauhaus-universitaet.3688}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20171120-36889}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {235}, abstract = {Die Festigkeitsentwicklung des Zementbetons basiert auf der chemischen Reaktion des Zementes mit dem Anmachwasser. Durch Nachbehandlungsmaßnahmen muss daf{\"u}r gesorgt werden, dass dem Zement gen{\"u}gend Wasser f{\"u}r seine Reaktion zur Verf{\"u}gung steht, da sonst ein Beton mit minderer Qualit{\"a}t entsteht. Die vorliegende Arbeit behandelt die grunds{\"a}tzlichen Fragen der Betonnachbehandlung bei Anwendung von Straßenbetonen. Im Speziellen wird die Frage des erforderlichen Nachbehandlungsbedarfs von h{\"u}ttensandhaltigen Kompositzementen betrachtet. Die Wirkung der Nachbehandlung wird anhand des erreichten Frost-Tausalz-Widerstandes und der Gef{\"u}geausbildung in der unmittelbaren Betonrandzone bewertet. Der Fokus der Untersuchungen lag auf abgezogenen Betonoberfl{\"a}chen. Es wurde ein Modell zur Austrocknung des jungen Betons erarbeitet. Es konnte gezeigt werden, dass in einer fr{\"u}hen Austrocknung (Kapillarphase) keine kritische Austrocknung der Betonrandzone einsetzt, sondern der Beton ann{\"a}hrend gleichm{\"a}ßig {\"u}ber die H{\"o}he austrocknet. Es wurde ein Nomogramm entwickelt, mit dem die Dauer der Kapillarphase in Abh{\"a}ngigkeit der Witterung f{\"u}r Straßenbetone abgesch{\"a}tzt werden kann. Eine kritische Austrocknung der wichtigen Randzone setzt nach Ende der Kapillarphase ein. 
F{\"u}r Betone unter Verwendung von Zementen mit langsamer Festigkeitsentwicklung ist die Austrocknung der Randzone nach Ende der Kapillarphase besonders ausgepr{\"a}gt. Im Ergebnis zeigen diese Betone dann einen geringen Frost-Tausalz-Widerstand. Mit Zementen, die eine 2d-Zementdruckfestigkeit ≥ 23,0 N/mm² aufweisen, wurde unabh{\"a}ngig von der Zementart (CEM I oder CEM II/B-S) auch dann ein hoher Frost-Tausalz-Widerstand erreicht, wenn keine oder eine schlechtere Nachbehandlung angewendet wurde. F{\"u}r die Praxis ergibt sich damit eine einfache M{\"o}glichkeit der Vorauswahl von geeigneten Zementen f{\"u}r den Verkehrsfl{\"a}chenbau. Betone, die unter Verwendung von Zementen mit langsamere Festigkeitsentwicklung hergestellt werden, erreichen einen hohen Frost-Tausalz-Widerstand nur mit einer geeigneten Nachbehandlung. Die Anwendung von fl{\"u}ssigen Nachbehandlungsmitteln (NBM gem{\"a}ß TL NBM-StB) erreicht eine {\"a}hnliche Wirksamkeit wie eine 5 t{\"a}gige Feuchtnachbehandlung. Voraussetzung f{\"u}r die Wirksamkeit der NBM ist, dass sie auf eine Betonoberfl{\"a}che ohne sichtbaren Feuchtigkeitsfilm (feuchter Glanz) aufgespr{\"u}ht werden. Besonders wichtig ist die Beachtung des richtigen Auftragszeitpunktes bei k{\"u}hler Witterung, da hier aufgrund der verlangsamten Zementreaktion der Beton l{\"a}nger Anmachwasser abst{\"o}ßt. Ein zu fr{\"u}her Auftrag des Nachbehandlungsmittels f{\"u}hrt zu einer Verschlechterung der Qualit{\"a}t der Betonrandzone. Durch Bereitstellung hydratationsabh{\"a}ngiger Transportkenngr{\"o}ßen (Feuchtetransport im Beton) konnten numerische Berechnungen zum Zusammenspiel zwischen der Austrocknung, der Nachbehandlung und der Gef{\"u}geentwicklung durchgef{\"u}hrt werden. Mit dem erstellten Berechnungsmodell wurden Parameterstudien durchgef{\"u}hrt. Die Berechnungen best{\"a}tigen die wesentlichen Erkenntnisse der Laboruntersuchungen. 
Dar{\"u}ber hinaus l{\"a}sst sich mit dem Berechnungsmodell zeigen, dass gerade bei langsam reagierenden Zementen und k{\"u}hler Witterung ohne eine Nachbehandlung eine sehr d{\"u}nne Randzone (ca. 500 µm - 1000 µm) mit stark erh{\"o}hter Kapillarporosit{\"a}t entsteht.}, subject = {Beton}, language = {de} } @phdthesis{Mueller, author = {M{\"u}ller, Matthias}, title = {Salt-frost Attack on Concrete - New Findings regarding the Damage Mechanism}, doi = {10.25643/bauhaus-universitaet.4868}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20230103-48681}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {The reduction of the cement clinker content is an important prerequisite for the improvement of the CO2-footprint of concrete. Nevertheless, the durability of such concretes must be sufficient to guarantee a satisfactory service life of structures. Salt frost scaling resistance is a critical factor in this regard, as it is often diminished at increased clinker substitution rates. Furthermore, only insufficient long-term experience for such concretes exists. A high salt frost scaling resistance thus cannot be achieved by applying only descriptive criteria, such as the concrete composition. It is therefore to be expected, that in the long term a performance based service life prediction will replace the descriptive concept. To achieve the important goal of clinker reduction for concretes also in cold and temperate climates it is important to understand the underlying mechanisms for salt frost scaling. However, conflicting damage theories dominate the current State of the Art. It was consequently derived as the goal of this thesis to evaluate existing damage theories and to examine them experimentally. It was found that only two theories have the potential to describe the salt frost attack satisfactorily - the glue spall theory and the cryogenic suction theory. 
The glue spall theory attributes the surface scaling to the interaction of an external ice layer with the concrete surface. Only when moderate amounts of deicing salt are present in the test solution the resulting mechanical properties of the ice can cause scaling. However, the results in this thesis indicate that severe scaling also occurs at deicing salt levels, at which the ice is much too soft to damage concrete. Thus, the inability of the glue spall theory to account for all aspects of salt frost scaling was shown. The cryogenic suction theory is based on the eutectic behavior of salt solutions, which consist of two phases - water ice and liquid brine - between the freezing point and the eutectic temperature. The liquid brine acts as an additional moisture reservoir, which facilitates the growth of ice lenses in the surface layer of the concrete. The experiments in this thesis confirmed, that the ice formation in hardened cement paste increases due to the suction of brine at sub-zero temperatures. The extent of additional ice formation was influenced mainly by the porosity and by the chloride binding capacity of the hardened cement paste. Consequently, the cryogenic suction theory plausibly describes the actual generation of scaling, but it has to be expanded by some crucial aspects to represent the salt frost scaling attack completely. The most important aspect is the intensive saturation process, which is ascribed to the so-called micro ice lens pump. Therefore a combined damage theory was proposed, which considers multiple saturation processes. Important aspects of this combined theory were confirmed experimentally. As a result, the combined damage theory constitutes a good basis to understand the salt frost scaling attack on concrete on a fundamental level. 
Furthermore, a new approach was identified, to account for the reduced salt frost scaling resistance of concretes with reduced clinker content.}, subject = {Beton}, language = {en} } @mastersthesis{Krtschil, type = {Bachelor Thesis}, author = {Krtschil, Anna}, title = {Vergleich verschiedener Indikatoren in Bezug auf die {\"O}kobilanz von Geb{\"a}uden}, doi = {10.25643/bauhaus-universitaet.2434}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150716-24340}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {73}, abstract = {Im Rahmen der Bachelorarbeit werden zwei Indikatoren zur Auswertung einer {\"O}kobilanz gegen{\"u}bergestellt. Die Umweltbelastungspunkte der Schweiz werden mit dem niederl{\"a}ndischen ReCiPe verglichen.}, subject = {Umweltbilanz}, language = {de} } @phdthesis{Harirchian, author = {Harirchian, Ehsan}, title = {Improved Rapid Assessment of Earthquake Hazard Safety of Existing Buildings Using a Hierarchical Type-2 Fuzzy Logic Model}, doi = {10.25643/bauhaus-universitaet.4396}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20210326-43963}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {143}, abstract = {Although it is impractical to avert subsequent natural disasters, advances in simulation science and seismological studies make it possible to lessen the catastrophic damage. There currently exists in many urban areas a large number of structures, which are prone to damage by earthquakes. These were constructed without the guidance of a national seismic code, either before it existed or before it was enforced. For instance, in Istanbul, Turkey, as a high seismic area, around 90\% of buildings are substandard, which can be generalized into other earthquake-prone regions in Turkey. The reliability of this building stock resulting from earthquake-induced collapse is currently uncertain. 
Nonetheless, it is also not feasible to perform a detailed seismic vulnerability analysis on each building as a solution to the scenario, as it will be too complicated and expensive. This indicates the necessity of a reliable, rapid, and computationally easy method for seismic vulnerability assessment, commonly known as Rapid Visual Screening (RVS). In RVS methodology, an observational survey of buildings is performed, and according to the data collected during the visual inspection, a structural score is calculated without performing any structural calculations to determine the expected damage of a building and whether the building needs detailed assessment. Although this method might save time and resources due to the subjective/qualitative judgments of experts who performed the inspection, the evaluation process is dominated by vagueness and uncertainties, where the vagueness can be handled adequately through the fuzzy set theory but do not cover all sort of uncertainties due to its crisp membership functions. In this study, a novel method of rapid visual hazard safety assessment of buildings against earthquake is introduced in which an interval type-2 fuzzy logic system (IT2FLS) is used to cover uncertainties. In addition, the proposed method provides the possibility to evaluate the earthquake risk of the building by considering factors related to the building importance and exposure. A smartphone app prototype of the method has been introduced. 
For validation of the proposed method, two case studies have been selected, and the result of the analysis presents the robust efficiency of the proposed method.}, subject = {Fuzzy-Logik}, language = {en} } @phdthesis{Hollberg, author = {Hollberg, Alexander}, title = {A parametric method for building design optimization based on Life Cycle Assessment - Appendix}, series = {A parametric method for building design optimization based on Life Cycle Assessment}, journal = {A parametric method for building design optimization based on Life Cycle Assessment}, doi = {10.25643/bauhaus-universitaet.2688}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20161101-26884}, abstract = {The building sector is responsible for a large share of human environmental impacts, over which architects and planners have a major influence. The main objective of this thesis is to develop a method for environmental building design optimization based on Life Cycle Assessment (LCA) that is applicable as part of the design process. The research approach includes a thorough analysis of LCA for buildings in relation to the architectural design stages and the establishment of a requirement catalogue. The key concept of the novel method called Parametric Life Cycle Assessment(PLCA) is to combine LCA with parametric design. 
The application of this method to three examples shows that building designs can be optimized time-efficiently and holistically from the beginning of the most influential early design stages, an achievement which has not been possible until now.}, subject = {{\"O}kobilanz}, language = {en} } @article{AlaladeReichertKoehnetal., author = {Alalade, Muyiwa and Reichert, Ina and K{\"o}hn, Daniel and Wuttke, Frank and Lahmer, Tom}, title = {A Cyclic Multi-Stage Implementation of the Full-Waveform Inversion for the Identification of Anomalies in Dams}, series = {Infrastructures}, volume = {7}, journal = {Infrastructures}, number = {12}, year = {2022}, editor = {Qu, Chunxu and Gao, Chunxu and Zhang, Rui and Jia, Ziguang and Li, Jiaxiang}, publisher = {MDPI}, address = {Basel}, doi = {10.3390/infrastructures7120161}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20221201-48396}, pages = {19}, abstract = {For the safe and efficient operation of dams, frequent monitoring and maintenance are required. These are usually expensive, time consuming, and cumbersome. To alleviate these issues, we propose applying a wave-based scheme for the location and quantification of damages in dams. To obtain high-resolution "interpretable" images of the damaged regions, we drew inspiration from non-linear full-multigrid methods for inverse problems and applied a new cyclic multi-stage full-waveform inversion (FWI) scheme. Our approach is less susceptible to the stability issues faced by the standard FWI scheme when dealing with ill-posed problems. In this paper, we first selected an optimal acquisition setup and then applied synthetic data to demonstrate the capability of our approach in identifying a series of anomalies in dams by a mixture of reflection and transmission tomography. 
The results had sufficient robustness, showing the prospects of application in the field of non-destructive testing of dams.}, subject = {Damm}, language = {en} } @phdthesis{Blickling2006, author = {Blickling, Arno}, title = {Spezifikation des Bau-Solls durch interaktive Modellierung auf virtuellen Baustellen}, doi = {10.25643/bauhaus-universitaet.790}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20061105-8311}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {Heutige Methoden zur Soll-Spezifikation von Bauleistungen (Kostenermittlung und zeitliche Ablaufplanung) gehen von einer abstrahierten und vereinfachten Betrachtung der Zusammenh{\"a}nge bei Bauprojekten aus. Leistungsverzeichnisse, Kostenermittlungen und Bauzeitpl{\"a}ne orientieren sich nur indirekt an der Geometrie des Bauwerks und der Baustelle. Die dabei verwendeten Medien wie Papier, 2D-Dateien, digitale Leistungsbeschreibungen oder 3D-Darstellungen lassen die Suche nach Informationen auf der Baustelle zu einem zeitaufw{\"a}ndigen und in Anbetracht existierender Medientechnologien ineffizienten Prozess werden. Interaktive virtuelle Umgebungen erlauben die Aufl{\"o}sung starrer Zusammenh{\"a}nge durch interaktive Eingriffe des Anwenders und visualisieren komplexe bauproduktionstechnische Vorg{\"a}nge. Das Konzept der visuellen interaktiven Simulation der Bauproduktion sieht vor, die Soll-Spezifikation anhand eines interaktiven 3D-Modells zu entwickeln, um r{\"a}umliche Ver{\"a}nderungen und parallele Prozesse auf der virtuellen Baustelle im Rahmen der Entscheidungsfindung zum Bauablauf besser ber{\"u}cksichtigen zu k{\"o}nnen. Verlangt man einen hohen Grad an Interaktivit{\"a}t mit dem 3D-Modell, dann bieten sich Computerspieltechnologien sehr gut zu Verifikationszwecken an. 
Die visuelle interaktive Simulation der Bauproduktion ist damit als eine 3D-modellbasierte Methode der Prozessmodellierung zu verstehen, die Entscheidungen als Input ben{\"o}tigt und die Kostenermittlung sowie die zeitliche Ablaufplanung als Output liefert.}, subject = {Virtuelle Realit{\"a}t}, language = {de} } @article{ChowdhuryZabel, author = {Chowdhury, Sharmistha and Zabel, Volkmar}, title = {Influence of loading sequence on wind induced fatigue assessment of bolts in TV-tower connection block}, series = {Results in Engineering}, volume = {2022}, journal = {Results in Engineering}, number = {Volume 16, article 100603}, publisher = {Elsevier}, address = {Amsterdam}, doi = {10.1016/j.rineng.2022.100603}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20221028-47303}, pages = {1--18}, abstract = {Bolted connections are widely employed in structures like transmission poles, wind turbines, and television (TV) towers. The behaviour of bolted connections is often complex and plays a significant role in the overall dynamic characteristics of the structure. The goal of this work is to conduct a fatigue lifecycle assessment of such a bolted connection block of a 193 m tall TV tower, for which 205 days of real measurement data have been obtained from the installed monitoring devices. Based on the recorded data, the best-fit stochastic wind distribution for 50 years, the decisive wind action, and the locations to carry out the fatigue analysis have been decided. A 3D beam model of the entire tower is developed to extract the nodal forces corresponding to the connection block location under various mean wind speeds, which is later coupled with a detailed complex finite element model of the connection block, with over three million degrees of freedom, for acquiring stress histories on some pre-selected bolts. The random stress histories are analysed using the rainflow counting algorithm (RCA) and the damage is estimated using Palmgren-Miner's damage accumulation law. 
A modification is proposed to integrate the loading sequence effect into the RCA, which otherwise is ignored, and the differences between the two RCAs are investigated in terms of the accumulated damage.}, subject = {Schadensakkumulation}, language = {en} } @article{ArnoldKraus, author = {Arnold, Robert and Kraus, Matthias}, title = {On the nonstationary identification of climate-influenced loads for the semi-probabilistic approach using measured and projected data}, series = {Cogent Engineering}, volume = {2022}, journal = {Cogent Engineering}, number = {Volume 9, issue 1, article 2143061}, editor = {Pham, Duc}, publisher = {Taylor \& Francis}, address = {London}, doi = {10.1080/23311916.2022.2143061}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20221117-47363}, pages = {1--26}, abstract = {A safe and economic structural design based on the semi-probabilistic concept requires statistically representative safety elements, such as characteristic values, design values, and partial safety factors. Regarding climate loads, the safety levels of current design codes strongly reflect experiences based on former measurements and investigations assuming stationary conditions, i.e. involving constant frequencies and intensities. However, due to climate change, occurrence of corresponding extreme weather events is expected to alter in the future influencing the reliability and safety of structures and their components. Based on established approaches, a systematically refined data-driven methodology for the determination of design parameters considering nonstationarity as well as standardized targets of structural reliability or safety, respectively, is therefore proposed. The presented procedure picks up fundamentals of European standardization and extends them with respect to nonstationarity by applying a shifting time window method. 
Taking projected snow loads into account, the application of the method is exemplarily demonstrated and various influencing parameters are discussed.}, subject = {Reliabilit{\"a}t}, language = {en} } @article{ChowdhuryKraus, author = {Chowdhury, Sharmistha and Kraus, Matthias}, title = {Design-related reassessment of structures integrating Bayesian updating of model safety factors}, series = {Results in Engineering}, volume = {2022}, journal = {Results in Engineering}, number = {Volume 16, article 100560}, publisher = {Elsevier}, address = {Amsterdam}, doi = {10.1016/j.rineng.2022.100560}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20221028-47294}, pages = {1--1}, abstract = {In the semi-probabilistic approach of structural design, the partial safety factors are defined by considering some degree of uncertainties to actions and resistance, associated with the parameters' stochastic nature. However, uncertainties for individual structures can be better examined by incorporating measurement data provided by sensors from an installed health monitoring scheme. In this context, the current study proposes an approach to revise the partial safety factor for existing structures on the action side, $\gamma_E$, by integrating Bayesian model updating. A simple numerical example of a beam-like structure with artificially generated measurement data is used such that the influence of different sensor setups and data uncertainties on revising the safety factors can be investigated. It is revealed that the health monitoring system can reassess the current capacity reserve of the structure by updating the design safety factors, resulting in a better life cycle assessment of structures. 
The outcome is furthermore verified by analysing a real life small railway steel bridge ensuring the applicability of the proposed method to practical applications.}, subject = {Lebenszyklus}, language = {en} } @article{KumariHarirchianLahmeretal., author = {Kumari, Vandana and Harirchian, Ehsan and Lahmer, Tom and Rasulzade, Shahla}, title = {Evaluation of Machine Learning and Web-Based Process for Damage Score Estimation of Existing Buildings}, series = {Buildings}, volume = {2022}, journal = {Buildings}, number = {Volume 12, issue 5, article 578}, publisher = {MDPI}, address = {Basel}, doi = {10.3390/buildings12050578}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220509-46387}, pages = {1--23}, abstract = {The seismic vulnerability assessment of existing reinforced concrete (RC) buildings is a significant source of disaster mitigation plans and rescue services. Different countries evolved various Rapid Visual Screening (RVS) techniques and methodologies to deal with the devastating consequences of earthquakes on the structural characteristics of buildings and human casualties. Artificial intelligence (AI) methods, such as machine learning (ML) algorithm-based methods, are increasingly used in various scientific and technical applications. The investigation toward using these techniques in civil engineering applications has shown encouraging results and reduced human intervention, including uncertainties and biased judgment. In this study, several known non-parametric algorithms are investigated toward RVS using a dataset employing different earthquakes. Moreover, the methodology encourages the possibility of examining the buildings' vulnerability based on the factors related to the buildings' importance and exposure. In addition, a web-based application built on Django is introduced. The interface is designed with the idea to ease the seismic vulnerability investigation in real-time. 
The concept was validated using two case studies, and the achieved results showed the proposed approach's potential efficiency.}, subject = {Maschinelles Lernen}, language = {en} } @mastersthesis{Nguyen, type = {Bachelor Thesis}, author = {Nguyen, Thai Cuong}, title = {Fl{\"a}chen zweiter Ordnung - D{\"a}cher m{\"u}ssen nicht eben sein}, doi = {10.25643/bauhaus-universitaet.3749}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20181024-37496}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {47}, abstract = {In dieser Arbeit geht es um die Quadriken in der Ebene und im Raum. Dabei werden die Transformation in die Normalform und die Klassifikation untersucht. Aus den geometrischen Eigenschaften werden einige Anwendungsbeispiele der Quadriken in der Technik und dem allt{\"a}glichen Leben vorgestellt.}, subject = {Quadrik}, language = {de} } @techreport{VogelVoelkerArnoldetal., author = {Vogel, Albert and V{\"o}lker, Conrad and Arnold, J{\"o}rg and Schmidt, Jens and Thurow, Torsten and Braunes, J{\"o}rg and Tonn, Christian and Bode, Kay-Andr{\'e} and Baldy, Franziska and Erfurt, Wolfgang and Tatarin, Ren{\'e}}, title = {Methoden und Baustoffe zur nutzerorientierten Bausanierung. Schlussbericht zum InnoProfile Forschungsvorhaben}, organization = {Bauhaus-Universit{\"a}t Weimar}, isbn = {978-3-86068-501-3 (Printausg.)}, doi = {10.25643/bauhaus-universitaet.2022}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130830-20229}, pages = {106}, abstract = {Nutzerorientierte Bausanierung bedeutet eine gegen{\"u}ber dem konventionellen Vorgehen deutlich verst{\"a}rkte Ausrichtung des Planungs- und Sanierungsprozesses auf die Anforderungen und Bed{\"u}rfnisse des zuk{\"u}nftigen Nutzers eines Geb{\"a}udes. Dies hat einerseits ein hochwertigeres Produkt zum Ergebnis, erfordert andererseits aber auch den Einsatz neuer Methoden und Baustoffe sowie ein vernetztes Zusammenarbeiten aller am Bauprozess Beteiligten. 
Der Fokus der Publikation liegt dabei auf den Bereichen, die eine hohe Relevanz f{\"u}r die nutzerorientierte Bausanierung aufweisen. Dabei handelt es sich insbesondere um: Computergest{\"u}tztes Bauaufma{\ss} und digitale Bauwerksmodellierung (BIM), bauphysikalische Methoden zur Optimierung von Energieeffizienz und Behaglichkeit bei der Sanierung von Bestandsgeb{\"a}uden, zerst{\"o}rungsfreie Untersuchungsmethoden im Rahmen einer substanzschonenden Bauzustandsanalyse und Entwicklung von Erg{\"a}nzungsbaustoffen. Das Projekt nuBau ist eine Kooperation zwischen den Fakult{\"a}ten Bauingenieurwesen und Architektur der Bauhaus-Universit{\"a}t Weimar. Die beteiligten Professuren sind: Bauphysik, Informatik in der Architektur, Polymere Werkstoffe und Werkstoffe des Bauens.}, subject = {Nutzerorientierte Bausanierung}, language = {de} } @article{HarirchianKumariJadhavetal., author = {Harirchian, Ehsan and Kumari, Vandana and Jadhav, Kirti and Rasulzade, Shahla and Lahmer, Tom and Raj Das, Rohan}, title = {A Synthesized Study Based on Machine Learning Approaches for Rapid Classifying Earthquake Damage Grades to RC Buildings}, series = {Applied Sciences}, volume = {2021}, journal = {Applied Sciences}, number = {Volume 11, issue 16, article 7540}, publisher = {MDPI}, address = {Basel}, doi = {10.3390/app11167540}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20210818-44853}, pages = {1--33}, abstract = {A vast number of existing buildings were constructed before the development and enforcement of seismic design codes, which run into the risk of being severely damaged under the action of seismic excitations. This poses not only a threat to the life of people but also affects the socio-economic stability in the affected area. Therefore, it is necessary to assess such buildings' present vulnerability to make an educated decision regarding risk mitigation by seismic strengthening techniques such as retrofitting. 
However, it is economically and timely manner not feasible to inspect, repair, and augment every old building on an urban scale. As a result, a reliable rapid screening methods, namely Rapid Visual Screening (RVS), have garnered increasing interest among researchers and decision-makers alike. In this study, the effectiveness of five different Machine Learning (ML) techniques in vulnerability prediction applications have been investigated. The damage data of four different earthquakes from Ecuador, Haiti, Nepal, and South Korea, have been utilized to train and test the developed models. Eight performance modifiers have been implemented as variables with a supervised ML. The investigations on this paper illustrate that the assessed vulnerability classes by ML techniques were very close to the actual damage levels observed in the buildings.}, subject = {Maschinelles Lernen}, language = {en} }