@article{KleinKoenig, author = {Klein, Bernhard and K{\"o}nig, Reinhard}, title = {Computational Urban Planning: Using the Value Lab as Control Center}, series = {FCL Magazine, Special Issue Simulation Platform}, journal = {FCL Magazine, Special Issue Simulation Platform}, doi = {10.25643/bauhaus-universitaet.2601}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26011}, pages = {38 -- 45}, abstract = {Urban planning involves many aspects and various disciplines, demanding an asynchronous planning approach. The level of complexity rises with each aspect to be considered and makes it difficult to find universally satisfactory solutions. To improve this situation we propose a new approach, which complements traditional design methods with a computational urban planning method that can fulfil formalizable design requirements automatically. Based on this approach we present a design space exploration framework for complex urban planning projects. For a better understanding of the idea of design space exploration, we introduce the concept of a digital scout which guides planners through the design space and assists them in their creative explorations. The scout can support planners during manual design by informing them about potential impacts or by suggesting different solutions that fulfill predefined quality requirements. The planner can switch flexibly between a manually controlled and a completely automated design process. The developed system is presented using an exemplary urban planning scenario on two levels, from the street layout to the placement of building volumes. Based on Self-Organizing Maps, we implemented a method which makes it possible to visualize the multi-dimensional solution space in an easily analysable and comprehensible form.}, subject = {Stadtgestaltung}, language = {en} } @article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Die Stadt der Agenten und Automaten}, series = {FORUM - Architektur \& Bauforum}, journal = {FORUM - Architektur \& Bauforum}, doi = {10.25643/bauhaus-universitaet.2608}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26083}, abstract = {Planungsunterst{\"u}tzung durch die Analyse r{\"a}umlicher Prozesse mittels Computersimulationen. Erst wenn man - zumindest im Prinzip - versteht, wie eine Stadt mit ihren komplexen, verwobenen Vorg{\"a}ngen im Wesentlichen funktioniert, ist eine sinnvolle Stadtplanung m{\"o}glich. Denn jede Planung bedeutet einen Eingriff in den komplexen Organismus einer Stadt. Findet dieser Eingriff ohne Wissen {\"u}ber die Funktionsweise des Organismus statt, k{\"o}nnen auch die Auswirkungen nicht abgesch{\"a}tzt werden. Dieser Beitrag stellt dar, wie urbane Prozesse mittels Computersimulationen unter Zuhilfenahme so genannter Multi-Agenten-Systeme und Zellul{\"a}rer Automaten verstanden werden k{\"o}nnen.
}, subject = {CAD}, language = {de} } @phdthesis{Schwedler, author = {Schwedler, Michael}, title = {Integrated structural analysis using isogeometric finite element methods}, doi = {10.25643/bauhaus-universitaet.2737}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170130-27372}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {209}, abstract = {The gradual digitization in the architecture, engineering, and construction industry over the past fifty years has led to an extremely heterogeneous software environment, which today is embodied by the multitude of different digital tools and proprietary data formats used by the many specialists contributing to the design process in a construction project. While these projects become increasingly complex, the demands on financial efficiency and completion within a tight schedule grow at the same time. The digital collaboration of project partners has been identified as one key issue in successfully dealing with these challenges. Yet currently, the numerous software applications and their respective individual views on the design process severely impede that collaboration. An approach to establish a unified basis for digital collaboration, regardless of the existing software heterogeneity, is a comprehensive digital building model contributed to by all project partners. This type of data management, known as building information modeling (BIM), has many benefits, yet its adoption is associated with many difficulties and thus proceeds only slowly. One aspect in the field of conflicting requirements on such a digital model is the cooperation of architects and structural engineers. Traditionally, these two disciplines use different abstractions of reality for their models, which in consequence lead to incompatible digital representations thereof. The onset of isogeometric analysis (IGA) promised to ease the discrepancy between design and analysis model representations. Yet, that initial focus quickly shifted towards using these methods as a more powerful basis for numerical simulations. Furthermore, the isogeometric representation alone is not capable of solving the model abstraction problem. It is thus the intention of this work to contribute to an improved digital collaboration of architects and engineers by exploring an integrated analysis approach on the basis of a unified digital model and solid geometry expressed by splines. In the course of this work, an analysis framework is developed that utilizes such models to automatically conduct numerical simulations commonly required in construction projects. In essence, this allows structural analysis results to be retrieved from BIM models in a fast and simple manner, thereby facilitating rapid design iterations and profound design feedback. The BIM implementation Industry Foundation Classes (IFC) is reviewed with regard to its capabilities of representing the unified model. The current IFC schema strongly supports the use of redundant model data, a major pitfall in digital collaboration. Additionally, it does not allow the geometry to be described by volumetric splines. As the pursued approach builds upon a unique model for both architectural and structural design, and furthermore requires solid geometry, necessary schema modifications are suggested. Structural entities are modeled by volumetric NURBS patches, each of which constitutes an individual subdomain that, with regard to the analysis, is incompatible with the remaining full model.
The resulting consequences for numerical simulation are elaborated in this work. The individual subdomains have to be weakly coupled, for which the mortar method is used. Different approaches to discretize the interface traction fields are implemented and their respective impact on the analysis results is evaluated. All necessary coupling conditions are automatically derived from the related geometry model. The weak coupling procedure leads to a linear system of equations in saddle point form, which, owing to the volumetric modeling, is large in size and whose coefficient matrix has, due to the use of higher-degree basis functions, a high bandwidth. The peculiarities of the system require adapted solution methods that generally cause higher numerical costs than the standard procedures for symmetric, positive-definite systems do. Different methods to solve the specific system are investigated and an efficient parallel algorithm is finally proposed. When the structural analysis model is derived from the unified model in the BIM data, it generally does not initially meet the requirements on the discretization that are necessary to obtain sufficiently accurate analysis results. The consequently necessary patch refinements must be controlled automatically to allow for an entirely automatic analysis procedure. For that purpose, an empirical refinement scheme based on the geometrical and possibly mechanical properties of the specific entities is proposed. The level of refinement may be selectively manipulated by the structural engineer in charge. Furthermore, a Zienkiewicz-Zhu type error estimator is adapted for use with isogeometric analysis results. It is shown that this estimator can also be used to steer an adaptive refinement procedure.}, subject = {Finite-Elemente-Methode}, language = {en} } @phdthesis{Amiri, author = {Amiri, Fatemeh}, title = {Computational modelling of fracture with local maximum entropy approximations}, doi = {10.25643/bauhaus-universitaet.2631}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160719-26310}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {130}, abstract = {The key objective of this research is to study fracture with a meshfree method, local maximum entropy approximations, and to model fracture in thin shell structures with complex geometry and topology. This topic is of high relevance for real-world applications, for example in the automotive industry and in aerospace engineering. The shell structure can be described efficiently by meshless methods, which are capable of describing complex shapes as a collection of points instead of a structured mesh. In order to find the appropriate numerical method to achieve this goal, the first part of the work was the development of a method based on local maximum entropy (LME) shape functions together with enrichment functions used in partition of unity methods to discretize problems in linear elastic fracture mechanics. We obtain improved accuracy relative to the standard extended finite element method (XFEM) at a comparable computational cost. In addition, we keep the advantages of the LME shape functions, such as smoothness and non-negativity. We show numerically that optimal convergence (same as in FEM) for the energy norm and stress intensity factors can be obtained through the use of geometric (fixed area) enrichment with no special treatment of the nodes near the crack such as blending or shifting.
As the extension of this method to three-dimensional problems and complex thin shell structures with arbitrary crack growth is cumbersome, we developed a phase field model for fracture using LME. Phase field models provide a powerful tool to tackle moving interface problems, and have been extensively used in physics and materials science. Phase field methods are gaining popularity in a wide set of applications in applied science and engineering; recently, a second-order phase field approximation for brittle fracture, in which sharp crack discontinuities are modeled by a diffusive crack, has gathered significant interest in computational fracture. By minimizing the system energy with respect to the mechanical displacements and the phase field, subject to an irreversibility condition to avoid crack healing, this model can describe crack nucleation, propagation, branching and merging. One of the main advantages of the phase field modeling of fractures is the unified treatment of the interfacial tracking and mechanics, which potentially leads to simple, robust, scalable computer codes applicable to complex systems. In other words, this approximation reduces the implementation complexity considerably, because the numerical tracking of the fracture is not needed, at the expense of a high computational cost. We present a fourth-order phase field model for fracture based on local maximum entropy (LME) approximations. The higher-order continuity of the meshfree LME approximation allows the fourth-order phase field equations to be solved directly, without splitting the fourth-order differential equation into two second-order differential equations. Notably, in contrast to previous discretizations that use at least a quadratic basis, only linear completeness is needed in the LME approximation. We show that the crack surface can be captured more accurately in the fourth-order model than in the second-order model. Furthermore, fewer nodes are needed for the fourth-order model to resolve the crack path. Finally, we demonstrate the performance of the proposed meshfree fourth-order phase field formulation for five representative numerical examples. Computational results are compared to analytical solutions within linear elastic fracture mechanics and to experimental data for three-dimensional crack propagation. In the last part of this research, we present a phase field model for fracture in Kirchhoff-Love thin shells using the local maximum entropy (LME) meshfree method. Since the crack is a natural outcome of the analysis, it does not require an explicit representation and tracking, which is advantageous over techniques such as the extended finite element method that require tracking of the crack paths. The geometric description of the shell is based on statistical learning techniques that allow dealing with general point-set surfaces, avoiding a global parametrization, and can be applied to tackle surfaces of complex geometry and topology.
We show the flexibility and robustness of the present methodology for two examples: a plate in tension and a set of open connected pipes.}, language = {en} } @inproceedings{KoenigSchmitt, author = {K{\"o}nig, Reinhard and Schmitt, Gerhard}, title = {Backcasting and a new way of command in computational design}, series = {CAADence in Architecture Conference}, booktitle = {CAADence in Architecture Conference}, editor = {Szoboszlai, Mih{\'a}ly}, address = {Budapest}, doi = {10.25643/bauhaus-universitaet.2599}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-25996}, pages = {15 -- 25}, abstract = {It's not uncommon that analysis and simulation methods are used mainly to evaluate finished designs and to prove their quality, whereas the potential of such methods is to lead or control a design process from the beginning. Therefore, we introduce a design method that moves away from a "what-if" forecasting philosophy and increases the focus on backcasting approaches. We use the power of computation by combining sophisticated methods for generating designs with analysis methods, closing the gap between analysis and synthesis of designs. For the development of a future-oriented computational design support we need to be aware of the human designer's role. A productive combination of the excellence of human cognition with the power of modern computing technology is needed. We call this approach "cognitive design computing". The computational part aims to mimic the way a designer's brain works by combining state-of-the-art optimization and machine learning approaches with available simulation methods. The cognition part respects the complex nature of design problems by the provision of models for human-computation interaction. This means that a design problem is distributed between computer and designer. In the context of the conference slogan "back to command", we ask how we may imagine the command over a cognitive design computing system. We expect that designers will need to let go of control of some parts of the design process to machines, but in exchange they will get a new, powerful command over complex computing processes. This means that designers have to explore the potentials of their role as commanders of partially automated design processes. In this contribution we describe an approach for the development of a future cognitive design computing system with a focus on urban design issues. The aim of this system is to enable an urban planner to treat a planning problem as a backcasting problem by defining what performance a design solution should achieve and to automatically query or generate a set of best possible solutions. This kind of computational planning process offers proof that the designer meets the original, explicitly defined design requirements. A key way in which digital tools can support designers is by generating design proposals. Evolutionary multi-criteria optimization methods allow us to explore a multi-dimensional design space and provide a basis for the designer to evaluate contradicting requirements: a task urban planners are faced with frequently. We also reflect on why designers will give more and more control to machines. Therefore, by employing machine learning methods, we investigate first approaches to learn how designers use computational design support systems in combination with manual design strategies to deal with urban design problems.
By observing how designers work, it is possible to derive more complex artificial solution strategies that can help computers make better suggestions in the future.}, subject = {CAD}, language = {en} } @article{KoenigBauriedel, author = {K{\"o}nig, Reinhard and Bauriedel, Christian}, title = {Generating settlement structures: a method for urban planning and analysis supported by cellular automata}, series = {Environment and Planning B: Planning and Design}, journal = {Environment and Planning B: Planning and Design}, doi = {10.25643/bauhaus-universitaet.2605}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160624-26054}, pages = {602 -- 624}, abstract = {Previous models for the explanation of settlement processes pay little attention to the interactions between settlement spreading and road networks. On the basis of a dielectric breakdown model in combination with cellular automata, we present a method to precisely steer the generation of settlement structures with regard to their global and local density as well as the size and number of forming clusters. The resulting structures depend on the logic of how the dependence between the settlements and the road network is implemented in the simulation model. After analysing the state of the art, we begin with a discussion of the mutual dependence of roads and land development. Next, we elaborate a model that permits the precise control of permeability in the developing structure as well as the settlement density, using the fewest necessary control parameters. On the basis of different characteristic values, possible settlement structures are analysed and compared with each other. Finally, we reflect on the theoretical contribution of the model with regard to the context of urban dynamics.}, language = {en} } @inproceedings{KoenigVaroudis, author = {K{\"o}nig, Reinhard and Varoudis, Tasos}, title = {Spatial Optimizations: Merging depthmapX, spatial graph networks and evolutionary design in Grasshopper}, series = {Proceedings of eCAADe 34: Complexity \& Simplicity}, booktitle = {Proceedings of eCAADe 34: Complexity \& Simplicity}, address = {Oulu, Finland}, doi = {10.25643/bauhaus-universitaet.2604}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26040}, pages = {1 -- 6}, abstract = {In the Space Syntax community, the standard tool for computing all kinds of spatial graph network measures is depthmapX (Turner, 2004; Varoudis, 2012). The process of evaluating many design variants of networks is relatively complicated, since they need to be drawn in a separate CAD system, exported, and imported into depthmapX via the dxf file format. This procedure prevents a continuous integration into a design process. Furthermore, the standalone character of depthmapX makes it impossible to use its network centrality calculation for optimization processes. To overcome these limitations, we present in this paper the first steps of experimenting with a Grasshopper component (reference omitted until final version) that can access the functions of depthmapX and integrate them into Grasshopper/Rhino3D.
Here the component is implemented in such a way that it can be used directly for an evolutionary algorithm (EA) implemented in a Python scripting component in Grasshopper.}, language = {en} } @phdthesis{Ehrhardt, author = {Ehrhardt, Dirk}, title = {Zum Einfluss der Nachbehandlung auf die Gef{\"u}geausbildung und den Frost-Taumittelwiderstand der Betonrandzone}, doi = {10.25643/bauhaus-universitaet.3688}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20171120-36889}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {235}, abstract = {Die Festigkeitsentwicklung des Zementbetons basiert auf der chemischen Reaktion des Zementes mit dem Anmachwasser. Durch Nachbehandlungsmaßnahmen muss daf{\"u}r gesorgt werden, dass dem Zement gen{\"u}gend Wasser f{\"u}r seine Reaktion zur Verf{\"u}gung steht, da sonst ein Beton mit minderer Qualit{\"a}t entsteht. Die vorliegende Arbeit behandelt die grunds{\"a}tzlichen Fragen der Betonnachbehandlung bei Anwendung von Straßenbetonen. Im Speziellen wird die Frage des erforderlichen Nachbehandlungsbedarfs von h{\"u}ttensandhaltigen Kompositzementen betrachtet. Die Wirkung der Nachbehandlung wird anhand des erreichten Frost-Tausalz-Widerstandes und der Gef{\"u}geausbildung in der unmittelbaren Betonrandzone bewertet. Der Fokus der Untersuchungen lag auf abgezogenen Betonoberfl{\"a}chen. Es wurde ein Modell zur Austrocknung des jungen Betons erarbeitet. Es konnte gezeigt werden, dass in einer fr{\"u}hen Austrocknungsphase (Kapillarphase) keine kritische Austrocknung der Betonrandzone einsetzt, sondern der Beton ann{\"a}hernd gleichm{\"a}ßig {\"u}ber die H{\"o}he austrocknet. Es wurde ein Nomogramm entwickelt, mit dem die Dauer der Kapillarphase in Abh{\"a}ngigkeit von der Witterung f{\"u}r Straßenbetone abgesch{\"a}tzt werden kann. Eine kritische Austrocknung der wichtigen Randzone setzt nach Ende der Kapillarphase ein. F{\"u}r Betone unter Verwendung von Zementen mit langsamer Festigkeitsentwicklung ist die Austrocknung der Randzone nach Ende der Kapillarphase besonders ausgepr{\"a}gt. Im Ergebnis zeigen diese Betone dann einen geringen Frost-Tausalz-Widerstand. Mit Zementen, die eine 2d-Zementdruckfestigkeit ≥ 23,0 N/mm² aufweisen, wurde unabh{\"a}ngig von der Zementart (CEM I oder CEM II/B-S) auch dann ein hoher Frost-Tausalz-Widerstand erreicht, wenn keine oder eine schlechtere Nachbehandlung angewendet wurde. F{\"u}r die Praxis ergibt sich damit eine einfache M{\"o}glichkeit der Vorauswahl von geeigneten Zementen f{\"u}r den Verkehrsfl{\"a}chenbau. Betone, die unter Verwendung von Zementen mit langsamer Festigkeitsentwicklung hergestellt werden, erreichen einen hohen Frost-Tausalz-Widerstand nur mit einer geeigneten Nachbehandlung. Die Anwendung von fl{\"u}ssigen Nachbehandlungsmitteln (NBM gem{\"a}ß TL NBM-StB) erreicht eine {\"a}hnliche Wirksamkeit wie eine 5-t{\"a}gige Feuchtnachbehandlung. Voraussetzung f{\"u}r die Wirksamkeit der NBM ist, dass sie auf eine Betonoberfl{\"a}che ohne sichtbaren Feuchtigkeitsfilm (feuchter Glanz) aufgespr{\"u}ht werden. Besonders wichtig ist die Beachtung des richtigen Auftragszeitpunktes bei k{\"u}hler Witterung, da der Beton hier aufgrund der verlangsamten Zementreaktion l{\"a}nger Anmachwasser abst{\"o}ßt. Ein zu fr{\"u}her Auftrag des Nachbehandlungsmittels f{\"u}hrt zu einer Verschlechterung der Qualit{\"a}t der Betonrandzone.
Durch die Bereitstellung hydratationsabh{\"a}ngiger Transportkenngr{\"o}ßen (Feuchtetransport im Beton) konnten numerische Berechnungen zum Zusammenspiel zwischen der Austrocknung, der Nachbehandlung und der Gef{\"u}geentwicklung durchgef{\"u}hrt werden. Mit dem erstellten Berechnungsmodell wurden Parameterstudien durchgef{\"u}hrt. Die Berechnungen best{\"a}tigen die wesentlichen Erkenntnisse der Laboruntersuchungen. Dar{\"u}ber hinaus l{\"a}sst sich mit dem Berechnungsmodell zeigen, dass gerade bei langsam reagierenden Zementen und k{\"u}hler Witterung ohne eine Nachbehandlung eine sehr d{\"u}nne Randzone (ca. 500 µm - 1000 µm) mit stark erh{\"o}hter Kapillarporosit{\"a}t entsteht.}, subject = {Beton}, language = {de} } @phdthesis{Mueller, author = {M{\"u}ller, Matthias}, title = {Salt-frost Attack on Concrete - New Findings regarding the Damage Mechanism}, doi = {10.25643/bauhaus-universitaet.4868}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20230103-48681}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {The reduction of the cement clinker content is an important prerequisite for improving the CO2 footprint of concrete. Nevertheless, the durability of such concretes must be sufficient to guarantee a satisfactory service life of structures. Salt frost scaling resistance is a critical factor in this regard, as it is often diminished at increased clinker substitution rates. Furthermore, long-term experience with such concretes is still insufficient. A high salt frost scaling resistance thus cannot be achieved by applying only descriptive criteria, such as the concrete composition. It is therefore to be expected that, in the long term, a performance-based service life prediction will replace the descriptive concept. To achieve the important goal of clinker reduction for concretes also in cold and temperate climates, it is important to understand the underlying mechanisms of salt frost scaling. However, conflicting damage theories dominate the current state of the art. Consequently, the goal of this thesis was to evaluate existing damage theories and to examine them experimentally. It was found that only two theories have the potential to describe the salt frost attack satisfactorily - the glue spall theory and the cryogenic suction theory. The glue spall theory attributes the surface scaling to the interaction of an external ice layer with the concrete surface. Only when moderate amounts of deicing salt are present in the test solution can the resulting mechanical properties of the ice cause scaling. However, the results in this thesis indicate that severe scaling also occurs at deicing salt levels at which the ice is much too soft to damage concrete. Thus, the inability of the glue spall theory to account for all aspects of salt frost scaling was shown. The cryogenic suction theory is based on the eutectic behavior of salt solutions, which consist of two phases - water ice and liquid brine - between the freezing point and the eutectic temperature. The liquid brine acts as an additional moisture reservoir, which facilitates the growth of ice lenses in the surface layer of the concrete. The experiments in this thesis confirmed that ice formation in hardened cement paste increases due to the suction of brine at sub-zero temperatures. The extent of additional ice formation was influenced mainly by the porosity and by the chloride binding capacity of the hardened cement paste.
Consequently, the cryogenic suction theory plausibly describes the actual generation of scaling, but it has to be expanded by some crucial aspects to represent the salt frost scaling attack completely. The most important aspect is the intensive saturation process, which is ascribed to the so-called micro ice lens pump. Therefore, a combined damage theory was proposed which considers multiple saturation processes. Important aspects of this combined theory were confirmed experimentally. As a result, the combined damage theory constitutes a good basis for understanding the salt frost scaling attack on concrete at a fundamental level. Furthermore, a new approach was identified to account for the reduced salt frost scaling resistance of concretes with reduced clinker content.}, subject = {Beton}, language = {en} } @masterthesis{Krtschil, type = {Bachelor Thesis}, author = {Krtschil, Anna}, title = {Vergleich verschiedener Indikatoren in Bezug auf die {\"O}kobilanz von Geb{\"a}uden}, doi = {10.25643/bauhaus-universitaet.2434}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150716-24340}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {73}, abstract = {Im Rahmen der Bachelorarbeit werden zwei Indikatoren zur Auswertung einer {\"O}kobilanz gegen{\"u}bergestellt: Die Umweltbelastungspunkte der Schweiz werden mit dem niederl{\"a}ndischen ReCiPe verglichen.}, subject = {Umweltbilanz}, language = {de} }