@inproceedings{AibaMaegaitoSuzuki, author = {Aiba, Yoshihisa and Maegaito, Kentaro and Suzuki, Osamu}, title = {Iteration dynamical systems of discrete Laplacians on the plane lattice(I) (Basic properties and computer simulations of the dynamical systems)}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2917}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29178}, pages = {3}, abstract = {In this study we introduce the concept of a discrete Laplacian on the plane lattice and consider its iteration dynamical system. First we discuss some basic properties of the dynamical system that are to be proved. Next, by means of computer simulations, we show that we can realize the following phenomena quite well: (1) the crystals of water, (2) the designs of carpets and embroideries, (3) the change over time of the numbers of families of extinct animals, and (4) the ecosystems of living things. Hence we may expect that we can understand evolution and self-organization by use of these dynamical systems. Here we want to stress the following fact: although several well-known chaotic dynamical systems can describe chaotic phenomena, they have difficulties in describing evolution and self-organization.}, subject = {Architektur }, language = {en} } @inproceedings{AlDiabHenryBoulemia2003, author = {Al Diab, Ali and Henry, Eric and Boulemia, Cherif}, title = {A concerted and multi-criterion approach for helping to choose a Structure-Foundation system of building}, doi = {10.25643/bauhaus-universitaet.273}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2732}, year = {2003}, abstract = {The search for the best building design requires a concerted design approach for both structure and foundation. Our work is an application of this approach. Our objective is also to create an interactive tool, which will be able to define, at the early design stages, the orientations of structure and foundation systems that satisfy the client and the architect as well as possible. While the concerns of these two actors are primarily technical and economic, they also wish to apprehend the environmental and social dimensions of their projects. Thus, this approach is based on alternative studies and on a multi-criterion analysis. In this paper, we present the context of our work, the problem formulation that allows a concerted design of structure and foundation systems, and the process of identifying feasible solutions.}, subject = {Bauwerk}, language = {en} } @inproceedings{BaitschHartmann, author = {Baitsch, Matthias and Hartmann, Dietrich}, title = {A FRAMEWORK FOR THE INTERACTIVE VISUALIZATION OF ENGINEERING MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2919}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29194}, pages = {9}, abstract = {Interactive visualization based on 3D computer graphics nowadays is an indispensable part of any simulation software used in engineering. Nevertheless, the implementation of such visualization software components is often avoided in research projects because it is a challenging and potentially time-consuming task. In this contribution, a novel Java framework for the interactive visualization of engineering models is introduced. 
It supports the task of implementing engineering visualization software by providing adequate program logic as well as high-level classes for the visual representation of entities typical for engineering models. The presented framework is built on top of the open source visualization toolkit VTK. In VTK, a visualization model is established by connecting several filter objects in a so-called visualization pipeline. Although designing and implementing a good pipeline layout is demanding, VTK does not support the reuse of pipeline layouts directly. Our framework tailors VTK to engineering applications on two levels. On the first level, it adds new engineering-model-specific filter classes to VTK. On the second level, ready-made pipeline layouts for certain aspects of engineering models are provided. For instance, there is a pipeline class for one-dimensional elements like trusses and beams that is capable of showing the elements along with deformations and member forces. In order to facilitate the implementation of a graphical user interface (GUI) for each pipeline class, there exists a reusable Java Swing GUI component that allows the user to configure the appearance of the visualization model. Because of the flexible structure, the framework can be easily adapted and extended to new problem domains. Currently it is used in (i) an object-oriented p-version finite element code for design optimization, (ii) an agent-based monitoring system for dam structures and (iii) the simulation of destruction processes by controlled explosives based on multibody dynamics. Application examples from all three domains illustrate that the approach presented is powerful as well as versatile.}, subject = {Architektur }, language = {en} } @inproceedings{BartelsZimmermann, author = {Bartels, Jan-Hendrik and Zimmermann, J{\"u}rgen}, title = {MINIMIZING THE TOTAL DISCOUNTED COST OF DISMANTLING A NUCLEAR POWER PLANT}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2920}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29200}, pages = {9}, abstract = {For economic, technical or political reasons, about 100 nuclear power plants all over the world have been shut down to date. All these power stations are still waiting for their complete dismantling, which, for a single reactor, causes costs of up to one billion euros and takes up to 15 years. In our contribution we present a resource-constrained project scheduling approach minimizing the total discounted cost of dismantling a nuclear power plant. A project of dismantling a nuclear power plant can be subdivided into a number of disassembling activities. The execution of these activities requires time and scarce resources like manpower, special equipment or storage facilities for the contaminated material arising from the dismantling. Moreover, we have to consider several minimum and maximum time lags (temporal constraints) between the start times of the different activities. Finally, each disassembling activity can be processed in two alternative execution modes, which lead to different disbursements and determine the resource requirements of the considered activity. 
The optimization problem is to determine a start time and an execution mode for each activity, such that the discounted cost of the project is minimal, and neither the temporal constraints are violated nor the activities' resource requirements exceed the availability of any scarce resource at any point in time. In our contribution we introduce an appropriate multi-mode project scheduling model with minimum and maximum time lags as well as renewable and cumulative resources for the described optimization problem. Furthermore, we show that the considered optimization problem is NP-hard in the strong sense. For small problem instances, optimal solutions can be gained from a relaxation-based enumeration approach which is incorporated into a branch-and-bound algorithm. In order to be able to solve large problem instances, we also propose a truncated version of the devised branch-and-bound algorithm.}, subject = {Architektur }, language = {en} } @inproceedings{BauerKandlerWeiss, author = {Bauer, Marek and Kandler, A. and Weiß, Hendrik}, title = {MODEL OF TRAM LINE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2921}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29217}, pages = {11}, abstract = {From the passengers' perspective, punctuality is one of the most important features of tram operations. Unfortunately, in most cases this requirement is only insufficiently fulfilled. In this paper we present a simulation model for tram operation with special focus on punctuality. The aim is to obtain a helpful tool for designing timetables and for analyzing the effects of changing priorities for trams at traffic lights or the kind of track separation. A realization of tram operation is assumed to be a sequence of running times between successive stops and times spent by the tram at the stops. In this paper the running time is modeled by the sum of its mean value and a zero-mean random variable. With the help of multiple regression we find that the average running time is a function depending on the length of the sections and the number of intersections. The random component is modeled by a sum of two independent zero-mean random variables. One of these variables describes the disturbance caused by the process of waiting at an intersection and the other the disturbance caused by the process of driving. The time spent at a stop is assumed to be a random variable, too. Its distribution is estimated from given measurements of these stop times for different tram lines in Krak{\'o}w. Finally a special case of the introduced model is considered and numerical results are presented. This paper is associated with the CIVITAS-CARAVEL project "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. 
The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauerRichter, author = {Bauer, Marek and Richter, Matthias}, title = {STATISTICAL ANALYSIS OF TIME LOST BY TRAMS BEFORE DEPARTURE FROM STOPS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2922}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29226}, pages = {18}, abstract = {The ride of the tram along the line, defined by a timetable, consists of the travel times between the subsequent sections and the time spent by the tram at the stops. In the paper, statistical data collected in the city of Krakow is presented and evaluated. Under Polish conditions, the time trams spend at stops makes up a remarkable 30 \% of the total time of tram line operation. Moreover, this time is characterized by large variability. The time spent by a tram at a stop consists of the alighting and boarding time and the time lost at the stop after alighting and boarding has ended, but before departure. The alighting and boarding time itself usually depends on the random number of alighting and boarding passengers and also on the number of passengers who are inside the vehicle. However, the time spent at the stop after alighting and boarding has ended is the effect of certain random events, mainly the impossibility of departing from the stop, caused by the lack of priorities for public transport vehicles. The main focus of the talk lies on the description and the modelling of these effects. This paper is associated with the CIVITAS-CARAVEL project "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauriedelDonathKoenig, author = {Bauriedel, Christian and Donath, Dirk and K{\"o}nig, Reinhard}, title = {COMPUTER-SUPPORTED SIMULATIONS FOR URBAN PLANNING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2923}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29235}, pages = {10}, abstract = {The idea of a simulation program to support urban planning is explained: four different, clearly defined development paths can be calculated for the rebuilding of a shrinking town. Aided by self-organization principles, a complex system can be created. The dynamics are based on the action patterns of single actors, whose behaviour in turn depends cyclically on the generated structure. Global influences, which control the development, can be divided into a spatial, a socioeconomic, and an organizational-juridical level. The simulation model should offer conclusions on new planning strategies, especially in the context of the creation process of rebuilding measures. An example of a transportation system is shown by means of prototypes for the visualisation of the dynamic development process.}, subject = {Architektur }, language = {en} } @inproceedings{BeerFirmenich2003, author = {Beer, Daniel G. 
and Firmenich, Berthold}, title = {Freigabebest{\"a}nde von strukturierten Objektversionsmengen in Bauprojekten}, doi = {10.25643/bauhaus-universitaet.8}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-80}, year = {2003}, abstract = {Die verteilte Bearbeitung gemeinsamer Produktmodelle ist im Bauwesen Gegenstand der aktuellen Forschung. Der vorgestellte L{\"o}sungsansatz bewegt sich in einem Spannungsfeld: Zum einen sollen die zu bearbeitenden Teilmengen des Produktmodells sehr flexibel durch die Planer zu bilden sein, zum anderen m{\"u}ssen Revisions- und Freigabest{\"a}nde dauerhaft und unver{\"a}nderlich definiert werden. In einer versionierten Umgebung mit vielen Abh{\"a}ngigkeiten sind diese Anforderungen schwierig zu erf{\"u}llen. Der vorgestellte L{\"o}sungsansatz zeigt die Bildung von Revisions- und Freigabest{\"a}nden, ohne die flexible verteilte Bearbeitung einzuschr{\"a}nken. Die Freigabest{\"a}nde m{\"u}ssen bestimmte Eigenschaften erf{\"u}llen: Es darf beispielsweise nur eine Version eines Objekts enthalten sein und es m{\"u}ssen die Bindungen zu anderen Objektversionen in einer konsistenten Weise ber{\"u}cksichtigt werden. Es wird eine mathematische Beschreibung gew{\"a}hlt, die auf der Mengenlehre und der Graphentheorie basiert.}, subject = {Bauvorhaben}, language = {de} } @article{BeerFirmenichRichter2004, author = {Beer, Daniel G. and Firmenich, Berthold and Richter, Torsten}, title = {A Concept for CAD Systems with Persistent Versioned Data Models}, doi = {10.25643/bauhaus-universitaet.204}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2046}, year = {2004}, abstract = {The synchronous distributed processing of common source code in the software development process is supported by well-proven methods. The planning process has similarities with the software development process. However, there are no consistent and similarly successful methods for applications in construction projects. A new approach is proposed in this contribution.}, subject = {Produktmodell}, language = {en} } @inproceedings{BeranDlask, author = {Beran, V{\'a}clav and Dlask, Petr}, title = {CONSTRUCTION SPEED AND CASH FLOW OPTIMISATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2926}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29269}, pages = {10}, abstract = {Practical examples show that the cost flow can be improved and the total amount of money spent in construction and further use may be cut significantly. The calculation is based on spreadsheet calculations, which are very easy to develop on most PCs nowadays. Construction works are a field where the evaluation of cash flow can and should be applied. Decisions about cash flow in construction are decisions with long-term impact and long-term memory. Mistakes from the distant past have a massive impact on the present situation and on the far economic future of economic activities. Two approaches exist: the just-in-time (JIT) approach and the life cycle cost (LCC) approach. The calculation example shows the dynamic results for the production speed as opposed to a stable flow of production over the duration of the activities. More sophisticated rescheduling towards an optimal solution might bring extra profit in return. 
In the technologies and organizational processes for industrial buildings, railways and road reconstruction, public utilities and housing developments, there are assembly procedures that are very appropriate for the given purpose; complicated research, development and innovation projects are all well suited to these kinds of applications. Large investments and all publicly invested money may be spent more efficiently if an optimised speed strategy can be calculated.}, subject = {Architektur }, language = {en} } @inproceedings{BeranHromada, author = {Beran, V{\'a}clav and Hromada, E.}, title = {SOFTWARE FOR PROJECT RELIABILITY ESTIMATION AND RISK EVALUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2925}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29255}, pages = {16}, abstract = {The contribution presents a model that is able to simulate construction duration and cost for a building project. This model predicts a set of expected project costs and a duration schedule depending on input parameters such as production speed, scope of work, time schedule, bonding conditions and maximum and minimum deviations from scope of work and production speed. The simulation model is able to calculate, on the basis of an input level of probability, the adequate construction cost and time duration of a project. The reciprocal view is concerned with finding the adequate level of probability for construction cost and activity durations. Among the interpretive outputs of the application software is the compilation of a presumed dynamic progress chart. This progress chart represents the expected scenario of development of a building project with the mapping of potential time dislocations for particular activities. The calculation of a presumed dynamic progress chart is based on an algorithm, which calculates mean values as a partial result of the simulated building project. Construction cost and time models are, in many ways, useful tools in project management. Clients are able to make proper decisions about the time and cost schedules of their investments. Consequently, building contractors are able to schedule predicted project cost and duration before any decision is finalized.}, subject = {Architektur }, language = {en} } @inproceedings{Bilchuk, author = {Bilchuk, Irina}, title = {GEOMETRIC IDENTIFICATION OF OBJECTS IN CIVIL ENGINEERING APPLICATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2927}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29274}, pages = {21}, abstract = {Objects for civil engineering applications can be identified with their reference in memory, their alpha-numeric name or their geometric location. Particularly in graphic user interfaces, it is common to identify objects geometrically by selection with the mouse. As the number of geometric objects in a graphic user interface grows, it becomes increasingly important to treat the basic operations add, search and remove for geometric objects with great efficiency. Guttman has proposed the Region-Tree (R-tree) for geometric identification in an environment which uses pages on disc as the data structure. Minimal bounding rectangles are used to structure the data in such a way that neighborhood relations can be described effectively. 
The literature shows that the parameters which influence the efficiency of the R-trees have been studied extensively, but without conclusive results. The goal of the research which is reported in this paper is to determine reliably the parameters which significantly influence the efficiency of R-trees for geometric identification in technical drawings. In order to make this investigation conclusive, it must be performed with the best available software technology. Therefore, an object-oriented software implementation of the method is developed. This implementation is tested with technical drawings containing many thousands of geometric objects. These drawings are created automatically by a stochastic generator which is incorporated into a test bed consisting of an editor and a visualiser. This test bed is used to obtain statistics for the main factors which affect the efficiency of R-trees. The investigation shows that the following main factors which affect the efficiency can be identified reliably: the number of geometric objects on the drawing, the minimum and maximum number of children of a node of the tree, and the maximum width and height of the minimal bounding rectangles of the geometric objects relative to the size of the drawing.}, subject = {Architektur }, language = {en} } @inproceedings{BockGuerlebeck, author = {Bock, Sebastian and G{\"u}rlebeck, Klaus}, title = {A Coupled Ritz-Galerkin Approach Using Holomorphic and Anti-holomorphic Functions}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2928}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29281}, pages = {14}, abstract = {The contribution focuses on the development of a basic computational scheme that provides a suitable calculation environment for the coupling of analytical near-field solutions with numerical standard procedures in the far-field of the singularity. The proposed calculation scheme uses classical methods of complex function theory, which can be generalized to 3-dimensional problems by using the framework of hypercomplex analysis. The adapted approach is mainly based on the factorization of the Laplace operator by the Cauchy-Riemann operator, where exact solutions of the respective differential equation are constructed by using an orthonormal basis of holomorphic and anti-holomorphic functions.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeKnockDeSchepper, author = {Brackx, Fred and De Knock, B. and De Schepper, Hennie}, title = {A MULTI--DIMENSIONAL HILBERT TRANSFORM IN ANISOTROPIC CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2929}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29297}, pages = {15}, abstract = {In earlier research, generalized multidimensional Hilbert transforms have been constructed in m-dimensional Euclidean space, in the framework of Clifford analysis. Clifford analysis, centred around the notion of monogenic functions, may be regarded as a direct and elegant generalization to higher dimension of the theory of holomorphic functions in the complex plane. The considered Hilbert transforms, usually obtained as a part of the boundary value of an associated Cauchy transform in m+1 dimensions, might be characterized as isotropic, since the metric in the underlying space is the standard Euclidean one. 
In this paper we adopt the idea of a so-called anisotropic Clifford setting, which leads to the introduction of a metric dependent m-dimensional Hilbert transform, showing, at least formally, the same properties as the isotropic one. The Hilbert transform being an important tool in signal analysis, this metric dependent setting has the advantage of allowing the adjustment of the co-ordinate system to possible preferential directions in the signals to be analyzed. A striking result to be mentioned is that the associated anisotropic (m+1)-dimensional Cauchy transform is no longer uniquely determined, but may stem from a diversity of (m+1)-dimensional "mother" metrics.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperDeSchepperetal., author = {Brackx, Fred and De Schepper, Hennie and De Schepper, Nele and Sommen, Frank}, title = {HERMITIAN CLIFFORD-HERMITE WAVELETS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2931}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29313}, pages = {13}, abstract = {The one-dimensional continuous wavelet transform is a successful tool for signal and image analysis, with applications in physics and engineering. Clifford analysis offers an appropriate framework for taking wavelets to higher dimension. In the usual orthogonal case Clifford analysis focusses on monogenic functions, i.e. null solutions of the rotation invariant vector valued Dirac operator ∂, defined in terms of an orthogonal basis for the quadratic space Rm underlying the construction of the Clifford algebra R0,m. An intrinsic feature of this function theory is that it encompasses all dimensions at once, as opposed to a tensorial approach with products of one-dimensional phenomena. This has allowed for a very specific construction of higher dimensional wavelets and the development of the corresponding theory, based on generalizations of classical orthogonal polynomials on the real line, such as the radial Clifford-Hermite polynomials introduced by Sommen. In this paper, we pass to the Hermitian Clifford setting, i.e. we let the same set of generators produce the complex Clifford algebra C2n (with even dimension), which we equip with a Hermitian conjugation and a Hermitian inner product. Hermitian Clifford analysis then focusses on the null solutions of two mutually conjugate Hermitian Dirac operators which are invariant under the action of the unitary group. In this setting we construct new Clifford-Hermite polynomials, starting in a natural way from a Rodrigues formula which now involves both Dirac operators mentioned. Due to the specific features of the Hermitian setting, four different types of polynomials are obtained, two types of even degree and two types of odd degree. 
These polynomials are used to introduce a new continuous wavelet transform, after thorough investigation of all necessary properties of the involved polynomials, the mother wavelet and the associated family of wavelet kernels.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperSommen, author = {Brackx, Fred and De Schepper, Nele and Sommen, Frank}, title = {Clifford-Hermite and Two-Dimensional Clifford-Gabor Filters For Early Vision}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2930}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29303}, pages = {22}, abstract = {Image processing has been much inspired by the human vision, in particular with regard to early vision. The latter refers to the earliest stage of visual processing responsible for the measurement of local structures such as points, lines, edges and textures in order to facilitate subsequent interpretation of these structures in higher stages (known as high level vision) of the human visual system. This low level visual computation is carried out by cells of the primary visual cortex. The receptive field profiles of these cells can be interpreted as the impulse responses of the cells, which are then considered as filters. According to the Gaussian derivative theory, the receptive field profiles of the human visual system can be approximated quite well by derivatives of Gaussians. Two mathematical models suggested for these receptive field profiles are on the one hand the Gabor model and on the other hand the Hermite model which is based on analysis filters of the Hermite transform. The Hermite filters are derivatives of Gaussians, while Gabor filters, which are defined as harmonic modulations of Gaussians, provide a good approximation to these derivatives. It is important to note that, even if the Gabor model is more widely used than the Hermite model, the latter offers some advantages like being an orthogonal basis and having better match to experimental physiological data. In our earlier research both filter models, Gabor and Hermite, have been developed in the framework of Clifford analysis. Clifford analysis offers a direct, elegant and powerful generalization to higher dimension of the theory of holomorphic functions in the complex plane. In this paper we expose the construction of the Hermite and Gabor filters, both in the classical and in the Clifford analysis framework. We also generalize the concept of complex Gaussian derivative filters to the Clifford analysis setting. Moreover, we present further properties of the Clifford-Gabor filters, such as their relationship with other types of Gabor filters and their localization in the spatial and in the frequency domain formalized by the uncertainty principle.}, subject = {Architektur }, language = {en} } @inproceedings{BraunesDonath, author = {Braunes, J{\"o}rg and Donath, Dirk}, title = {COMPUTERGEST{\"U}TZTE PLANUNG IM BESTAND VON DER DIGITALEN BESTANDSERFASSUNG ZUR PLANUNGSUNTERST{\"U}TZUNG IM CAAD}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2933}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29338}, pages = {10}, abstract = {F{\"u}r eine gesicherte Planung im Bestand, sind eine F{\"u}lle verschiedenster Informationen zu ber{\"u}cksichtigen, welche oft erst w{\"a}hrend des Planungs- oder Bauprozesses gewonnen werden. 
Voraussetzung hierf{\"u}r bildet immer eine Bestandserfassung. Zwar existieren Computerprogramme zur Unterst{\"u}tzung der Bestandserfassung, allerdings handelt es sich hierbei ausschließlich um Insell{\"o}sungen. Der Export der aufgenommenen Daten in ein Planungssystem bedingt Informationsverluste. Trotz der potentiellen M{\"o}glichkeit aktueller CAAD/BIM Systeme zur Verwaltung von Bestandsdaten, sind diese vorrangig f{\"u}r die Neubauplanung konzipiert. Die durchg{\"a}ngige Bearbeitung von Sanierungsprojekten von der Erfassung des Bestandes {\"u}ber die Entwurfs- und Genehmigungsplanung bis zur Ausf{\"u}hrungsplanung innerhalb eines CAAD/BIM Systems wird derzeit nicht ad{\"a}quat unterst{\"u}tzt. An der Professur Informatik in der Architektur (InfAR) der Fakult{\"a}t Architektur der Bauhaus-Universit{\"a}t Weimar entstanden im Rahmen des DFG Sonderforschungsbereich 524 "Werkzeuge und Konstruktionen f{\"u}r die Revitalisierung von Bauwerken" in den letzten Jahren Konzepte und Prototypen zur fachlich orientierten Unterst{\"u}tzung der Planung im Bestand. Der Fokus lag dabei in der Erfassung aller planungsrelevanter Bestandsdaten und der Abbildung dieser in einem dynamischen Bauwerksmodell. Aufbauend auf diesen Forschungsarbeiten befasst sich der Artikel mit der kontextbezogenen Weiterverwendung und gezielten Bereitstellung von Bestandsdaten im Prozess des Planens im Bestand und der Integration von Konzepten der planungsrelevanten Bestandserfassung in markt{\"u}bliche CAAD/BIM Systeme.}, subject = {Architektur }, language = {de} } @inproceedings{BrossmannMueller, author = {Broßmann, Marko and M{\"u}ller, Karl-Heinz}, title = {STOCHASTISCHE ANALYSE VON STAHLBETONBALKEN IM GRENZZUSTAND DER ADAPTION UNTER BER{\"u}CKSICHTIGUNG DER STEIFIGKEITSDEGRADATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2934}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29341}, pages = {20}, abstract = {Am Beispiel eines 3-feldrigen Durchlauftr{\"a}gers wird die Versagenswahrscheinlichkeit von wechselnd belasteten Stahlbetonbalken bez{\"u}glich des Grenzzustandes der Adaption (Einspielen, shakedown) untersucht. Die Adaptionsanalyse erfolgt unter Ber{\"u}cksichtigung der beanspruchungschabh{\"a}ngigen Degradation der Biegesteifigkeit infolge Rissbildung. Die damit verbundene mechanische Problemstellung kann auf die Adaptionsanalyse linear elastisch - ideal plastischer Balkentragwerke mit unbekannter aber begrenzter Biegesteifigkeit zur{\"u}ckgef{\"u}hrt werden. Die Versagenswahrscheinlichkeit wird unter Ber{\"u}cksichtigung stochastischer Tragwerks- und Belastungsgr{\"o}ßen berechnet. Tragwerkseigenschaften und st{\"a}ndige Lasten gelten als zeitunabh{\"a}ngige Zufallsgr{\"o}ßen. Zeitlich ver{\"a}nderliche Lasten werden als nutzungsdauerbezogene Extremwerte POISSONscher Rechteck-Pulsprozesse unter Ber{\"u}cksichtigung zeitlicher {\"U}berlagerungseffekte modelliert, so dass die Versagenswahrscheinlichkeit ebenfalls eine nutzungsdauerbezogene Gr{\"o}ße ist. Die mechanischen Problemstellungen werden numerisch mit der mathematischen Optimierung gel{\"o}st. 
Die Versagenswahrscheinlichkeit wird auf statistischem Weg mit der Monte-Carlo-Methode gesch{\"a}tzt.}, subject = {Architektur }, language = {de} } @inproceedings{BubnerThierfelder2000, author = {Bubner, Andre and Thierfelder, Jan}, title = {Austausch von planungsrelevanten Daten im Stahlbau}, doi = {10.25643/bauhaus-universitaet.614}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-6146}, year = {2000}, abstract = {Datenaustausch, Daten resp. Produktdatenmodelle sind seit mehreren Jahren Themen in der Forschung. Verschiedene Forschungsprojekte und Initiativen diverser Firmen f{\"u}hrten zu bereichs{\"u}bergreifenden Ans{\"a}tzen wie IFC und verschiedenen STEP-AP´s. Speziell im Stahlbau sind die Projekte >Produktschnittstelle Stahlbau< und >CIMsteel< entwickelt, weiterentwickelt und {\"u}berarbeitet worden. Als Weiterentwicklung der bisher existierenden Austauschformate versuchen neuere Ans{\"a}tze den Nutzen {\"u}ber die reine Daten{\"u}bermittlung hinaus zu erweitern. So integrieren diese L{\"o}sungsvorschl{\"a}ge Aspekte der Kommunikation, der Zusammenarbeit und des Managements. Des weiteren {\"u}bernehmen sie Aufgaben der Daten- und Modellverwaltung. Somit erfolgt eine digitale Abbildung unter Einbezug s{\"a}mtlicher ermittelter Daten. Resultierend aus den besonderen Randbedingungen im Bauwesen, wird ein Bauwerksmodell aus untereinander in Beziehung gesetzten Dom{\"a}nenmodellen aufgebaut}, subject = {Datenaustausch}, language = {de} } @inproceedings{BultheelJansenMaesetal., author = {Bultheel, Adhemar and Jansen, M. and Maes, J. and Van Aerschot, W. and Vanraes, E.}, title = {SUBDIVIDE AND CONQUER RESOLUTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2909}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29091}, pages = {47}, abstract = {This contribution will be freewheeling in the domain of signal, image and surface processing and touch briefly upon some topics that have been close to the heart of people in our research group. A lot of the research of the last 20 years in this domain that has been carried out world wide is dealing with multiresolution. Multiresolution allows to represent a function (in the broadest sense) at different levels of detail. This was not only applied in signals and images but also when solving all kinds of complex numerical problems. Since wavelets came into play in the 1980's, this idea was applied and generalized by many researchers. Therefore we use this as the central idea throughout this text. Wavelets, subdivision and hierarchical bases are the appropriate tools to obtain these multiresolution effects. We shall introduce some of the concepts in a rather informal way and show that the same concepts will work in one, two and three dimensions. The applications in the three cases are however quite different, and thus one wants to achieve very different goals when dealing with signals, images or surfaces. Because completeness in our treatment is impossible, we have chosen to describe two case studies after introducing some concepts in signal processing. These case studies are still the subject of current research. The first one attempts to solve a problem in image processing: how to approximate an edge in an image efficiently by subdivision. The method is based on normal offsets. The second case is the use of Powell-Sabin splines to give a smooth multiresolution representation of a surface. 
In this context we also illustrate the general method of construction of a spline wavelet basis using a lifting scheme.}, subject = {Architektur }, language = {en} } @inproceedings{CacaoConstalesKrausshar, author = {Cacao, Isabel and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {BESSEL FUNCTIONS AND HIGHER DIMENSIONAL DIRAC TYPE EQUATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2936}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29366}, pages = {8}, abstract = {In this paper we study the structure of the solutions to higher dimensional Dirac type equations generalizing the known λ-hyperholomorphic functions, where λ is a complex parameter. The structure of the solutions to the system of partial differential equations (D- λ) f=0 show a close connection with Bessel functions of first kind with complex argument. The more general system of partial differential equations that is considered in this paper combines Dirac and Euler operators and emphasizes the role of the Bessel functions. However, contrary to the simplest case, one gets now Bessel functions of any arbitrary complex order.}, subject = {Architektur }, language = {en} } @article{Carpo2003, author = {Carpo, Mario}, title = {Die digitale Architektur nach der ersten Begeisterungswelle : vom irrationalen {\"U}berschwang zur irrationalen Mutlosigkeit}, doi = {10.25643/bauhaus-universitaet.1249}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20080228-13158}, year = {2003}, abstract = {Wissenschaftliches Kolloquium vom 24. bis 27. April 2003 in Weimar an der Bauhaus-Universit{\"a}t zum Thema: ‚MediumArchitektur - Zur Krise der Vermittlung'}, subject = {Architektur}, language = {de} } @inproceedings{ChangChang, author = {Chang, Wei-Tsang and Chang, Teng-Wen}, title = {TIME-BASED FORM TRANSFORMATION WITH FOLDING SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2937}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29371}, pages = {10}, abstract = {Design activity could be treated as state transition computationally. In stepwise processing, in-between form-states are not easily observed. However, in this research time-based concept is introduced and applied in order to bridge the gap. In architecture, folding is one method of form manipulation and architects also want to search for alternatives by this operation. Besides, folding operation has to be defined and parameterized before time factor is involved as a variable of folding. As a result, time-based transformation provides sequential form states and redirects design activity.}, subject = {Architektur }, language = {en} } @inproceedings{ChristovPetrova1997, author = {Christov, Christo T. and Petrova, Lyllia B.}, title = {Computer-Aided Static Analysis of Complex Prismatic Orthotropic Shell Structures by the Analytical Finite Strip Method}, doi = {10.25643/bauhaus-universitaet.435}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4358}, year = {1997}, abstract = {The paper describes a development of the analytical finite strip method (FSM) in displacements for linear elastic static analysis of simply supported at their transverse ends complex orthotropic prismatic shell structures with arbitrary open or closed deformable contour of the cross-section under general external loads. 
A number of bridge top structures, some roof structures and others are related to the studied class. By longitudinal sections the prismatic thin-walled structure is discretized to a limited number of plane straight strips which are connected continuously at their longitudinal ends to linear joints. As basic unknowns are assumed the three displacements of points from the joint lines and the rotation to these lines. In longitudinal direction of the strips the unknown quantities and external loads are presented by single Fourier series. In transverse direction of each strips the unknown values are expressed by hyperbolic functions presenting an exact solution of the corresponding differential equations of the plane straight strip. The basic equations and relations for the membrane state, for the bending state and for the total state of the finite strip are obtained. The rigidity matrix of the strip in the local and global co-ordinate systems is derived. The basic relations of the structure are given and the general stages of the analytical FSM are traced. For long structures FSM is more efficient than the classic finite element method (FEM), since the problem dimension is reduced by one and the number of unknowns decreases. In comparison with the semi-analytical FSM, the analytical FSM leads to a practically precise solution, especially for wider strips, and provides compatibility of the displacements and internal forces along the longitudinal linear joints.}, subject = {Tragwerk}, language = {en} } @article{Colomina2003, author = {Colomina, Beatriz}, title = {Skinless architecture}, doi = {10.25643/bauhaus-universitaet.1254}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20080304-13222}, year = {2003}, abstract = {Wissenschaftliches Kolloquium vom 24. bis 27. April 2003 in Weimar an der Bauhaus-Universit{\"a}t zum Thema: ‚MediumArchitektur - Zur Krise der Vermittlung'}, subject = {Architektur}, language = {en} } @inproceedings{ConstalesKrausshar, author = {Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {ON THE NAVIER-STOKES EQUATION WITH FREE CONVECTION IN STRIP DOMAINS AND 3D TRIANGULAR CHANNELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2938}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29387}, pages = {12}, abstract = {The Navier-Stokes equations and related ones can be treated very elegantly with the quaternionic operator calculus developed in a series of works by K. Guerlebeck, W. Sproeossig and others. This study will be extended in this paper. In order to apply the quaternionic operator calculus to solve these types of boundary value problems fully explicitly, one basically needs to evaluate two types of integral operators: the Teodorescu operator and the quaternionic Bergman projector. While the integral kernel of the Teodorescu transform is universal for all domains, the kernel function of the Bergman projector, called the Bergman kernel, depends on the geometry of the domain. With special variants of quaternionic holomorphic multiperiodic functions we obtain explicit formulas for three dimensional parallel plate channels, rectangular block domains and regular triangular channels. 
The explicit knowledge of the integral kernels makes it then possible to evaluate the operator equations in order to determine the solutions of the boundary value problem explicitly.}, subject = {Architektur }, language = {en} } @inproceedings{ContensinMaltret1997, author = {Contensin, M. and Maltret, J.-L.}, title = {Computer Aided Lighting for architects and designers}, doi = {10.25643/bauhaus-universitaet.448}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4488}, year = {1997}, abstract = {Designing lightings in a 3D-scene is a general complex task for building conception as it is submitted to many constraints such as aesthetics or ergonomics. This is often achieved by experimental trials until reaching an acceptable result. Several rendering softwares (such as Radiance) allow an accurate computation of lighting for each point in a scene, but this is a long process and any modification requires the whole scene to be rendered again to get the result. The first guess is empirical, provided by experience of the operator and rarely submitted to scientific considerations. Our aim is to provide a tool for helping designers to achieve this work in the scope of global illumination. We consider the problem when some data are asked for : on one hand the mean lighting in some zones (for example on a desktop) and on the other hand some qualitative information about location of sources (spotlights on the ceiling, halogens on north wall,...). The system we are conceiving computes the number of light sources, their position and intensities, in order to obtain the lighting effects defined by the user. The algorithms that we use bind together radiosity computations with resolution of a system of constraints.}, subject = {Architektur}, language = {en} } @phdthesis{Coulon1997, author = {Coulon, Carl-Helmut}, title = {Strukurorientiertes Fallbasiertes Schließen}, doi = {10.25643/bauhaus-universitaet.24}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20040212-265}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {1997}, abstract = {Das Ziel dieser Arbeit war es, durch Verwendung geeigneter vorhandener CAD-Pl{\"a}ne die Bearbeitung neuer CAD-Pl{\"a}ne zu unterst{\"u}tzen. Entstanden ist ein generischer Ansatz zum fallbasierten Schließens. Da in CAD-Pl{\"a}nen die r{\"a}umliche Struktur eine wichtige Rolle spielt, ist das Konzept auf strukturorientierte Anwendungen ausgerichtet. Deshalb bezeichne ich es als ein Konzept zum " strukturorientierten fallbasierten Schließen". Die Arbeit spezifiziert das Minimum an Wissen, welches zur Suche und Wiederverwendung von F{\"a}llen ben{\"o}tigt wird, wie das dar{\"u}ber hinausgehende Wissen verarbeitet wird, welche Zusammenh{\"a}nge es zum Beispiel zwischen Vergleichs- und Anpassungswissen gibt und wie man das Wissen modellieren kann. Zur Erl{\"a}uterung wird das ben{\"o}tigte Wissen anhand verschiedener Anwendungen dargestellt. Das in der Arbeit vorgestellte Konzept erlaubt die Erg{\"a}nzung, Detaillierung und Korrektur einer Anfrage. Die beiden entscheidenden Algorithmen dienen dem Vergleich von Anfrage und Fall und der Anpassung der Information des Falles zur Modifikation der Anfrage.}, subject = {Fallbasiertes Schließen}, language = {de} } @inproceedings{CruzFalcaoMalonek, author = {Cruz, J. F. and Falc{\~a}o, M. 
Irene and Malonek, Helmuth Robert}, title = {3D-MAPPINGS AND THEIR APPROXIMATION BY SERIES OF POWERS OF A SMALL PARAMETER}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2940}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29406}, pages = {14}, abstract = {In classical complex function theory the geometric mapping property of conformality is closely linked with complex differentiability. In contrast to the planar case, in higher dimensions the set of conformal mappings is only the set of M{\"o}bius transformations. Unfortunately, the theory of generalized holomorphic functions (for historical reasons they are called monogenic functions) developed on the basis of Clifford algebras does not cover the set of M{\"o}bius transformations in higher dimensions, since M{\"o}bius transformations are not monogenic. But on the other hand, monogenic functions are hypercomplex differentiable functions, and the question arises whether, from this point of view, they can still play a special role for other types of 3D-mappings, for instance quasi-conformal ones. On the occasion of the 16th IKM, 3D-mapping methods based on the application of Bergman's reproducing kernel approach (BKM) were discussed. Almost all authors working before that with BKM in the Clifford setting were only concerned with the general algebraic and functional analytic background which allows the explicit determination of the kernel in special situations. The main goal of the abovementioned contribution was the numerical experiment, using Maple software specially developed for that purpose. Since BKM is only one of a great variety of concrete numerical methods developed for mapping problems, our goal is to present an approach to 3D-mappings that is completely different from BKM. In fact, it is an extension of ideas of L. V. Kantorovich to the 3-dimensional case by using reduced quaternions and some suitable series of powers of a small parameter. Whereas in the Clifford case of BKM the recovery of the mapping function itself and its relation to the monogenic kernel function is still an open problem, this approach avoids such difficulties and leads to an approximation by monogenic polynomials depending on that small parameter.}, subject = {Architektur }, language = {en} } @inproceedings{DeaconvanRooyen, author = {Deacon, Michael-John and van Rooyen, G.C.}, title = {DISTRIBUTED COLLABORATION: ENGINEERING PRACTICE REQUIREMENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2941}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29410}, pages = {8}, abstract = {Designing a structure follows a pattern of creating a structural design concept, executing a finite element analysis and developing a design model. A project was undertaken to create computer support for executing these tasks within a collaborative environment. This study focuses on developing a software architecture that integrates the various structural design aspects into a seamless functional collaboratory that satisfies engineering practice requirements. The collaboratory is to support both homogeneous collaboration, i.e. between users operating on the same model, and heterogeneous collaboration, i.e. between users operating on different model types. 
Collaboration can take place synchronously or asynchronously, and the information exchange is done either at the granularity of objects or at the granularity of models. The objective is to determine from practicing engineers which configurations they regard as best and what features are essential for working in a collaborative environment. Based on the suggestions of these engineers a specification of a collaboration configuration that satisfies engineering practice requirements will be developed.}, subject = {Architektur }, language = {en} } @inproceedings{DoganArditiGunaydin, author = {Dogan, Sevgi Zeynep and Arditi, D. and Gunaydin, H. Murat}, title = {COMPARISON OF ANN AND CBR MODELS FOR EARLY COST PREDICTION OF STRUCTURAL SYSTEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2942}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29421}, abstract = {Reasonably accurate cost estimation of the structural system is quite desirable at the early stages of the design process of a construction project. However, the numerous interactions among the many cost-variables make the prediction difficult. Artificial neural networks (ANN) and case-based reasoning (CBR) are reported to overcome this difficulty. This paper presents a comparison of CBR and ANN augmented by genetic algorithms (GA) conducted by using spreadsheet simulations. GA was used to determine the optimum weights for the ANN and CBR models. The cost data of twenty-nine actual cases of residential building projects were used as an example application. Two different sets of cases were randomly selected from the data set for training and testing purposes. Prediction rates of 84\% in the GA/CBR study and 89\% in the GA/ANN study were obtained. The advantages and disadvantages of the two approaches are discussed in the light of the experiments and the findings. It appears that GA/ANN is a more suitable model for this example of cost estimation where the prediction of numerical values is required and only a limited number of cases exist. The integration of GA into CBR and ANN in a spreadsheet format is likely to improve the prediction rates.}, subject = {Architektur }, language = {en} } @inproceedings{DudekRichter, author = {Dudek, Mariusz and Richter, Matthias}, title = {UNTERSUCHUNGEN ZUR ZUVERL{\"A}SSIGKEIT DES STRAßENBAHNNETZES IN KRAKAU}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2943}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29432}, pages = {19}, abstract = {Der Begriff der Zuverl{\"a}ssigkeit spielt eine zentrale Rolle bei der Bewertung von Verkehrsnetzen. Aus der Sicht der Nutzer des {\"o}ffentlichen Personennahverkehrs ({\"O}PNV) ist eines der wichtigsten Kriterien zur Beurteilung der Qualit{\"a}t des Liniennetzes, ob es m{\"o}glich ist, mit einer großen Sicherheit das Reiseziel in einer vorgegebenen Zeit zu erreichen. Im Vortrag soll dieser Zuverl{\"a}ssigkeitsbegriff mathematisch gefasst werden. Dabei wird zun{\"a}chst auf den {\"u}blichen Begriff der Zuverl{\"a}ssigkeit eines Netzes im Sinne paarweiser Zusammenhangswahrscheinlichkeiten eingegangen. Dieser Begriff wird erweitert durch die Betrachtung der Zuverl{\"a}ssigkeit unter Einbeziehung einer maximal zul{\"a}ssigen Reisezeit. 
In vergangenen Arbeiten hat sich die Ring-Radius-Struktur als bew{\"a}hrtes Modell f{\"u}r die theoretische Beschreibung von Verkehrsnetzen erwiesen. Diese {\"U}berlegungen sollen nun durch Einbeziehung realer Verkehrsnetzstrukturen erweitert werden. Als konkretes Beispiel dient das Straßenbahnnetz von Krakau. Hier soll insbesondere untersucht werden, welche Auswirkungen ein geplanter Ausbau des Netzes auf die Zuverl{\"a}ssigkeit haben wird. This paper is associated with the CIVITAS-CARAVEL project "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {de} } @inproceedings{DzwigonHempel, author = {Dzwigon, Wieslaw and Hempel, Lorenz}, title = {ZUR SYNCHRONISATION VON LINIEN IM {\"O}PNV}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2944}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29442}, pages = {12}, abstract = {Wir betrachten im {\"O}PNV ({\"O}ffentlichen Personennahverkehr) diejenige Situation, daß zwei Bus- oder Straßenbahnlinien gemeinsame Haltestellen haben. Ziel unserer Untersuchungen ist es, f{\"u}r beide Linien einen solchen Fahrplan zu finden, der f{\"u}r die Fahrg{\"a}ste m{\"o}glichst viel Bequemlichkeit bietet. Die Bedarfsstruktur - die Anzahl von Personen, die die beiden Linien benutzen - setzt dabei gewisse Beschr{\"a}nkungen f{\"u}r die Taktzeiten der beiden Linien. Die verbleibenden Entscheidungsfreiheiten sollen im Sinne der Zielstellung ausgenutzt werden. Im Vortrag wird folgenden Fragen nachgegangen: - nach welchen Kriterien kann man die "Bequemlichkeit" oder die "Synchronisationsg{\"u}te" messen? - wie kann man die einzelnen "Synchronisationsmaße" berechnen? - wie kann man die verbleibenden Entscheidungsfreiheiten nutzen, um eine m{\"o}glichst gute Synchronisation zu erreichen? Die Ergebnisse werden dann auf einige Beispiele angewandt und mit den bereitgestellten Methoden L{\"o}sungsvorschl{\"a}ge unterbreitet.}, subject = {Architektur }, language = {de} } @inproceedings{EbertLenzen, author = {Ebert, Carsten and Lenzen, Armin}, title = {OUTPUT-ONLY ANALYSIS FOR EXPERIMENTAL DAMAGE DETECTION OF A TIED-ARCH BRIDGE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2945}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29452}, pages = {13}, abstract = {In civil engineering it is very difficult and often expensive to excite constructions such as bridges and buildings with an impulse hammer or shaker. This problem can be avoided with the output-only method as a special feature of stochastic system identification. The permanently existing ambient noise (e.g. wind, traffic, waves) is sufficient to excite the structures in their operational conditions. The output-only method is able to estimate the observable part of a state-space model which contains the dynamic characteristics of the measured mechanical system. Because of the assumption that the ambient excitation is white, there is no requirement to measure the input. Another advantage of the output-only method is the possibility to obtain highly detailed models by a special method called the polyreference setup. 
To emulate the availability of a much larger set of sensors, data from varying sensor locations are collected. Several successive data sets are recorded with sensors at different locations (moving sensors) and fixed locations (reference sensors). The covariance functions of the reference sensors serve as the basis for normalizing the data of the moving sensors. The result of the subsequent subspace-based system identification is a highly detailed black-box model that contains the weighting function, including the well-known dynamic parameters, i.e. eigenfrequencies and mode shapes, of the mechanical system. The emphasis of this lecture is the presentation of an extensive damage detection experiment. A 53-year-old prestressed concrete tied-arch bridge in H{\"u}nxe (Germany) was deconstructed in 2005. Beforehand, numerous vibration measurements were carried out. The first experiment for system modification was an additional support near the bridge bearing of one main girder. During a further experiment one hanger of one tied arch was cut through as an induced damage. Some first outcomes of the described experiments will be presented.}, subject = {Architektur }, language = {en} } @inproceedings{EblingScheuermann, author = {Ebling, Julia and Scheuermann, G.}, title = {TEMPLATE MATCHING ON VECTOR FIELDS USING CLIFFORD ALGEBRA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2946}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29464}, pages = {25}, abstract = {Due to the amount of flow simulation and measurement data, automatic detection, classification and visualization of features is necessary for an inspection. Therefore, many automated feature detection methods have been developed in recent years. However, only one feature class is visualized afterwards in most cases, and many algorithms have problems in the presence of noise or superposition effects. In contrast, image processing and computer vision have robust methods for feature extraction and computation of derivatives of scalar fields. Furthermore, interpolation and other filters can be analyzed in detail. An application of these methods to vector fields would provide a solid theoretical basis for feature extraction. The authors suggest Clifford algebra as a mathematical framework for this task. Clifford algebra provides a unified notation for scalars and vectors as well as a multiplication of all basis elements. The Clifford product of two vectors provides the complete geometric information of the relative positions of these vectors. Integration of this product results in Clifford correlation and convolution, which can be used for template matching of vector fields. For frequency analysis of vector fields and the behavior of vector-valued filters, a Clifford Fourier transform has been derived for 2D and 3D. Convolution and other theorems have been proved, and fast algorithms for the computation of the Clifford Fourier transform exist. Therefore the computation of Clifford convolution can be accelerated by computing it in the Clifford Fourier domain. 
Clifford convolution and Fourier transform can be used for a thorough analysis and subsequent visualization of flow fields.}, subject = {Architektur }, language = {en} } @inproceedings{EckardtKoenke, author = {Eckardt, Stefan and K{\"o}nke, Carsten}, title = {ADAPTIVE SIMULATION OF THE DAMAGE BEHAVIOR OF CONCRETE USING HETEROGENEOUS MULTISCALE MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2947}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29478}, pages = {15}, abstract = {In this paper an adaptive heterogeneous multiscale model, which couples two substructures with different length scales into one numerical model, is introduced for the simulation of damage in concrete. In the presented approach the initiation, propagation and coalescence of microcracks is simulated using a mesoscale model, which explicitly represents the heterogeneous material structure of concrete. The mesoscale model is restricted to the damaged parts of the structure, whereas the undamaged regions are simulated on the macroscale. As a result an adaptive enlargement of the mesoscale model during the simulation is necessary. In the first part of the paper the generation of the heterogeneous mesoscopic structure of concrete, the finite element discretization of the mesoscale model, the applied isotropic damage model and the cohesive zone model are briefly introduced. Furthermore the mesoscale simulation of a uniaxial tension test of a concrete prism is presented and the obtained numerical results are compared to experimental results. The second part is focused on the adaptive heterogeneous multiscale approach. Indicators for the model adaptation and for the coupling between the different numerical models will be introduced. The transfer from the macroscale to the mesoscale and the adaptive enlargement of the mesoscale substructure will be presented in detail. A nonlinear simulation of a realistic structure using an adaptive heterogeneous multiscale model is presented at the end of the paper to show the applicability of the proposed approach to large-scale structures.}, subject = {Architektur }, language = {en} } @inproceedings{Eickelkamp, author = {Eickelkamp, Jens Peter}, title = {LIQUIDIT{\"A}TSPLANUNG VON BAUPROJEKTEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2948}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29483}, pages = {12}, abstract = {Die Liquidit{\"a}tsplanung von Bauunternehmen gilt als ein wesentliches Steuerungs-, Kontroll- sowie Informationsinstrument f{\"u}r interne und externe Adressaten und {\"u}bt eine Entscheidungsunterst{\"u}tzungsfunktion aus. Da die einzelnen Bauprojekte einen wesentlichen Anteil an den Gesamtkosten des Unternehmens ausmachen, besitzen diese auch einen erheblichen Einfluß auf die Liquidit{\"a}t und die Zahlungsf{\"a}higkeit der Bauunternehmung. Dem folgend ist es in der Baupraxis eine {\"u}bliche Verfahrensweise, die Liquidit{\"a}tsplanung zuerst projektbezogen zu erstellen und anschließend auf Unternehmensebene zu verdichten. Ziel der Ausf{\"u}hrungen ist es, die Zusammenh{\"a}nge von Arbeitskalkulation, Ergebnisrechnung und Finanzrechnung in Form eines deterministischen Planungsmodells auf Projektebene darzustellen. 
Hierbei sollen das Verst{\"a}ndnis und die Bedeutung der Verkn{\"u}pfungen zwischen dem technisch-orientierten Bauablauf und dessen Darstellung im Rechnungs- und Finanzwesen herausgestellt werden. Die Vorg{\"a}nge aus der Bauabwicklung, das heißt die Abarbeitung der Bauleistungsverzeichnispositionen und deren zeitliche Darstellung in einem Bauzeitenplan, sind periodisiert in Gr{\"o}ßen der Betriebsbuchhaltung (Leistung, Kosten) zu transformieren und anschließend in der Finanzrechnung (Einzahlungen, Auszahlungen) nach Kreditoren und Debitoren aufzuschl{\"u}sseln.}, subject = {Architektur }, language = {de} } @inproceedings{EiermannErnstUllmann, author = {Eiermann, Michael and Ernst, O. and Ullmann, Elisabeth}, title = {SOLUTION STRATEGIES FOR STOCHASTIC FINITE ELEMENT DISCRETIZATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2949}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29493}, pages = {11}, abstract = {We consider efficient numerical methods for the solution of partial differential equations with stochastic coefficients or right hand side. The discretization is performed by the stochastic finite element method (SFEM). Separation of spatial and stochastic variables in the random input data is achieved via a Karhunen-Lo{\`e}ve expansion or Wiener's polynomial chaos expansion. We discuss solution strategies for the Galerkin system that take advantage of the special structure of the system matrix. For stochastic coefficients linear in a set of independent random variables we employ Krylov subspace recycling techniques after having decoupled the large SFEM stiffness matrix.}, subject = {Architektur }, language = {en} } @inproceedings{EngelkeSchuster, author = {Engelke, Gerald and Schuster, Otmar}, title = {OPENING THE RESERVE OF ECONOMIC EFFICIENCY IN LOGISTICAL AND FACILITY MANAGEMENT SERVICES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3017}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30177}, pages = {8}, abstract = {In many branches, companies often lose visibility of the human and technical resources of their field service. On the one hand, the people in the field service are often as free as kings; on the other hand, they do not take part in the daily communication in the central office and suffer from the lack of involvement in the decisions made in the central office. The result is inefficiency. Reproaches in both directions follow. With radio systems and later mobile phones, this gap began to close. But the solutions are far from being productive.}, subject = {Architektur }, language = {en} } @inproceedings{ErikssonKettunen, author = {Eriksson, Sirkka-Liisa and Kettunen, Jarkko}, title = {HYPERMONOGENIC POLYNOMIALS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2950}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29509}, pages = {22}, abstract = {It is well known that the power function is not monogenic. There are basically two ways to include the power function in the set of solutions: the hypermonogenic functions or the holomorphic Cliffordian functions. L. Pernas has determined the dimension of the space of homogeneous holomorphic Cliffordian polynomials of degree m, but his approach did not include a basis. 
It is known that the hypermonogenic functions are included in the space of holomorphic Cliffordian functions. As our main result we show that we can construct a basis for the right module of homogeneous holomorphic Cliffordian polynomials of degree m using hypermonogenic polynomials and their derivatives. To that end we first recall the function spaces of monogenic, hypermonogenic and holomorphic Cliffordian functions and give the results needed in the proof of our main theorem. We list some basic polynomials and their properties for the various function spaces. In particular, we consider recursive formulas, rules of differentiation and properties of linear independence for the polynomials.}, subject = {Architektur }, language = {en} } @inproceedings{ErlemannHartmann, author = {Erlemann, Kai and Hartmann, Dietrich}, title = {PARALLELIZATION OF A MICROSCOPIC TRAFFIC SIMULATION SYSTEM USING MPIJAVA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2951}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29516}, pages = {8}, abstract = {Traffic simulation is a valuable tool for the design and evaluation of road networks. Over the years, the level of detail to which urban and freeway traffic can be simulated has increased steadily, shifting from a merely qualitative macroscopic perspective to a very detailed microscopic view, where the behavior of individual vehicles is emulated realistically. With the improvement of behavioral models, however, the computational complexity has also steadily increased, as more and more aspects of real-life traffic have to be considered by the simulation environment. Despite the constant increase in computing power of modern personal computers, microscopic simulation remains computationally expensive, limiting the maximum network size that can be simulated on a single-processor computer in reasonable time. Parallelization can distribute the computing load from a single computer system to a cluster of several computing nodes. To this end, the existing simulation framework had to be adapted to allow for a distributed approach. As the simulation is ultimately targeted to be executed in real-time, incorporating real traffic data, only a spatial partition of the simulation was considered, meaning the road network has to be partitioned into subnets of comparable complexity to ensure homogeneous load balancing. The partitioning process must also ensure that the division between subnets only occurs in regions where no strong interaction between the separated road segments takes place (i.e. not in the direct vicinity of junctions). In this paper, we describe a new microscopic reasoning voting strategy, and discuss to what extent the increasing computational costs of these more complex behaviors lend themselves to a parallelized approach. 
We show the parallel architecture employed, the communication between computing units using MPIJava, and the benefits and pitfalls of adapting a single computer application to be used on a multi-node computing cluster.}, subject = {Architektur }, language = {en} } @inproceedings{EygelaarvanRooyen, author = {Eygelaar, Anton and van Rooyen, G.C.}, title = {ENGINEERING PROCESS MODEL SPECIFICATION AND RESOURCE LEVELING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2952}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29529}, pages = {18}, abstract = {The use of process models in the analysis, optimization and simulation of processes has proven to be extremely beneficial in the instances where they could be applied appropriately. However, the Architecture/Engineering/Construction (AEC) industries present unique challenges that complicate the modeling of their processes. A simple Engineering process model, based on the specification of Tasks, Datasets, Persons and Tools, and certain relations between them, have been developed, and its advantages over conventional techniques have been illustrated. Graph theory is used as the mathematical foundation mapping Tasks, Datasets, Persons and Tools to vertices and the relations between them to edges forming a directed graph. The acceptance of process modeling in AEC industries not only depends on the results it can provide, but the ease at which these results can be attained. Specifying a complex AEC process model is a dynamic exercise that is characterized by many modifications over the process model's lifespan. This article looks at reducing specification complexity, reducing the probability for erroneous input and allowing consistent model modification. Furthermore, the problem of resource leveling is discussed. Engineering projects are often executed with limited resources and determining the impact of such restrictions on the sequence of Tasks is important. Resource Leveling concerns itself with these restrictions caused by limited resources. This article looks at using Task shifting strategies to find a near-optimal sequence of Tasks that guarantees consistent Dataset evolution while resolving resource restrictions.}, subject = {Architektur }, language = {en} } @inproceedings{FalcaoCruzMalonek, author = {Falc{\~a}o, M. Irene and Cruz, J. F. and Malonek, Helmuth Robert}, title = {REMARKS ON THE GENERATION OF MONOGENIC FUNCTIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2939}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29390}, pages = {18}, abstract = { In this paper we consider three different methods for generating monogenic functions. The first one is related to Fueter's well known approach to the generation of monogenic quaternion-valued functions by means of holomorphic functions, the second one is based on the solution of hypercomplex differential equations and finally the third one is a direct series approach, based on the use of special homogeneous polynomials. We illustrate the theory by generating three different exponential functions and discuss some of their properties. 
Partially supported by the R\&D unit \emph{Matem{\'a}tica e Aplica{\c{c}}{\~o}es} (UIMA) of the University of Aveiro, through the Portuguese Foundation for Science and Technology (FCT), co-financed by the European Community fund FEDER.}, subject = {Architektur }, language = {en} } @inproceedings{Faustino, author = {Faustino, Nelson}, title = {FISCHER DECOMPOSITION FOR DIFFERENCE DIRAC OPERATORS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2955}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29551}, pages = {10}, abstract = {We establish the basis of a discrete function theory starting with a Fischer decomposition for difference Dirac operators. Discrete versions of homogeneous polynomials, Euler and Gamma operators are obtained. As a consequence we obtain a Fischer decomposition for the discrete Laplacian. For the sake of simplicity we consider in the first part only Dirac operators which contain only forward or backward finite differences. Of course, these Dirac operators do not factorize the classic discrete Laplacian. Therefore, we will consider a different definition of a difference Dirac operator in the quaternionic case which does factorize the discrete Laplacian.}, subject = {Architektur }, language = {en} } @article{Fink2004, author = {Fink, Thomas}, title = {Structural analysis, design and detailing using standard CAD software and standard building information model}, doi = {10.25643/bauhaus-universitaet.270}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2702}, year = {2004}, abstract = {This paper describes the concept of a German commercial software package developed for the needs of structural engineers. Using a standard CAD software as user interface for all geometrical data and to save all important input data, there is a natural link to upcoming building information models.}, subject = {Bauindustrie}, language = {en} } @inproceedings{GalffyBaitschWellmannJelicetal., author = {Galffy, Mozes and Baitsch, Matthias and Wellmann Jelic, Andres and Hartmann, Dietrich}, title = {LIFETIME-ORIENTED OPTIMIZATION OF BRIDGE TIE RODS EXPOSED TO VORTEX-INDUCED ACROSS-WIND VIBRATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2956}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29561}, pages = {12}, abstract = {In recent years, damages in welded connection plates of vertical tie rods of several arched steel bridges have been reported. These damages are due to fatigue caused by wind-induced vibrations. In the present study, such phenomena are examined, and the corresponding lifetime of a reference bridge in M{\"u}nster-Hiltrup, Germany, is estimated, based on the actual shape of the connection plate. Also, the results obtained are compared to the expected lifetime of a connection plate, whose geometry has been optimized separately. The structural optimization, focussing on the shape of the cut at the hanger ends, has been carried out using evolution strategies. The oscillation amplitudes have been computed by means of the Newmark-Wilson time-step method, using an appropriate load model, which has been validated by on-site experiments on the selected reference bridge. 
Corresponding stress-amplitudes are evaluated by multiplying the oscillation amplitudes with a stress concentration factor. This factor has been computed on the basis of a finite element model of the system "hanger-welding-connection plate", applying solid elements, according to the notch stress approach. The damage estimation takes into account the stochastics of the exciting wind process, as well as the stochastics of the material parameters (fatigue strength) given in terms of Woehler-curves. The shape optimization results in a substantial increase of the estimated hanger lifetime. The comparison of the lifetimes of the bulk plate and of the welding revealed that, in the optimized structure, the welding, being the most sensitive part in the original structure, shows much more resistance against potential damages than the bulk material.}, subject = {Architektur }, language = {en} } @inproceedings{Geyer, author = {Geyer, Philipp}, title = {MODELS FOR MULTIDISCIPLINARY DESIGN OPTIMIZATION: AN EXEMPLARY OFFICE BUILDING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2957}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29574}, pages = {10}, abstract = {The mathematical and technical foundations of optimization have been developed to a large extent. In the design of buildings, however, optimization is rarely applied because of insufficient adaptation of this method to the needs of building design. The use of design optimization requires the consideration of all relevant objectives in an interactive and multidisciplinary process. Disciplines such as structural, light, and thermal engineering, architecture, and economics impose various objectives on the design. A good solution calls for a compromise between these often contradictory objectives. This presentation outlines a method for the application of Multidisciplinary Design Optimization (MDO) as a tool for the designing of buildings. An optimization model is established considering the fact that in building design the non-numerical aspects are of major importance than in other engineering disciplines. A component-based decomposition enables the designer to manage the non-numerical aspects in an interactive design optimization process. A fa{\c{c}}ade example demonstrates a way how the different disciplines interact and how the components integrate the disciplines in one optimization model. In this grid-based fa{\c{c}}ade example, the materials switch between a discrete number of materials and construction types. For light and thermal engineering, architecture, and economics, analysis functions calculate the performance; utility functions serve as an important means for the evaluation since not every increase or decrease of a physical value improves the design. For experimental purposes, a genetic algorithm applied to the exemplary model demonstrates the use of optimization in this design case. A component-based representation first serves to manage non-numerical characteristics such as aesthetics. Furthermore, it complies with usual fabrication methods in building design and with object-oriented data handling in CAD. 
Therefore, components provide an important basis for an interactive MDO process in building design.}, subject = {Architektur }, language = {en} } @article{Grau2003, author = {Grau, Oliver}, title = {Der digitale Bau : aktuelle Tendenzen der Raumvisualisierung und ihre Vorl{\"a}ufer}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20080304-13242}, year = {2003}, abstract = {Wissenschaftliches Kolloquium vom 24. bis 27. April 2003 in Weimar an der Bauhaus-Universit{\"a}t zum Thema: ‚MediumArchitektur - Zur Krise der Vermittlung'}, subject = {Architektur}, language = {de} } @inproceedings{GruberValdman, author = {Gruber, Peter and Valdman, J.}, title = {APPROXIMATE SOLUTION OF ELASTOPLASTIC PROBLEMS BASED ON THE MOREAU-YOSIDA THEOREM}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2960}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29602}, pages = {8}, abstract = {We propose a new approach to the numerical solution of quasi-static elastic-plastic problems based on the Moreau-Yosida theorem. After the time discretization, the problem is expressed as an energy minimization problem for unknown displacement and plastic strain fields. The dependency of the minimization functional on the displacement is smooth, whereas the dependency on the plastic strain is non-smooth. Besides, there exists an explicit formula for calculating the plastic strain from a given displacement field. This allows us to reformulate the original problem as a minimization problem in the displacement only. Using the Moreau-Yosida theorem from convex analysis, the minimization functional in the displacements turns out to be Frechet-differentiable, although the hidden dependency on the plastic strain is non-differentiable. The second derivative exists everywhere apart from the elastic-plastic interface dividing elastic and plastic zones of the continuum. This motivates the implementation of a Newton-like method, which converges super-linearly, as can be observed in our numerical experiments.}, subject = {Architektur }, language = {en} } @inproceedings{GurtovyTynchuk, author = {Gurtovy, O. G. and Tynchuk, S.O.}, title = {RESEARCH OF DEFORMATION OF MULTILAYERED PLATES ON UNDEFORMABLE BASIS BY UNFLEXURAL SPECIFIED MODEL}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2961}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29613}, pages = {6}, abstract = {The stress-strain state (SSS) of multilayered plates on an undeformable foundation is investigated. The computational scheme of the transversely loaded plate is formed by mirroring the plate symmetrically about its surface of contact with the foundation. The resulting plate of double thickness is loaded bilaterally and symmetrically with respect to its median surface. This allows modelling only the unflexural deformation, which reduces the number of unknowns and the overall order of differentiation of the resolving system of equations. The developed refined continuum model takes into account deformations of transverse shear and transverse compression in a high iterative approximation. Rigid contact between the foundation and the plate, as well as shear without friction on the contact surface between plate and foundation, is considered. 
Calculations confirm the efficiency of this approach, yielding solutions which are qualitatively and quantitatively close to three-dimensional solutions.}, subject = {Architektur }, language = {en} } @inproceedings{GoebelHildebrandWerner, author = {G{\"o}bel, Michael and Hildebrand, J{\"o}rg and Werner, Frank}, title = {NUMERISCHES MODELL F{\"U}R DIE SIMULATION EINER LASERSTRAHLSCHWEIßUNG VON QUARZGLAS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2958}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29589}, pages = {14}, abstract = {Ausgehend von den fundierten Erfahrungen, die f{\"u}r das Schweißen von verschiedensten Metallen vorliegen, wird an der Professur Stahlbau der Bauhaus-Universit{\"a}t Weimar ein neuartiges Verfahren zum CO2-Laserstrahlschweißen von Quarzglas numerisch untersucht. Dabei kommt die kommerzielle FE-Software SYSWELD® zum Einsatz. Die erforderlichen Versuche werden in Zusammenarbeit mit dem Institut f{\"u}r F{\"u}getechnik und Werkstoffpr{\"u}fung GmbH aus Jena realisiert. Die numerische Analyse wird eingesetzt, um geeignete Prozessparameter zu bestimmen und deren Auswirkungen auf die transienten thermischen und mechanischen Vorg{\"a}nge, die w{\"a}hrend des Schweißvorgangs ablaufen, abzubilden. Um die aus der Simulation erhaltenen Aussagen zu {\"u}berpr{\"u}fen, ist es erforderlich, das Berechnungsmodell mittels Daten aus Versuchsschweißungen zu kalibrieren. Dabei sind die verwendeten Materialmodelle sowie die der Simulation zugrunde gelegten Materialkennwerte zu validieren. Es stehen verschiedene rheologische Berechnungsmodelle zur Auswahl, die die viskosen Materialeigenschaften des Glases abbilden. Dabei werden die drei mechanischen Grundelemente, die HOOKEsche Feder, der NEWTONsche D{\"a}mpfungszylinder und das ST.-VENANT-Element, miteinander kombiniert. Die M{\"o}glichkeit, thermische und mechanische Vorg{\"a}nge innerhalb des Glases w{\"a}hrend des Schweißvorgangs und nach vollst{\"a}ndiger Abk{\"u}hlung vorhersagen zu k{\"o}nnen, gestattet es, den Schweißvorgang {\"u}ber eine Optimierung der Verfahrensparameter gezielt dahingehend zu beeinflussen, die Wirtschaftlichkeit des Schweißverfahrens zu verbessern und ein zuverl{\"a}ssiges Schweißergebnis zu erhalten. Dabei k{\"o}nnen auch nur unter hohem experimentellen Aufwand durchf{\"u}hrbare Versuche simuliert werden, um eine Vorhersage zu treffen, ob es zweckm{\"a}ßig ist, den Versuch auch in der Praxis zu fahren. Dies f{\"u}hrt zu einer Reduzierung des experimentellen Aufwandes und damit zu einer Verk{\"u}rzung des Entwicklungszeitraumes f{\"u}r das angestrebte Verfahren.}, subject = {Architektur }, language = {de} } @inproceedings{Goettlicher, author = {G{\"o}ttlicher, Manfred}, title = {HYBRID SOLID-LIQUID MODEL FOR GRANULAR MATERIAL}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2959}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29596}, pages = {20}, abstract = {Solid behavior as well as liquid behavior characterizes the flow of granular material in silos. The presented model is based on an appropriate interaction of a displacement field and a velocity field. The constitutive equations and the applied algorithm are developed from the exact solution for a standard case. 
The standard case evolves from a very tall vertical plane strain silo containing material that flows at a constant speed. No horizontal displacements and velocities take place. No changes regarding the field values arise in the vertical direction and in time. Tension is not allowed at any point. Coulomb friction represents the effects of the vertical walls. The interaction between the flowing material and the walls is covered by a forced boundary condition resulting in an additional matrix for the solid component as well as for the liquid component. The resulting integral equations are designed to be solved directly. Three coefficients describe the properties of the granular material. They govern elastic solid behavior in combination with viscous liquid behavior.}, subject = {Architektur }, language = {de} } @article{HallerMenzel1997, author = {Haller, P. and Menzel, R.}, title = {Wissensbasierte Tragwerksplanung im Ingenieurholzbau mit ICAD}, doi = {10.25643/bauhaus-universitaet.497}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4976}, year = {1997}, abstract = {Der Entwurf einfacher Konstruktionen stellt f{\"u}r den Tragwerksplaner zumeist eine Routineaufgabe dar. {\"U}blicherweise werden Statik, Zeichnungen sowie Elementelisten separat voneinander erstellt. Das Programmsystem ICAD bietet die M{\"o}glichkeit, diese Arbeiten in einer Bearbeitungsstufe durchzuf{\"u}hren. Die Programmierung der Bemessung, Darstellung und Auswertung von Bauteilen wird mit dem Editor Emacs vorgenommen, die grafische Umsetzung des compilierten Quelltextes erfolgt im ICAD-Browser. Innerhalb dieser Benutzeroberfl{\"a}che steht eine Reihe von Werkzeugen f{\"u}r die Eingabe und Visualisierung von Daten zur Verf{\"u}gung. Betrachtet man die zu bearbeitenden Bauteile und Anschl{\"u}sse als bekannte Konstruktionen des Ingenieurholzbaus, f{\"u}r die es eine festgelegte Anzahl von Abmessungen und Kennwerten gibt, so l{\"a}ßt sich jede dieser Konstruktionen als eigenst{\"a}ndiger Modul programmieren. Der Tragwerksplaner ist somit in der Lage, aus einem Katalog an Bauteilen und Anschl{\"u}ssen die Gesamtkonstruktion zusammenzustellen. Allgemeing{\"u}ltige Kennwerte, Berechnungsverfahren und h{\"a}ufig verwendete Unterprogramme stehen modular als Wissensbasis zur Verf{\"u}gung und werden von den einzelnen Tragelementen bedarfsgerecht eingebunden. Eine weitere M{\"o}glichkeit der wissensbasierten Tragwerksplanung stellt die sogenannte Multi-Criteria-Analyse dar. Bei diesem Verfahren nimmt das Programm selbst{\"a}ndig eine Entscheidungsfindung f{\"u}r eine oder mehrere g{\"u}nstige Konstruktionsl{\"o}sungen vor. Dazu sind lediglich Wichtungen verschiedener Randbedingungen durch den Anwender erforderlich.}, subject = {Ingenieurholzbau}, language = {de} } @inproceedings{HapurneNistor, author = {Hapurne, Tania Mariana and Nistor, S.}, title = {USING MODERN TECHNOLOGIES TO UPGRADE EDILITARY URBAN INFRASTRUCTURE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2965}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29650}, pages = {6}, abstract = {Adopting the European laws concerning environmental protection will require sustained efforts of the authorities and communities from Romania; implementing modern solutions will become a fast and effective option for the improvement of the functioning systems, in order to prevent disasters. 
As a part of the urban infrastructure, the drainage networks of pluvial and residual waters are included in the plan of promoting the systems which protect the environmental quality, with the purpose of integrated and adaptive management. The paper presents a distributed control system for the sewer network of the town of Iasi. The unsatisfactory technical state of the current sewer system is described, focusing on objectives related to the implementation of the control system. The proposed distributed control system of the Iasi drainage network is based on the implementation of hierarchic control theory for diagnosis, sewer planning and management. Two control levels are proposed: coordination and local execution. The configuration of the distributed control system, including data acquisition and conversion equipment, interface characteristics, local data bus, data communication network and station configuration, is described in detail. The project aims to be a useful instrument for the local authorities in preventing and reducing the impact of future natural disasters on urban areas by means of modern technologies.}, subject = {Architektur }, language = {en} } @inproceedings{HauschildHuebler1997, author = {Hauschild, Thomas and H{\"u}bler, Reinhard}, title = {Entwicklung eines verteilbaren und kooperativ nutzbaren objektorientierten CAAD-Produktmodellierkerns}, doi = {10.25643/bauhaus-universitaet.447}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4475}, year = {1997}, abstract = {Der Fokus des Projektes liegt auf einer besseren Unterst{\"u}tzung der kooperativen Aspekte im Bauwerksentwurf und der Anwendung von ComponentWare-Techniken in der Architektur des Entwurfssystems. Es muß festgestellt werden, daß die Kooperation der Beteiligten im Entwurfsprozeß von Bauwerken durch die heute praktizierten Datenaustauschverfahren nicht oder nur unbefriedigend unterst{\"u}tzt wird und daß keine L{\"o}sung dieses Problems durch die Weiterentwicklung von filebasierten Datenaustauschformaten zu erwarten ist. Im Rahmen des Projektes wird mit einer CORBA-Umgebung f{\"u}r Smalltalk-80 ein verteilbares Objektsystem realisiert. Als Architektur des Systems wurde eine hybride Herangehensweise gew{\"a}hlt, bei der allgemeine Informationen auf einem zentralen Server verwaltet werden und die eigentlichen Projektinformationen bei Bedarf repliziert werden. Wie allgemein in GroupWare-orientierten Systemen notwendig, m{\"u}ssen effektive Mechanismen der Nebenl{\"a}ufigkeitskontrolle und zur Sperrung bestimmter Modellbereiche realisiert werden. Wichtig ist f{\"u}r kooperative Entwurfssysteme die Systemunterst{\"u}tzung der Beseitigung der Folgen von kollidierenden Entwurfsintentionen durch die Bearbeiter. Dazu werden unter anderem Remote-Pointer-Mechanismen realisiert. In Abh{\"a}ngigkeit von der Rolle eines Bearbeiters werden diesem Sichten auf das Objektmodell (Partialmodelle) zugeordnet. Es werden Mechanismen zur Autorisierung des Zugriffs auf Partialmodelle implementiert, zu diesem Zweck erfolgt eine Nutzerauthentifizierung. Beziehungen zwischen Partialmodellen werden durch eine spezielle Relation im Objektsystem abgebildet. Die Konzeption des Objektsystems lehnt sich an die PREPLAN-Philosophie an. Das impliziert die Unterst{\"u}tzung von Entwurfshandlungen sowohl in Bottom-Up- als auch in Top-Down-Richtung. Benutzer k{\"o}nnen das Objektsystem um eigene Klassen erweitern bzw. 
existierende Klassen modifizieren und Attribute mit Defaultwerten belegen, um das System inkrementell mit Dom{\"a}nenwissen anreichern zu k{\"o}nnen. Von großer Bedeutung f{\"u}r kooperative Entwurfssysteme sind eine Versionsverwaltung und die Bereitstellung von Undo - und Redo - Mechanismen. Es ist m{\"o}glich, multimediale Daten im Objektmodell abzulegen und diese in Abh{\"a}ngigkeit von ihrem Format wiederzugeben bzw. zu bearbeiten. Das beschriebene System befindet sich derzeit in der Implementierung.}, subject = {Bauwerk}, language = {de} } @inproceedings{HauschildHuebler2000, author = {Hauschild, Thomas and H{\"u}bler, Reinhard}, title = {Aspekte der verteilten Bauwerksmodellierung in kooperativen Entwurfsumgebungen auf Basis dynamischer Objektstrukturen}, doi = {10.25643/bauhaus-universitaet.591}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-5912}, year = {2000}, abstract = {Entwurfsprozesse im Bauwesen sind hochgradig kooperative Prozesse mit alternierenden Phasen asynchroner und synchroner Teamarbeit. Die Informationen {\"u}ber den aktuellen Entwurfsgegenstand k{\"o}nnen als Objektstrukturen modelliert werden, die in entsprechenden Modellverwaltungssystemen gespeichert werden. Bei der Realisierung von kooperativ nutzbaren Umgebungen f{\"u}r den Bauwerksentwurf sind jedoch bei der Auswahl von Basistechniken spezifische Anforderungen von CSCW-Applikationen zu beachten, die bestimmte traditionelle Verfahren nicht erf{\"u}llen. Neben verschiedenen Auswirkungen auf das Interaktionsverhalten der Entwurfsumgebung spielt die cooperation awareness der eingesetzten Mechanismen eine bedeutende Rolle. Mechanismen zur Zugriffskontrolle sind in netzwerkbasierten Mehrbenutzerumgebungen essentiell, jedoch sind herk{\"o}mmliche Verfahren zu unflexibel und nicht hinreichend ausdrucksstark. Eine adaptierte und erweiterte Variante des Matrixverfahrens ist f{\"u}r die Anwendung in Modellverwaltungssystemen geeignet. Ebenso muss bei der Auswahl von Mechanismen zur Nebenl{\"a}ufigkeitskontrolle Augenmerk auf dessen Eignung in Groupware-Systemen gelegt werden. Bei der Unterst{\"u}tzung asynchroner Kooperation k{\"o}nnen Lock-Verfahren auf die Informationen in Modellverwaltungssystemen angewandt werden. F{\"u}r die Applikationen f{\"u}r synchrone Teamarbeit m{\"u}ssen derartige Mechanismen auf die gemeinsamen Informationsbest{\"a}nde sowie auf Systemressourcen der Entwurfsumgebung angewendet werden. Hierf{\"u}r sind floor-passing'-Verfahren geeignet; die Anwendbarkeit von Transformationsverfahren sollte f{\"u}r die konkret umzusetzende Applikation gepr{\"u}ft werden.}, subject = {Bauwerk}, language = {de} } @inproceedings{Heinrich, author = {Heinrich, Timo}, title = {KENNZEICHNUNGSBASIERTER ZUGRIFF VON PROZESSMODELLEN AUF OBJEKTBEST{\"A}NDE DES BAUWESENS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2966}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29669}, pages = {13}, abstract = {F{\"u}r eine beherrschbare Koordination und Durchf{\"u}hrung von Planungsaufgaben in Bauprojekten wird der Planungsprozess zunehmend in formalisierten Modellen - Prozessmodellen - beschrieben. Die Produktmodellforschung ihrerseits widmet sich der Speicherung von Planungsdaten in Form von objektorientierten Modellen im Rechner. Hauptaugenmerk sind dabei die Wahrung der Konsistenz und die Modellierung von Abh{\"a}ngigkeiten innerhalb dieses Planungsmaterials. 
Der Bezug zu den Akteuren der Planung wird nicht direkt hergestellt. Ein formal beschriebener Planungsprozesses kann in der Praxis noch nicht derart realisiert werden, dass ein Zugriff auf Einzelobjekte des Planungsprozesses gew{\"a}hrleistet ist. Bestehende Planungsunterst{\"u}tzungs- und Workflowmanagement-Systeme abstrahieren und ordnen das Planungsmaterial nach wie vor auf Dateiebene. Der vorliegende Artikel beschreibt eine Methode f{\"u}r die geeignete Verbindung von formalisierten Prozessmodellen in der Bauplanung mit den Einzelobjekten, die in den modellorientierten Objektmengen kodiert sind. Dabei wird die Zugeh{\"o}rigkeit bestimmter Objekte zu Pl{\"a}nen und Dokumenten (zum Zwecke des Datenaustauschs) nicht l{\"a}nger durch die physische Zuordnung zu Dateien festgelegt. Es wird ein formales Beschreibungsmittel vorgestellt, welches die entsprechende Teilmengenbildung aus der Gesamtheit der Planungsobjekte erm{\"o}glicht. F{\"u}r die bisherigen Formen des Datenaustausches werden aus den Objektmodellen der Planung Teilmengen herausgel{\"o}st und physikalisch zwischen den Planern transportiert. Das neue Beschreibungsmittel hingegen erlaubt es, die Bildungsvorschrift f{\"u}r Objektteilmengen statt der Mengen selbst zwischen den Planern auszutauschen. Der Zugriff auf die konkreten Objekte findet dann direkt modellbasiert statt.}, subject = {Architektur }, language = {de} } @inproceedings{HeinrichHuhnt2003, author = {Heinrich, Timo and Huhnt, Wolfgang}, title = {Determination of Effects of Modefications during Planning Processes}, doi = {10.25643/bauhaus-universitaet.306}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-3065}, year = {2003}, abstract = {Usually, the co-ordination of design and planning tasks of a project in the construction industries is done in a paper based way. Subsequent modifications have to be handled manually. The effects of modifications cannot be determined automatically. The approach to specify a complete process model before project start does not consider the requirements of the construction industries. The effort of specification at the beginning and during the process (modifications) does not justify the use of standard process model techniques. A new approach is presented in the according paper. A complete process model is deducted on the basis of a core. The core consists of process elements and specific relations between them. Modifications need to be specified in the core only. The effort of specification is therefore reduced. The deduction of the complete process is based on the graph theory. Algorithms of the graph theory are also used to determine the effects of modifications during project work.}, subject = {CAD}, language = {en} } @inproceedings{HelbingKestingTreiberetal., author = {Helbing, D. and Kesting, A. and Treiber, M. and L{\"a}mmer, S. and Sch{\"o}nhof, M.}, title = {DECENTRALIZED APPROACHES TO ADAPTIVE TRAFFIC CONTROL AND AN EXTENDED LEVEL OF SERVICE CONCEPT}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2910}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29107}, pages = {19}, abstract = {Traffic systems are highly complex multi-component systems suffering from instabilities and non-linear dynamics, including chaos. This is caused by the non-linearity of interactions, delays, and fluctuations, which can trigger phenomena such as stop-and-go waves, noise-induced breakdowns, or slower-is-faster effects. 
The recently emerging information and communication technologies (ICT) promise new solutions leading from the classical, centralized control to decentralized approaches in the sense of collective (swarm) intelligence and ad hoc networks. An interesting application field is adaptive, self-organized traffic control in urban road networks. We present control principles that allow one to reach a self-organized synchronization of traffic lights. Furthermore, vehicles will become automatic traffic state detection, data management, and communication centers when forming ad hoc networks through inter-vehicle communication (IVC). We discuss the mechanisms and the efficiency of message propagation on freeways by short-range communication. Our main focus is on future adaptive cruise control systems (ACC), which will not only increase the comfort and safety of car passengers, but also enhance the stability of traffic flows and the capacity of the road ("traffic assistance"). We present an automated driving strategy that adapts the operation mode of an ACC system to the autonomously detected, local traffic situation. The impact on the traffic dynamics is investigated by means of a multi-lane microscopic traffic simulation. The simulation scenarios illustrate the efficiency of the proposed driving strategy. An ACC equipment level of only 10\% already improves the traffic flow quality and drastically reduces the travel times for the drivers by delaying or preventing a breakdown of the traffic flow. For the evaluation of the resulting traffic quality, we have recently developed an extended level of service concept (ELOS). We demonstrate our concept on the basis of travel times as the most important variable for a user-oriented quality of service.}, subject = {Architektur }, language = {en} } @inproceedings{Heuer, author = {Heuer, Andreas}, title = {THREE-DIMENSIONAL MODELING OF CONCRETE WITH DAMAGE AND PLASTICITY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2967}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29671}, pages = {15}, abstract = {The concrete is modeled as a material with damage and plasticity, whereby the viscoplastic and the viscoelastic behaviour depend on the rate of the total strains. Due to the damage behaviour the compliance tensor develops different properties in tension and compression. Various yield surfaces, flow rules and damage rules have been tested with respect to their usability in a concrete model. One three-dimensional yield surface was developed by the author from a failure surface based on the Willam--Warnke five-parameter model. Only one general uni-axial stress-strain relation is used for the numeric control of the yield surface. From that curve all necessary parameters for different strengths of concrete and different strain rates can be derived by affine transformations. For the flow rule in the compression zone a non-associated inelastic potential is used, in the tension zone a Rankine potential. Owing to the time-dependent formulation, the symmetry of the system equations is maintained in spite of the usage of non-associated potentials for the derivation of the inelastic strains. In the case of quasi-static computations a simple viscoplastic law is used that is based on an approach by Perzyna. The principle of equality of dissipation power in the uni-axial and the three-axial state of stress is used. 
It is modified by a factor that depends on the actual stress ratio and, in comparison with the Kupfer experiments, it yields more realistic strains. The implementation of the concrete model is conducted in a mixed hybrid finite element. Examples on the structural level are presented for verification of the concrete model.}, subject = {Architektur }, language = {en} } @inproceedings{HildebrandWudtkeWerner, author = {Hildebrand, J{\"o}rg and Wudtke, Idna and Werner, Frank}, title = {M{\"O}GLICHKEITEN DER MATHEMATISCHEN BESCHREIBUNG VON PHASENUMWANDLUNGEN IM STAHL BEI SCHWEIß- UND WIG-NACHBEHANDLUNGSPROZESSEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2968}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29684}, pages = {13}, abstract = {In recent decades many scientists have been intensively occupied with the change of materials during a process and its mathematical description. The extensive analyses were supported by advances in computer science. A mathematical description of the phase transformation is a condition for a realistic FE simulation of the state of microstructure. Based on the state of microstructure, it is possible to simulate the temperature and stress fields also in complex constructions. In recent years a great number of mathematical models have been developed to describe the transformation between different phases. For the development of the models for transformation kinetics it is practical to subdivide into isothermal and non-isothermal processes according to the thermal conditions. Some models for the description of the transformation in non-isothermal processes represent extensions of the models for isothermal processes. A part of the parameters for the describing equations can be derived from the time-temperature-transformation diagrams in the literature. Furthermore, the two possibilities of transformation - diffusion-controlled and non-diffusion-controlled - are considered by different models. The material-specific characteristics can be simulated during the transformation for each individual phase in realistic FE analyses. New materials can also be simulated after a modification of the parameters in the describing equations for the phase transformation. The effects in the temperature and stress field are a substantial reason for the investigation of the phase transformation during the welding and TIG-dressing processes.}, subject = {Architektur }, language = {de} } @inproceedings{HoffmannKornadt, author = {Hoffmann, Sabine and Kornadt, Oliver}, title = {PHASEN{\"U}BERGANGSMATERIALIEN ALS PASSIVE W{\"A}RMESPEICHER IN REVITALISIERUNGSOBJEKTEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2969}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29699}, pages = {12}, abstract = {Summer overheating in buildings is a common problem, especially in office buildings with large glazed facades, high internal loads and low thermal mass. Phase change materials (PCM) that undergo a phase transition in the temperature range of thermal comfort can add thermal mass without increasing the structural load of the building. The investigated PCM were micro-encapsulated and mixed into gypsum plaster. The experiments showed a reduction of the indoor temperature of up to 4 K when using a 3 cm layer of PCM-plaster with micro-encapsulated paraffin. 
The measurement results could validate a numerical model that is based on a temperature dependent function for heat capacity. Thermal building simulation showed that a 3 cm layer of PCM-plaster can help to fulfil German regulations concerning heat protection of buildings in summer for most office rooms.}, subject = {Architektur }, language = {de} } @inproceedings{Huhn2003, author = {Huhn, Michael}, title = {CAD by XML - Was XML im Planungssystem leisten kann}, doi = {10.25643/bauhaus-universitaet.315}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-3152}, year = {2003}, abstract = {In Bauplanungssystemen k{\"o}nnen XML-Technologien in vielen Bereichen eingesetzt werden mit dem Ziel, diese Systeme modular und webf{\"a}hig zu gestalten. Der Einsatz lohnt als Basis-Datenstruktur f{\"u}r verschiedene rechnerinterne Modelle, Steuerungsstruktur f{\"u}r Customizing von Anwendungen, Bindeglied zwischen objektbasierten Systemen, Kommunikationsprotokoll zwischen Komponenten. Es ist m{\"o}glich, komplexe Objekte aus dem Planungsalltag mittels XML arzustellen, zu speichern und zu verarbeiten. Es ist m{\"o}glich, entsprechende Komponenten im Netz zu verteilen bzw. {\"u}ber Internet zu verbinden. Die heute dominierende Sicht auf XML als Austauschmedium wird erg{\"a}nzt um die Idee eines XML-basierten Systems: Entwurfsobjekte k{\"o}nnen als >XML-Objekte< formuliert und im Sinne eines late binding verwendet werden.}, subject = {Bauplanung}, language = {de} } @article{Huhnt1997, author = {Huhnt, Wolfgang}, title = {Informationstechnische Integration im Bauwesen durch Nutzung fachspezifischen Anwenderwissens}, doi = {10.25643/bauhaus-universitaet.490}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4901}, year = {1997}, abstract = {Im vorliegenden Beitrag wird ein Integrationskonzept vorgestellt, bei dem das fachspezifische Wissen des Anwenders integraler Bestandteil des Konzeptes ist. Grundgedanke des Konzeptes ist es, die Informationen vorerst im Kontext der Anwendung, mit der sie erstellt wurden, zu belassen. Die Interpretation der Informationen erfolgt durch den Anwender, der diese Informationen zur Integration nutzen m{\"o}chte. Er weiß, welche Daten er f{\"u}r seine Arbeiten ben{\"o}tigt, und er verf{\"u}gt {\"u}ber das erforderliche fachspezifische Wissen, um die Informationen anderer Bearbeiter verstehen zu k{\"o}nnen. Damit ist es nicht erforderlich, die internen Datenstrukturen einer Software zu verstehen und in einem neutralen Format zu beschreiben. Die Integration erfolgt interaktiv am Bildschirm durch den Anwender. Das vorgestellt Konzept wird in den Kontext der in der Literatur beschriebenen Integrationskonzepte eingegliedert. Hierzu werden die Integrationskonzepte klassifiziert. Die Klassifikation erfolgt auf der Grundlage der Software-Architekturen. Das vorgestellte Integrationskonzept wird am Beispiel der Angebotsbearbeitung im Bauwesen konkretisiert.}, subject = {CAD}, language = {de} } @inproceedings{HuhntGielsdorf, author = {Huhnt, Wolfgang and Gielsdorf, F.}, title = {TOPOLOGICAL INFORMATION AS LEADING INFORMATION IN BUILDING PRODUCT MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2911}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29110}, pages = {11}, abstract = {Digital models of buildings are widely used in civil engineering. In these models, geometric information is used as leading information. 
Engineers are used to have geometric information, and, for instance, it is state of the art to specify a point by its three coordinates. However, the traditional approaches have disadvantages. Geometric information is over-determined. Thus, more geometric information is specified and stored than needed. In addition, engineers already deal with topological information. A denotation of objects in buildings is of topological nature. It has to be answered whether approaches where topological information becomes a leading role would be more efficient in civil engineering. This paper presents such an approach. Topological information is modelled independently of geometric information. It is used for denoting the objects of a building. Geometric information is associated to topological information so that geometric information "weights" a topology. The concept presented in this paper has already been used in surveying existing buildings. Experiences in the use of this concept showed that the number of geometric information that is required for a complete specification of a building could be reduced by a factor up to 100. Further research will show how this concept can be used in planning processes.}, subject = {Architektur }, language = {en} } @inproceedings{HaefnerKesselKoenke, author = {H{\"a}fner, Stefan and Kessel, Marco and K{\"o}nke, Carsten}, title = {MULTIPHASE B-SPLINE FINITE ELEMENTS OF VARIABLE ORDER IN THE MECHANICAL ANALYSIS OF HETEROGENEOUS SOLIDS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2964}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29643}, pages = {37}, abstract = {Advanced finite elements are proposed for the mechanical analysis of heterogeneous materials. The approximation quality of these finite elements can be controlled by a variable order of B-spline shape functions. An element-based formulation is developed such that the finite element problem can iteratively be solved without storing a global stiffness matrix. This memory saving allows for an essential increase of problem size. The heterogeneous material is modelled by projection onto a uniform, orthogonal grid of elements. Conventional, strictly grid-based finite element models show severe oscillating defects in the stress solutions at material interfaces. This problem is cured by the extension to multiphase finite elements. This concept enables to define a heterogeneous material distribution within the finite element. This is possible by a variable number of integration points to each of which individual material properties can be assigned. Based on an interpolation of material properties at nodes and further smooth interpolation within the finite elements, a continuous material function is established. With both, continuous B-spline shape function and continuous material function, also the stress solution will be continuous in the domain. The inaccuracy implied by the continuous material field is by far less defective than the prior oscillating behaviour of stresses. 
One- and two-dimensional numerical examples are presented.}, subject = {Architektur }, language = {en} } @inproceedings{HaefnerKoenke, author = {H{\"a}fner, Stefan and K{\"o}nke, Carsten}, title = {DAMAGE SIMULATION OF HETEROGENEOUS SOLIDS BY NONLOCAL FORMULATIONS ON ORTHOGONAL GRIDS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2963}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29638}, pages = {15}, abstract = {The present paper is part of a comprehensive approach of grid-based modelling. This approach includes geometrical modelling by pixel or voxel models, advanced multiphase B-spline finite elements of variable order and fast iterative solver methods based on the multigrid method. So far, we have only presented these grid-based methods in connection with linear elastic analysis of heterogeneous materials. Damage simulation demands further considerations. The direct stress solution of standard bilinear finite elements is severly defective, especially along material interfaces. Besides achieving objective constitutive modelling, various nonlocal formulations are applied to improve the stress solution. Such a corrective data processing can either refer to input data in terms of Young's modulus or to the attained finite element stress solution, as well as to a combination of both. A damage-controlled sequentially linear analysis is applied in connection with an isotropic damage law. Essentially by a high resolution of the heterogeneous solid, local isotropic damage on the material subscale allows to simulate complex damage topologies such as cracks. Therefore anisotropic degradation of a material sample can be simulated. Based on an effectively secantial global stiffness the analysis is numerically stable. The iteration step size is controlled for an adequate simulation of the damage path. This requires many steps, but in the iterative solution process each new step starts with the solution of the prior step. Therefore this method is quite effective. The present paper provides an introduction of the proposed concept for a stable simulation of damage in heterogeneous solids.}, subject = {Architektur }, language = {en} } @inproceedings{HaefnerKoenke, author = {H{\"a}fner, Stefan and K{\"o}nke, Carsten}, title = {MULTIGRID PRECONDITIONED CONJUGATE GRADIENT METHOD IN THE MECHANICAL ANALYSIS OF HETEROGENEOUS SOLIDS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2962}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29626}, pages = {29}, abstract = {A fast solver method called the multigrid preconditioned conjugate gradient method is proposed for the mechanical analysis of heterogeneous materials on the mesoscale. Even small samples of a heterogeneous material such as concrete show a complex geometry of different phases. These materials can be modelled by projection onto a uniform, orthogonal grid of elements. As one major problem the possible resolution of the concrete specimen is generally restricted due to (a) computation times and even more critical (b) memory demand. Iterative solvers can be based on a local element-based formulation while orthogonal grids consist of geometrical identical elements. The element-based formulation is short and transparent, and therefore efficient in implementation. 
A variation of the material properties in elements or integration points is possible. The multigrid method is a fast iterative solver method, where ideally the computational effort only increases linear with problem size. This is an optimal property which is almost reached in the implementation presented here. In fact no other method is known which scales better than linear. Therefore the multigrid method gains in importance the larger the problem becomes. But for heterogeneous models with very large ratios of Young's moduli the multigrid method considerably slows down by a constant factor. Such large ratios occur in certain heterogeneous solids, as well as in the damage analysis of solids. As solution to this problem the multigrid preconditioned conjugate gradient method is proposed. A benchmark highlights the multigrid preconditioned conjugate gradient method as the method of choice for very large ratio's of Young's modulus. A proposed modified multigrid cycle shows good results, in the application as stand-alone solver or as preconditioner.}, subject = {Architektur }, language = {en} } @inproceedings{Jahnke, author = {Jahnke, Georg}, title = {HISTORISCHE BAUSUBSTANZ IN MECKLENBURG}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2971}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29719}, pages = {11}, abstract = {Mit diesen Ausf{\"u}hrungen wird ein Beitrag zum weiteren Erhalt der historischen Bausubstanz in Mecklenburg aus der Sicht der Tragwerksanalyse geleistet. Dabei best{\"a}tigt es sich immer mehr, dass mit dem Modell der Geometrie, der Belastung und des Materials gleichberechtigte Modelle f{\"u}r eine wirklichkeitsnahe Einsch{\"a}tzung des Tragverhaltens eines Tragwerks vorliegen m{\"u}ssen. Es zeigt sich, dass dabei die besten Berechnungsprogramme nur die Ergebnisse liefern k{\"o}nnen, die mit den Eingabedaten zu erzielen sind. So hat sich der Forschungsschwerpunkt im Lehrgebiet Tragwerkslehre des FB Architektur an der Hochschule Wismar in den letzten Jahren auf die realistische Abbildung der Wechselwirkung zwischen der Bauaufnahme und der geometrischen Modellierung konzentriert. In diesem Bereich zeigen sich als Schwerpunkte die Wechselwirkung zwischen Sch{\"a}den und Tragwerksanalyse und die Wechselwirkung zwischen der aufgenommenen Geometrie und dem geometrischen Modell f{\"u}r die Tragwerksanalyse. Die F{\"u}lle der aufgenommenen Daten sind dabei in der Regel mehr hinderlich als ein Segen f{\"u}r die Tragwerksanalyse. Hier wurde gezeigt, welche und wie viele geometrische Daten f{\"u}r das geometrische Modell f{\"u}r die Tragwerksanalyse sinnvoll sind. Da die eigene Datenaufnahme relativ viel Zeit beansprucht, wurde eine "geistige" Bauaufnahme durchgef{\"u}hrt. Dazu wird der historische Planungsprozess in den einzelnen Formfindungsschritten nachvollzogen und in die virtuelle Realit{\"a}t {\"u}berf{\"u}hrt. Mit dieser Methode ergeben sich unterschiedliche Bauzust{\"a}nde und es lassen sich auch m{\"o}gliche Bauphasen abbilden. Die Tragwerksanalyse dieser virtuellen Realit{\"a}t zeigt dann m{\"o}gliche Schw{\"a}chen der Tragwerke und/oder die Notwendigkeit konstruktiver Ver{\"a}nderungen. Ein Vergleich der Ergebnisse der Tragwerksanalyse mit der Realit{\"a}t anhand des vorliegenden Datenbestands liefert die Grundlage f{\"u}r den aktuellen Handlungsbedarf. 
Da der Bauzustand eines Bauwerkes unter einer zeitlichen Ver{\"a}nderung steht, werden Methoden {\"u}berpr{\"u}ft, die es erm{\"o}glichen, einen einmal vorgelegten Datenbestand aufzubereiten und weiter zu verwalten.}, subject = {Architektur }, language = {de} } @inproceedings{Kasparek, author = {Kasparek, Eva}, title = {CONVERGENCE OF A NEW CONSISTENT FOLDED PLATE THEORY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2972}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29724}, pages = {10}, abstract = {The design of challenging space structures frequently relies on the theory of folded plates. The models are composed of plane facets of which the bending and membrane stiffness are coupled along the folds. In conventional finite element analysis of faceted structures the continuity of the displacement field is enforced exclusively at the nodes. Since approximate solutions for transverse and for in-plane displacements are not members of the same function space, separation occurs in between the common nodes of adjacent elements. It is shown that the kinematic assumptions of Bernoulli are accounted for this incompatibility along the edges in facet models. A general answer to this problem involves substantial modification of plate and membrane theory, but a straight forward formulation can be derived for simply folded plates, structures, whose folds do not intersect. A broad class of faceted structures, including models of various curved shells, belong to this category and can be calculated consistently. The additional requirements to assure continuity concern the mapping of displacement derivatives on the edges. An appropriate finite facet element provides node and edge-oriented degrees of freedom, whose transformation to system degrees of freedom, depends on the geometric configuration at each node. The concept is implemented using conform triangular elements. To evaluate the new approach, the energy norm of representative structures for refined meshes is calculated. The focus is placed on the mathematical convergence towards reliable solutions obtained from finite volume models.}, subject = {Architektur }, language = {en} } @inproceedings{KatzenbachGiere2003, author = {Katzenbach, Rolf and Giere, Johannes}, title = {Abstraktion von Prozessmustern im geotechnischen Bauplanungsprozess}, doi = {10.25643/bauhaus-universitaet.319}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-3195}, year = {2003}, abstract = {Der Beitrag basiert auf den Ans{\"a}tzen und Ergebnissen des Forschungsprojekts >Prozessorientierte Vernetzung von Ingenieurplanungen am Beispiel der Geotechnik<, das im Rahmen des Schwerpunktprogramms 1103 >Vernetzt-kooperative Planungsprozesse im Konstruktiven Ingenieurbau< von der DFG gef{\"o}rdert wird. Ziel des gemeinsam mit dem Institut f{\"u}r Numerische Methoden und Informatik im Bauwesen an der TU Darmstadt durchgef{\"u}hrten Forschungsprojekts ist die Entwicklung einer netzwerkbasierten Kooperationsplattform zur Unterst{\"u}tzung von geotechnischen Ingenieurplanungen. Daher konzentriert sich das Forschungsprojekt auf die Abbildung und Koordination der Planungsprozesse f{\"u}r Projekte des Konstruktiven Ingenieurbaus vor dem Hintergrund der stark arbeitsteiligen Projektbearbeitung in einer verteilten Rechnerumgebung. 
Der Beitrag stellt die Abstraktion von Prozessmustern im Bauplanungsprozess als Basis f{\"u}r die dynamische Prozessmodellierung in einem Kooperationsmodell dar. Ziel ist es, durch die Identifikation der mit dem Entwurf und der Dimensionierung eines Bauteils verbundenen Planungs- und Abstimmungsprozesse einen bauteilbezogenen Katalog von Prozessmustern zu abstrahieren. Die einzelnen Prozessmuster werden in jedem Bauplanungsprozess dynamisch {\"u}ber geeignete Kopplungsmechanismen in das aktuelle Prozessmodell integriert, so dass die f{\"u}r den Bauplanungsprozess typischen Ver{\"a}nderungen der Konstruktion und der Zusammensetzung des Planungsteams im Prozessmodell ber{\"u}cksichtigt werden k{\"o}nnen. Dazu werden im Beitrag die bisherigen Ergebnisse der Analyse des Planungsprozesses eines großen innerst{\"a}dtischen Bauvorhabens, das als Referenzobjekt dient, sowie typischer Planungsszenarien in der Geotechnik vorgestellt. Anschließend werden Grundlagen und methodische Ans{\"a}tze zur Modellierung von Prozessen mit der Methode der farbigen Petri-Netze mit individuellen Marken vorgestellt. Anhand von Beispielen f{\"u}r bauteilorientierte Prozessmuster wird die Funktionalit{\"a}t der Prozessmuster in sich und im gegenseitigen Zusammenspiel erl{\"a}utert}, subject = {Geotechnik}, language = {de} } @inproceedings{KimuraMakinoMaegaitoetal., author = {Kimura, Atsushi and Makino, Y. and Maegaito, Kentaro and Suzuki, Osamu}, title = {ITERATION DYNAMICAL SYSTEMS OF DISCRETE LAPLACIANS ON THE PLANE LATTICE (II) (THE VISUAL IMPRESSIONS GIVEN BY DESIGN-PATTERNS)}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2973}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29731}, pages = {4}, abstract = {The present study was designed to investigate the underlying factors determining the visual impressions of design-patterns that have complex textures. Design-patterns produced by "the dynamical system defined by iterations of discrete Laplacians on the plane lattice" were adopted as stimuli because they were not only complex, but also defined mathematically. In the experiment, 21 graduate and undergraduate students sorted 102 design-patterns into several groups by visual impressions. Those 102 patterns were classified into 12 categories by the cluster analysis. The results showed that the regularity of pattern was a most efficient factor for determining visual impressions of design-pattern, and there were some correspondence between visual impressions and mathematical variables of design-pattern. Especially, the visual impressions were influenced greatly by the neighborhood, and less influenced by steps of iterations.}, subject = {Architektur }, language = {en} } @inproceedings{Kisil, author = {Kisil, Vladimir}, title = {FILLMORE-SPRINGER-CNOPS CONSTRUCTION IMPLEMENTED IN GINAC}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2974}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29744}, pages = {103}, abstract = {This is an implementation of the Fillmore-Springer-Cnops construction (FSCc) based on the Clifford algebra capacities of the GiNaC computer algebra system. FSCc linearises the linear-fraction action of the Mobius group. This turns to be very useful in several theoretical and applied fields including engineering. 
The core of this realisation of FSCc is done for an arbitrary dimension, while a subclass for two-dimensional cycles adds some 2D-specific routines including a visualisation to PostScript files through the MetaPost or Asymptote software. This library is a backbone of many results published in, which serve as illustrations of its usage. It can be ported (with various levels of required changes) to other CAS with Clifford algebra capabilities.}, subject = {Architektur }, language = {en} } @inproceedings{Klauer, author = {Klauer, Thomas}, title = {MOBILE FACILITY MANAGEMENT ZUR INSPEKTION UND INSTANDHALTUNG VON INGENIEURBAUWERKEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2975}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29759}, pages = {10}, abstract = {In diesem Beitrag wird eine mobile Software-Komponente zur Vor-Ort-Unterst{\"u}tzung von Bauwerkspr{\"u}fungen gem{\"a}ß DIN 1076 „Ingenieurbauwerke im Zuge von Strassen und Wegen, {\"U}berwachung und Pr{\"u}fung" vorgestellt, welche sich im praktischen Einsatz bei der Hochbahn AG Hamburg befindet. Mit Hilfe dieses Werkzeugs kann die Aktivit{\"a}t am Bauwerk in den gesamten softwaregest{\"u}tzten Gesch{\"a}ftsprozess der Bauwerksinstandhaltung integriert und somit die Bearbeitungszeit einer Bauwerkspr{\"u}fung von der Vorbereitung bis zur Pr{\"u}fbericht-Erstellung reduziert werden. Die Technologie des Mobile Computing wird unter Ber{\"u}cksichtigung spezieller fachlicher Randbedingungen, wie z.B. dem Einsatzort unter freiem Himmel, erl{\"a}utert und es werden Methoden zur effizienten Datenerfassung mit Stift und Sprache vorgestellt und bewertet. Ferner wird die Einschr{\"a}nkung der Hardware durch die geringere Gr{\"o}ße der Endger{\"a}te, die sich durch die Bedingung der Mobilit{\"a}t ergibt, untersucht.}, subject = {Architektur }, language = {de} } @inproceedings{KlawitterOstrowski, author = {Klawitter, Arne and Ostrowski, M.}, title = {INTEGRATED RAINFALL RUNOFF MODELLING IN SMALL URBANIZED CATCHMENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2976}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29767}, pages = {12}, abstract = {A concept for integrated modeling of urban and rural hydrology is introduced. The concept allows for simulations on the catchment scale as well as on the local scale. It is based on a 2-layer-approach which facilitates the parallel coupling of a catchment hydrology model with an urban hydrology model, considering the interactions between the two systems. The concept has been implemented in a computer model combining a grid based distributed hydrological catchment model and a hydrological urban stormwater model based on elementary units. The combined model provides a flexible solution for time and spatial scale integration and allows the calculation of separate water balances for urban and rural hydrology. Furthermore, it is GIS-based, which allows for easy and accurate geo-referencing of urban overflow structures, which are considered as points of interaction between the two hydrologic systems. Due to the two-layer-approach, programs of measures can be incorporated in each system separately. The capabilities of the combined model have been tested on a hypothetical test case and a real world application.
It could be shown that the model is capable of accurately quantifying the effects of urbanization in a catchment. The effects of urbanization can be analyzed at the catchment outlet, but can also be traced back to their origins, due to the geo-referencing of urban overflow structures. This is a major advantage over conventional hydrological catchment models for the analysis of land use changes.}, subject = {Architektur }, language = {en} } @inproceedings{Klingert, author = {Klingert, Maik}, title = {THE USAGE OF IMAGE PROCESSING METHODS FOR INTERPRETATION OF THERMOGRAPHY DATA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2977}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29777}, pages = {13}, abstract = {For the assessment of old buildings, thermographic analysis aided by infrared cameras is nowadays widely employed. Image processing and evaluation can be economically practicable only if the image evaluation can also be automated to the largest extent. For that reason, methods of computer vision are presented in this paper to evaluate thermal images. To detect typical thermal image elements, such as thermal bridges and lintels, in thermal images and gray value images respectively, methods of digital image processing have been applied, of which numerical procedures are available to transform, modify and encode images. At the same time, image processing can be regarded as a multi-stage process. In order to be able to accomplish the process of image analysis from image formation through perfecting and segmentation to categorization, appropriate functions must be implemented. For this purpose, different measuring procedures and methods for automated detection and evaluation have been tested.}, subject = {Architektur }, language = {en} } @inproceedings{KnauerDammeierMeffert, author = {Knauer, Uwe and Dammeier, T. and Meffert, Beate}, title = {THE STRUCTURE OF ROAD TRAFFIC SCENES AS REVEALED BY UNSUPERVISED ANALYSIS OF THE TIME AVERAGED OPTICAL FLOW}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2978}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29780}, pages = {9}, abstract = {The Lucas-Kanade tracker has proven to be an efficient and accurate method for calculation of the optical flow. However, this algorithm can reliably track only suitable image features like corners and edges. Therefore, the optical flow can only be calculated for a few points in each image, resulting in sparse optical flow fields. Accumulation of these vectors over time is a suitable method to retrieve a dense motion vector field. However, the accumulation process limits application of the proposed method to fixed camera setups. Here, a histogram based approach is favored to allow more than a single typical flow vector per pixel. The resulting vector field can be used to detect roads and prescribed driving directions which constrain object movements. The motion structure can be modeled as a graph.
The nodes represent entry and exit points for road users as well as crossings, while the edges represent typical paths.}, subject = {Architektur }, language = {en} } @inproceedings{Knyziak, author = {Knyziak, Piotr}, title = {ANALYSIS THE TECHNICAL STATE FOR LARGE-PANEL RESIDENTIAL BUILDINGS BEHIND ASSISTANCE OF ARTIFICIAL NEURAL NETWORKS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2979}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29792}, pages = {9}, abstract = {This paper presents two new methods for the analysis of the technical state of large-panel residential buildings. The first method is based on elements extracted from the classical methods and on data about repairs and modernization collected from building documentations. The technical state of a building is calculated as a sum of several groups of elements defining the technical state. The deterioration in this method depends on: - the time which has passed since the last repair of an element or the time which has passed since construction, - an estimate of the state of element groups which can be determined on the basis of yearly controls. This is a new, unique method. It is easy to use and does not need expertise. The required data could be extracted easily from building documentations. For better accuracy the data from building inspections should be applied (in Poland inspections are made every year). The second method is based on processing the extracted data by means of artificial neural networks. The aim is to train the artificial neural network configurations on a set of data containing values of the technical state and information about building repairs for the last years (or other information and building parameters) and then to analyse new buildings with the trained neural network. The second benefit of using artificial neural networks is the reduction of the number of parameters. Instead of more than 40 parameters describing a building, about 6-12 are usually sufficient for satisfactory accuracy. This method could have lower accuracy but it is less prone to data errors.}, subject = {Architektur }, language = {en} } @inproceedings{KolbeRanglackSteinmann1997, author = {Kolbe, P. and Ranglack, D. and Steinmann, Frank}, title = {Eine Schnittstelle f{\"u}r dynamische Objektstrukturen f{\"u}r Entwurfsanwendungen}, doi = {10.25643/bauhaus-universitaet.455}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4555}, year = {1997}, abstract = {Trotz der langj{\"a}hrigen Erfahrung bei der Anwendung objektorientierter Konzepte bei der Modellierung von Geb{\"a}uden ist es bisher nicht gelungen, ein allgemein anerkanntes Geb{\"a}udemodell im Rechner abzubilden. Das mag zum einen daran liegen, daß die Standardisierung eines solchen Modells bis heute zu keinem Abschluß gekommen ist. Zum anderen aber scheint vor allem die Problematik der Abbildung eines solchen Geb{\"a}udemodells in das Objektmodell einer Programmiersprache bisher untersch{\"a}tzt worden zu sein. Die erhoffte Durchg{\"a}ngigkeit von objektorientierter Analyse, Entwurf und Programmierung gelingt bei Anwendungen f{\"u}r Entwurfsaufgaben nicht. Das gilt vor allem f{\"u}r Anwendungen, die fr{\"u}he Entwurfsphasen unterst{\"u}tzen und damit erst zur Definition eines Geb{\"a}udemodells beitragen. Im Bereich der Softwareentwicklung wird das Konzept des Objektes als Ordnungsbegriff zur Strukturierung von Softwaremodulen benutzt.
Die {\"U}bertragung dieser Ordnung in die Welt des Ingenieurs oder Architekten zur Bezeichnung eines konkreten Elements aus einem Modell (Raum 42 aus der Menge aller R{\"a}ume des Geb{\"a}udes Blumenstraße 7) kann daher nur zu begrenzten Erfolgen f{\"u}hren. Aus der Analyse der Widerspr{\"u}chlichkeit des Objektbegriffs zwischen Softwareentwickler (Programmierer) und Softwareanwender (Ingenieur, Architekt) wird im folgenden ein Laufzeitsystem f{\"u}r dynamische Objektstrukturen entwickelt, das es dem Softwareentwickler erlaubt, sowohl auf die Struktur als auch auf die Auspr{\"a}gung eines Modells zuzugreifen. Dem Softwareanwender k{\"o}nnen damit Werkzeuge zur Verf{\"u}gung gestellt werden, die es ihm gestatten, Geb{\"a}udemodelle zu definieren. Das Laufzeitsystem enth{\"a}lt zum einen eine Reihe von Klassen, die es erm{\"o}glichen, die Struktur von Anwenderobjekten dynamisch zu beschreiben und zu analysieren. Eine zweite Art von Klassen erlaubt das Erzeugen und Ver{\"a}ndern von Anwenderobjekten, die diesen Strukturen entsprechen}, subject = {Bauwerk}, language = {de} } @inproceedings{Kovalev1997, author = {Kovalev, M.}, title = {CAD and Discrete Optimization}, doi = {10.25643/bauhaus-universitaet.421}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-4214}, year = {1997}, abstract = {Review of Discrete Optimization Techniques for CAD Discrete optimization in the structure design Morphological method The alternative graph approach Convex discrete optimization without objective function Matroidal Decomposition in design Decomposition of layered matrices Discrete Optimization in Designing Packing problem Optimal arrangement of rectangles and shortest paths in L1-metrics Partition problems Discrete optimization in computational geometry and computer graphics Maxima of a point set on the plane Triangulation One of the main problems in computer graphics is removing hidden lines and surfaces}, subject = {CAD}, language = {en} } @inproceedings{KrafczykToelkeNachtwey2003, author = {Krafczyk, Manfred and Toelke, Jonas and Nachtwey, Bj{\"o}rn}, title = {Computational Steering in Civil Engineering}, doi = {10.25643/bauhaus-universitaet.11}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-112}, year = {2003}, abstract = {Computational Steering provides methods for the integration of modeling, simulation, visualization, data analysis and post processing. The user has full control over a running simulation and the possibility to modify objects (geometry and other properties), boundary conditions and other parameters of the system interactively. The objective of such a system is to explore the effects of changes made immediately and thus to optimize the target problem interactively. We present a computational steering based system for fluid flow problems in civil engineering. It is based on three software components as shown in figure 1. The modeler is the CAD-system AutoCAD, which offers a powerful programming interface allowing an efficient access to the geometric data. It also offers convenient manipulators for geometric objects. The simulation kernel is a Lattice-Boltzmann (LB) solver for the Navier-Stokes equations, which is especially suitable for instationary flows in complex geometries. For the visualization and postprocessing we use the software tool AVS, which provides a powerful programming interface and allows the efficient visualization of flow fields. 
These three components are interconnected through two communication modules and three interfaces as depicted in figure 1. Interface 1 is responsible for the transformation of the modified system for the simulation kernel, interface 2 is responsible for the proper preparation of the simulation data whereas interface 3 transforms the data from the modeler into a format suitable for the visualization system. The whole system is synchronized by the two communication modules.}, subject = {Bauwesen}, language = {de} } @inproceedings{Krasnov, author = {Krasnov, Yakov}, title = {ANALYTIC FUNCTIONS IN OPERATOR VARIABLES AS SOLUTION TO PDES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2982}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29822}, pages = {16}, abstract = {Procedures for the construction of general solutions for some classes of partial differential equations (PDEs) are proposed, and a symmetry operator approach to raising the orders of the polynomial solutions to linear PDEs is developed. We touch upon an ''operator analytic function theory'' as the solution of frequently occurring classes of the equations of mathematical physics, when their symmetry operators form a vast enough space. The MAPLE© package programs for building the operator variables are also elaborated.}, subject = {Architektur }, language = {en} } @inproceedings{KraussharConstalesGuerlebecketal., author = {Kraußhar, Rolf S{\"o}ren and Constales, Denis and G{\"u}rlebeck, Klaus and Spr{\"o}ßig, Wolfgang}, title = {APPLICATIONS OF QUATERNIONIC ANALYSIS IN ENGINEERING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2912}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29128}, pages = {8}, abstract = {The quaternionic operator calculus can be applied very elegantly to solve many important boundary value problems arising in fluid dynamics and electrodynamics in an analytic way and to set up fully explicit solutions. In order to apply the quaternionic operator calculus to solve these types of boundary value problems fully explicitly, one has to evaluate two types of integral operators: the Teodorescu operator and the quaternionic Bergman projector. While the integral kernel of the Teodorescu transform is universal for all domains, the kernel function of the Bergman projector, called the Bergman kernel, depends on the geometry of the domain. Recently the theory of quaternionic holomorphic multiperiodic functions and automorphic forms provided new impulses to set up explicit representation formulas for large classes of hyperbolic polyhedron type domains. These include block shaped domains, wedge shaped domains (with or without additional rectangular restrictions) and circular symmetric finite and infinite cylinders as particular subcases.
In this talk we want to give an overview of the recent developments in this direction.}, subject = {Architektur }, language = {en} } @inproceedings{Kravchenko, author = {Kravchenko, Vladislav}, title = {NEW APPLICATIONS OF PSEUDOANALYTIC FUNCTION THEORY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2983}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29835}, pages = {3}, abstract = {We show a close relation of the Schr{\"o}dinger equation and the conductivity equation to a Vekua equation of a special form. Under quite general conditions we propose an algorithm for the explicit construction of pseudoanalytic positive formal powers for the Vekua equation, which as a consequence gives us a complete system of solutions for the Schr{\"o}dinger and the conductivity equations. Besides the construction of complete systems of exact solutions for the above mentioned second order equations and the Dirac equation, we discuss some other applications of pseudoanalytic function theory.}, subject = {Architektur }, language = {en} } @inproceedings{Krzizek2000, author = {Krzizek, H.}, title = {Reduktion der Konstruktionsparameter von Schal- und Werkpl{\"a}nen des konstruktiven Ingenieurbaues mit rechteckigen Bauteilen durch Ber{\"u}hr- und B{\"u}ndigkeitsbedingungen}, doi = {10.25643/bauhaus-universitaet.601}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-6013}, year = {2000}, abstract = {F{\"u}r den Entwurf der i.a. aus langen schmalen Rechtecken bestehenden Schal- bzw. Werkpl{\"a}ne wird eine Entwurfsunterst{\"u}tzung vorgestellt, bei der die Gr{\"o}ße der Rechtecke wie immer festgelegt wird, die Lage der Rechtecke aber durch topologische Angaben. Letztere bilden programmtechnisch Bedingungen, wobei zwischen Ber{\"u}hr- und B{\"u}ndigkeitsbedingungen unterschieden wird. Diese Angaben positionieren das neue Rechteck in Bezug zu einem bereits platzierten. Zum Beispiel erlaubt die Angabe, die S{\"a}ule ist oberhalb des Fundamentes und belastet dieses mittig, eine eindeutige Festlegung der Lage der S{\"a}ule bei gegebener Lage des Fundamentes und gegebenen Abmessungen beider Rechtecke. Die Formulierung mittels Bedingungen hat den Vorteil, daß diese auch bei {\"A}nderung von Abmessungen g{\"u}ltig bleiben. Die hier vorgestellte Eingabeart der relativen Positionierung ist eine Erweiterung des Orthomodus, wie er bei Bau-CAD-Programmen stets gefunden wird.}, subject = {CAD}, language = {de} } @inproceedings{KoenigLang, author = {K{\"o}nig, Markus and Lang, H.}, title = {ANWENDUNG DES CASE-BASED REASONING BEI DER ERMITTLUNG VON VARIANTEN F{\"u}R DEN OBERBAU VON VERKEHRSFL{\"A}CHEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2980}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29808}, pages = {9}, abstract = {F{\"u}r die Ausf{\"u}hrung des Oberbaus von Verkehrsfl{\"a}chen existiert in Abh{\"a}ngigkeit von projektspezifischen Voraussetzungen eine Vielzahl von verschiedenen Varianten. Aufgrund von Erfahrungen der Projektplaner werden bei {\"a}hnlichen Voraussetzungen h{\"a}ufig gleichartige Ausf{\"u}hrungsvarianten gew{\"a}hlt. Um eine m{\"o}gliche L{\"o}sungsvariante f{\"u}r den Straßenoberbau zu erhalten, sollten daher nicht nur die gesetzlichen Richtlinien, sondern auch bereits beendete Projekte ber{\"u}cksichtigt werden.
Im Rahmen eines Wissenschaftlichen Kollegs an der Bauhaus-Universit{\"a}t Weimar wurde die Anwendung des Case-Based Reasoning f{\"u}r die Auswahl von Ausf{\"u}hrungsvarianten f{\"u}r den Straßenoberbau untersucht. In diesem Beitrag werden die grundlegenden Konzepte des Case-Based Reasoning und die Bestimmung von {\"a}hnlichen Varianten anhand einfacher Beispiele aus dem Straßenoberbau dargestellt.}, subject = {Architektur }, language = {de} } @inproceedings{KoenigTauscher, author = {K{\"o}nig, Markus and Tauscher, Eike}, title = {BERECHNUNG VON BAUABL{\"A}UFEN MIT VERSCHIEDENEN AUSF{\"U}HRUNGSVARIANTEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2981}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29816}, pages = {11}, abstract = {Prozesse im Bauingenieurwesen sind komplex und beinhalten eine große Anzahl verschiedener Aufgaben mit vielen logischen Abh{\"a}ngigkeiten. Basierend auf diesen projektspezifischen Abh{\"a}ngigkeiten wird gew{\"o}hnlich ein Bauablaufplan manuell erstellt. In der Regel existieren mehrere Varianten und somit alternative Bauabl{\"a}ufe um ein Projekt zu realisieren. Welche dieser Ausf{\"u}hrungsvarianten zur praktischen Anwendung kommt, wird durch den jeweiligen Projektmanager bestimmt. Falls {\"A};nderungen oder St{\"o}rungen w{\"a}hrend des Bauablaufs auftreten, m{\"u}ssen die davon betroffenen Aufgaben und Abl{\"a}ufe per Hand modifiziert und alternative Aufgaben sowie Abl{\"a}ufe stattdessen ausgef{\"u}hrt werden. Diese Vorgehensweise ist oft sehr aufw{\"a}ndig und teuer. Aktuelle Forschungsans{\"a}tze besch{\"a}ftigen sich mit der automatischen Generierung von Bauabl{\"a}ufen. Grundlage sind dabei Aufgaben mit ihren erforderlichen Voraussetzungen und erzeugten Ergebnissen. Im Rahmen dieses Beitrags wird eine Methodik vorgestellt, um Bauabl{\"a}ufe mit Ausf{\"u}hrungsvarianten in Form von Workflow-Netzen zu jeder Zeit berechnen zu k{\"o}nnen. Die vorgestellte Methode wird anhand eines Beispiels aus dem Straßenbau schematisch dargestellt.}, subject = {Architektur }, language = {de} } @article{Koenig2011, author = {K{\"o}nig, Reinhard}, title = {Generierung von Grundriss-Layouts mittels hybrider Evolutions-Strategie}, doi = {10.25643/bauhaus-universitaet.806}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20110414-15425}, year = {2011}, abstract = {Der vorliegende Text beschreibt ein computerbasiertes Verfahren zur L{\"o}sung von Layout-problemen in Architektur und St{\"a}dtebau, welches mit m{\"o}glichst wenig Problemwissen auskommt und schnell brauchbare Ergebnisse liefert, die durch schrittweises Hinzuf{\"u}gen von Problemwissen interaktiv weiter ausgearbeitet werden k{\"o}nnen. F{\"u}r das generative Verfahren wurde eine Evolutions-Strategie verwendet, die mit Mechanismen zur Kollisionserkennung und virtuellen Federn zu einem hybriden Algorithmus kombiniert wurde. Dieser dient erstens der L{\"o}sung des Problems der Dichten Packung von Rechtecken sowie zweitens der Herstellung bestimmter topologischer Beziehungen zwischen diesen Rechtecken. Die Bearbeitung beider Probleme wird durch schrittweise Erweiterung grundlegender Verfahren untersucht, wobei die einzelnen Schritte anhand von Performancetests miteinander verglichen werden. 
Am Ende wird ein iterativer Algorithmus vorgestellt, der einerseits optimale L{\"o}sungen garantiert und andererseits diese L{\"o}sungen in einer f{\"u}r eine akzeptable Nutzerinteraktion ausreichenden Geschwindigkeit generiert.}, subject = {CAD}, language = {de} } @inproceedings{Koenig, author = {K{\"o}nig, Reinhard}, title = {CPlan: An Open Source Library for Computational Analysis and Synthesis}, series = {33rd eCAADe Conference}, booktitle = {33rd eCAADe Conference}, editor = {Martens, Bob and Wurzer, Gabriel and Grasl, Tomas and Lorenz, Wolfgang and Schaffranek, Richard}, publisher = {Vienna University of Technology}, address = {Vienna}, doi = {10.25643/bauhaus-universitaet.2503}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160118-25037}, pages = {245 -- 250}, abstract = {Some CAAD packages offer additional support for the optimization of spatial configurations, but the possibilities for applying optimization are usually limited either by the complexity of the data model or by the constraints of the underlying CAAD system. Since we missed a system that allows experimenting with optimization techniques for the synthesis of spatial configurations, we developed a collection of methods over the past years. This collection is now combined in the presented open source library for computational planning synthesis, called CPlan. The aim of the library is to provide an easy-to-use programming framework with a flat learning curve for people with basic programming knowledge. It offers an extensible structure that allows adding new customized parts for various purposes. In this paper the existing functionality of the CPlan library is described.}, subject = {Architektur}, language = {en} } @article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Computers in the design phase - Ten thesis on their uselessness}, series = {Der Generalist}, journal = {Der Generalist}, doi = {10.25643/bauhaus-universitaet.2607}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26075}, abstract = {At the end of the 1960s, architects at various universities worldwide began to explore the potential of computer technology for their profession. With the decline in prices for PCs in the 1990s and the development of various computer-aided architectural design systems (CAAD), the use of such systems in architectural and planning offices grew continuously. Because today no architectural office manages without a costly CAAD system and because intensive software training has become an integral part of a university education, the question arises about what influence the various computer systems have had on the design process forming the core of architectural practice. The text at hand develops ten theses about why there has been no success to this day in introducing computers such that new qualitative possibilities for design result.}, subject = {CAD}, language = {en} } @article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Die Stadt der Agenten und Automaten}, series = {FORUM - Architektur \& Bauforum}, journal = {FORUM - Architektur \& Bauforum}, doi = {10.25643/bauhaus-universitaet.2608}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26083}, abstract = {PLANUNGSUNTERST{\"U}TZUNG DURCH DIE ANALYSE R{\"A}UMLICHER PROZESSE MITTELS COMPUTERSIMULATIONEN. Erst wenn man - zumindest im Prinzip - versteht, wie eine Stadt mit ihren komplexen, verwobenen Vorg{\"a}ngen im Wesentlichen funktioniert, ist eine sinnvolle Stadtplanung m{\"o}glich.
Denn jede Planung bedeutet einen Eingriff in den komplexen Organismus einer Stadt. Findet dieser Eingriff ohne Wissen {\"u}ber die Funktionsweise des Organismus statt, k{\"o}nnen auch die Auswirkungen nicht abgesch{\"a}tzt werden. Dieser Beitrag stellt dar, wie urbane Prozesse mittels Computersimulationen unter Zuhilfenahme so genannter Multi-Agenten-Systeme und Zellul{\"a}rer Automaten verstanden werden k{\"o}nnen.}, subject = {CAD}, language = {de} } @article{Koenig2010, author = {K{\"o}nig, Reinhard}, title = {Generative Planungsmethoden aus strukturalistischer Perspektive}, doi = {10.25643/bauhaus-universitaet.811}, url = {http://nbn-resolving.de/urn:nbn:de:101:1-201105114205}, year = {2010}, abstract = {N{\"a}hert man sich der Frage nach den Zusammenh{\"a}ngen zwischen Strukturalismus und generativen algorithmischen Planungsmethoden, so ist zun{\"a}chst zu kl{\"a}ren, was man unter Strukturalismus in der Architektur versteht. Allerdings gibt es letztlich keinen verbindlichen terminologischen Rahmen, innerhalb dessen sich eine solche Kl{\"a}rung vollziehen k{\"o}nnte. Strukturalismus in der Architektur wird oftmals auf ein formales Ph{\"a}nomen und damit auf eine Stilfrage reduziert. Der vorliegende Text will sich nicht mit Stilen und Ph{\"a}nomenen strukturalistischer Architektur auseinandersetzen, sondern konzentriert sich auf die Betrachtung strukturalistischer Entwurfsmethoden und stellt Bez{\"u}ge her zu algorithmischen Verfahren, wobei das Zusammenspiel zwischen regelgeleitetem und intuitivem Vorgehen beim Entwerfen herausgearbeitet wird.}, subject = {Strukturalismus}, language = {de} } @article{Koenig2011, author = {K{\"o}nig, Reinhard}, title = {Generating urban structures: A method for urban planning supported by multi-agent systems and cellular automata}, doi = {10.25643/bauhaus-universitaet.1448}, url = {http://nbn-resolving.de/urn:nbn:de:101:1-201105183228}, year = {2011}, abstract = {This work is based on the concept that the structure of a city can be defined by six basic urban patterns. To enable more complex urban planning as a long-term objective I have developed a simulation method for generating these basic patterns and for combining them to form various structures. The generative process starts with the two-dimensional organisation of streets followed by the parceling of the remaining areas. An agent-based diffusion-contact model is the basis of these first two steps. Then, with the help of cellular automata, the sites for building on are defined and a three-dimensional building structure is derived. I illustrate the proposed method by showing how it can be applied to generate possible structures for an urban area in the city of Munich.}, subject = {CAD}, language = {en} } @inproceedings{KoenigSchmitt, author = {K{\"o}nig, Reinhard and Schmitt, Gerhard}, title = {Backcasting and a new way of command in computational design : Proceedings}, series = {CAADence in Architecture Conference}, booktitle = {CAADence in Architecture Conference}, editor = {Szoboszlai, Mih{\´a}ly}, address = {Budapest}, doi = {10.25643/bauhaus-universitaet.2599}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-25996}, pages = {15 -- 25}, abstract = {It's not uncommon that analysis and simulation methods are used mainly to evaluate finished designs and to prove their quality, whereas the potential of such methods is to lead or control a design process from the very beginning.
Therefore, we introduce a design method that moves away from a "what-if" forecasting philosophy and increases the focus on backcasting approaches. We use the power of computation by combining sophisticated methods to generate designs with analysis methods to close the gap between analysis and synthesis of designs. For the development of a future-oriented computational design support we need to be aware of the human designer's role. A productive combination of the excellence of human cognition with the power of modern computing technology is needed. We call this approach "cognitive design computing". The computational part aims to mimic the way a designer's brain works by combining state-of-the-art optimization and machine learning approaches with available simulation methods. The cognition part respects the complex nature of design problems by the provision of models for human-computation interaction. This means that a design problem is distributed between computer and designer. In the context of the conference slogan "back to command", we ask how we may imagine the command over a cognitive design computing system. We expect that designers will need to let go of control of some parts of the design process to machines, but in exchange they will get a new powerful command over complex computing processes. This means that designers have to explore the potentials of their role as commanders of partially automated design processes. In this contribution we describe an approach for the development of a future cognitive design computing system with the focus on urban design issues. The aim of this system is to enable an urban planner to treat a planning problem as a backcasting problem by defining what performance a design solution should achieve and to automatically query or generate a set of best possible solutions. This kind of computational planning process offers proof that the designer meets the original explicitly defined design requirements. A key way in which digital tools can support designers is by generating design proposals. Evolutionary multi-criteria optimization methods allow us to explore a multi-dimensional design space and provide a basis for the designer to evaluate contradicting requirements: a task urban planners are faced with frequently. We also reflect on why designers will give more and more control to machines. Therefore, we investigate first approaches to learn how designers use computational design support systems in combination with manual design strategies to deal with urban design problems by employing machine learning methods. By observing how designers work, it is possible to derive more complex artificial solution strategies that can help computers make better suggestions in the future.}, subject = {CAD}, language = {en} } @article{KoenigThurowBraunesetal.2010, author = {K{\"o}nig, Reinhard and Thurow, Torsten and Braunes, J{\"o}rg and Donath, Dirk and Schneider, Sven}, title = {Technische Einf{\"u}hrung in FREAC: A Framework for Enhancing Research in Architectural Design and Communication}, doi = {10.25643/bauhaus-universitaet.1449}, url = {http://nbn-resolving.de/urn:nbn:de:101:1-201105183216}, year = {2010}, abstract = {Im vorliegenden Beitrag wird ein Framework f{\"u}r ein verteiltes dynamisches Produktmodell (FREAC) vorgestellt, welches der experimentellen Softwareentwicklung dient.
Bei der Entwicklung von FREAC wurde versucht, folgende Eigenschaften umzusetzen, die bei herk{\"o}mmlichen Systemen weitgehend fehlen: Erstens eine hohe Flexibilit{\"a}t, also eine m{\"o}glichst hohe Anpassbarkeit f{\"u}r unterschiedliche Fachdisziplinen; Zweitens die M{\"o}glichkeit, verschiedene Tools nahtlos miteinander zu verkn{\"u}pfen; Drittens die verteilte Modellbearbeitung in Echtzeit; Viertens das Abspeichern des gesamten Modell-Bearbeitungsprozesses; F{\"u}nftens eine dynamische Erweiterbarkeit sowohl f{\"u}r Softwareentwickler als auch f{\"u}r die Nutzer der Tools. Die Bezeichnung FREAC umfasst sowohl das Framework zur Entwicklung und Pflege eines Produktmodells (FREAC-Development) als auch die entwickelten Tools selbst (FREAC-Tools).}, subject = {CAD}, language = {de} } @inproceedings{Lee2004, author = {Lee, Yungui}, title = {Development and application of CAD software in the field of building structures in China}, doi = {10.25643/bauhaus-universitaet.89}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-898}, year = {2004}, abstract = {PKPM series CAD software is an integrated CAD system for building design, which integrates the following parts: architectural design, structural design, building service design and statistical analysis of quantity and budget. These four parts share the same database with high efficiency. Over 80\% of design corporations in China are using PKPM series CAD software. The detailed information and some key modules of PKPM series CAD software are mainly introduced in this paper.}, subject = {Bautechnik}, language = {en} } @inproceedings{LehnerHartmann, author = {Lehner, Karlheinz and Hartmann, Dietrich}, title = {USING INTERVAL ANALYSIS FOR STRUCTURAL ENGINEERING PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2984}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29844}, pages = {10}, abstract = {Interval analysis extends the concept of computing with real numbers to computing with real intervals. As a consequence, some interesting properties appear, such as the delivery of guaranteed results or confirmed global values. The former property is given in the sense that unknown numerical values are known to lie in a computed interval. The latter property states that the global minimum value, for example, of a given function is also known to be contained in an interval (or a finite set of intervals). Depending upon the amount of computational effort invested in the calculation, we can often find tight bounds on these enclosing intervals. The downside of interval analysis, however, is the mathematically correct, but often very pessimistic size of the interval result. This is in particular due to the so-called dependency effect, where a single variable is used multiple times in one calculation. Applying interval analysis to structural analysis problems, the dependency has a great influence on the quality of numerical results. In this paper, a brief background of interval analysis is presented and it is shown how it can be applied to the solution of structural analysis problems.
A discussion of possible improvements as well as an outlook to parallel computing is also given.}, subject = {Architektur }, language = {en} } @inproceedings{LourensvanRooyen, author = {Lourens, Eliz-Mari and van Rooyen, G.C.}, title = {Automating Preliminary Column Force Calculations In Multy-Storey Buildings}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2986}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29864}, pages = {10}, abstract = {In civil engineering practice, values of column forces are often required before any detailed analysis of the structure has been performed. One of the reasons for this arises from the fast-tracked nature of the majority of construction projects: foundations are laid and base columns constructed whilst analysis and design are still in progress. A need for quick results when feasibility studies are performed or when evaluating the effect of design changes on supporting columns form other situations in which column forces are required, but where a detailed analysis to get these forces seems superfluous. Thus it was concluded that the development of an efficient tool for column force calculations, in which the extensive input required in a finite element analysis is to be avoided, would be highly beneficial. The automation of the process is achieved by making use of a Voronoi diagram. The Voronoi diagram is used a) for subdividing the floor into influence areas and b) as a basis for automatic load assignment. The implemented procedure is integrated into a CAD system in which the relevant geometric information of the floor, i.e. its shape and column layout, can be defined or uploaded. A brief description of the implementation is included. Some comparative results and considerations regarding the continuation of the study are given.}, subject = {Architektur }, language = {en} } @inproceedings{LutherKoenke, author = {Luther, Torsten and K{\"o}nke, Carsten}, title = {INVESTIGATION OF CRACK GROWTH IN POLYCRYSTALLINE MESOSTRUCTURES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2988}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29886}, pages = {11}, abstract = {The design and application of high performance materials demands extensive knowledge of the materials damage behavior, which significantly depends on the meso- and microstructural complexity. Numerical simulations of crack growth on multiple length scales are promising tools to understand the damage phenomena in complex materials. In polycrystalline materials it has been observed that the grain boundary decohesion is one important mechanism that leads to micro crack initiation. Following this observation the paper presents a polycrystal mesoscale model consisting of grains with orthotropic material behavior and cohesive interfaces along grain boundaries, which is able to reproduce the crack initiation and propagation along grain boundaries in polycrystalline materials. With respect to the importance of modeling the geometry of the grain structure an advanced Voronoi algorithm is proposed to generate realistic polycrystalline material structures based on measured grain size distribution. 
The polycrystal model is applied to investigate the crack initiation and propagation in statically loaded representative volume elements of aluminum on the mesoscale without the necessity of initial damage definition. Future research work is planned to include the mesoscale model into a multiscale model for the damage analysis in polycrystalline materials.}, subject = {Architektur }, language = {en} } @phdthesis{LopezZermeno, author = {L{\´o}pez Zerme{\~n}o, Jorge Alberto}, title = {Isogeometric and CAD-based methods for shape and topology optimization: Sensitivity analysis, B{\´e}zier elements and phase-field approaches}, doi = {10.25643/bauhaus-universitaet.4710}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220831-47102}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {The Finite Element Method (FEM) is widely used in engineering for solving Partial Differential Equations (PDEs) over complex geometries. To this end, it is required to provide the FEM software with a geometric model that is typically constructed in a Computer-Aided Design (CAD) software. However, FEM and CAD use different approaches for the mathematical description of the geometry. Thus, it is required to generate a mesh, which is suitable for FEM, based on the CAD model. Nonetheless, this procedure is not a trivial task and it can be time consuming. This issue becomes more significant for solving shape and topology optimization problems, which consist in evolving the geometry iteratively. Therefore, the computational cost associated to the mesh generation process is increased exponentially for this type of applications. The main goal of this work is to investigate the integration of CAD and CAE in shape and topology optimization. To this end, numerical tools that close the gap between design and analysis are presented. The specific objectives of this work are listed below: • Automatize the sensitivity analysis in an isogeometric framework for applications in shape optimization. Applications for linear elasticity are considered. • A methodology is developed for providing a direct link between the CAD model and the analysis mesh. In consequence, the sensitivity analysis can be performed in terms of the design variables located in the design model. • The last objective is to develop an isogeometric method for shape and topological optimization. This method should take advantage of using Non-Uniform Rational B-Splines (NURBS) with higher continuity as basis functions. Isogeometric Analysis (IGA) is a framework designed to integrate the design and analysis in engineering problems. The fundamental idea of IGA is to use the same basis functions for modeling the geometry, usually NURBS, for the approximation of the solution fields. The advantage of integrating design and analysis is two-fold. First, the analysis stage is more accurate since the system of PDEs is not solved using an approximated geometry, but the exact CAD model. Moreover, providing a direct link between the design and analysis discretizations makes possible the implementation of efficient sensitivity analysis methods. Second, the computational time is significantly reduced because the mesh generation process can be avoided. Sensitivity analysis is essential for solving optimization problems when gradient-based optimization algorithms are employed. Automatic differentiation can compute exact gradients, automatically by tracking the algebraic operations performed on the design variables. 
For the automation of the sensitivity analysis, an isogeometric framework is used. Here, the analysis mesh is obtained after carrying out successive refinements, while retaining the coarse geometry for the design domain. An automatic differentiation (AD) toolbox is used to perform the sensitivity analysis. The AD toolbox takes the code for computing the objective and constraint functions as input. Then, using a source code transformation approach, it outputs code that computes the objective and constraint functions as well as their sensitivities. The sensitivities obtained from the sensitivity propagation method are compared with analytical sensitivities, which are computed using a full isogeometric approach. The computational efficiency of AD is comparable to that of analytical sensitivities. However, the memory requirements are larger for AD. Therefore, AD is preferable if the memory requirements are satisfied. Automatic sensitivity analysis demonstrates its practicality since it simplifies the work of engineers and designers. Complex geometries with sharp edges and/or holes cannot easily be described with NURBS. One solution is the use of unstructured meshes. Simplex elements (triangles and tetrahedra in two and three dimensions, respectively) are particularly useful since they can automatically parameterize a wide variety of domains. In this regard, unstructured B{\'e}zier elements, commonly used in CAD, can be employed for the exact modelling of CAD boundary representations. In two dimensions, the domain enclosed by NURBS curves is parameterized with B{\'e}zier triangles. To describe the boundary of a two-dimensional CAD model exactly, the continuity of a NURBS boundary representation is reduced to C^0. Then, the control points are used to generate a triangulation such that the boundary of the domain is identical to the initial CAD boundary representation. Thus, a direct link between the design and analysis discretizations is provided and the sensitivities can be propagated to the design domain. In three dimensions, the initial CAD boundary representation is given as a collection of NURBS surfaces that enclose a volume. Using a mesh generator (Gmsh), a tetrahedral mesh is obtained. The original surface is reconstructed by modifying the location of the control points of the tetrahedral mesh using B{\'e}zier tetrahedral elements and a point inversion algorithm. This method offers the possibility of performing the sensitivity analysis on the analysis mesh. Then, the sensitivities can be propagated into the design discretization. To reuse the mesh originally generated, a moving B{\'e}zier tetrahedral mesh approach was implemented. A gradient-based optimization algorithm is employed together with a sensitivity propagation procedure for the shape optimization cases. The proposed shape optimization approaches are used to solve standard benchmark problems in structural mechanics. The results obtained show that the proposed approach can compute accurate gradients and evolve the geometry towards optimal solutions. In three dimensions, the moving mesh approach results in faster convergence in terms of computational time and avoids remeshing at each optimization step. To consider topological changes in a CAD-based framework, an isogeometric phase-field-based shape and topology optimization method is developed. In this case, the diffuse interface of a phase-field variable over a design domain implicitly describes the boundaries of the geometry.
The design variables are the local values of the phase-field variable. The descent direction to minimize the objective function is found using the sensitivities of the objective function with respect to the design variables. The evolution of the phase field is determined by solving the time-dependent Allen-Cahn equation. Especially for topology optimization problems that require C^1 continuity, such as for flexoelectric structures, the isogeometric phase-field method is of great advantage. NURBS can achieve the desired continuity more efficiently than the traditionally employed functions. The robustness of the method is demonstrated when applied to different geometries, boundary conditions, and material configurations. The applications illustrate that, compared to piezoelectricity, the electrical performance of flexoelectric microbeams is higher under bending. In contrast, the electrical power of a structure under compression becomes larger with piezoelectricity.}, subject = {CAD}, language = {en} }