@inproceedings{AibaMaegaitoSuzuki, author = {Aiba, Yoshihisa and Maegaito, Kentaro and Suzuki, Osamu}, title = {Iteration dynamical systems of discrete Laplacians on the plane lattice(I) (Basic properties and computer simulations of the dynamical systems)}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2917}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29178}, pages = {3}, abstract = {In this study we introduce a concept of a discrete Laplacian on the plane lattice and consider its iteration dynamical system. First we discuss and prove some basic properties of the dynamical system. Next, by means of computer simulations, we show that the following phenomena can be reproduced quite well: (1) the crystals of water, (2) the designs of carpets and embroideries, (3) the change over time in the number of families of extinct animals, and (4) the ecosystems of living things. Hence we may expect that evolution and self-organization can be understood by use of these dynamical systems. We want to stress the following fact: although several well-known chaotic dynamical systems can describe chaotic phenomena, they have difficulties in describing evolution and self-organization.}, subject = {Architektur }, language = {en} } @inproceedings{BaitschHartmann, author = {Baitsch, Matthias and Hartmann, Dietrich}, title = {A FRAMEWORK FOR THE INTERACTIVE VISUALIZATION OF ENGINEERING MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2919}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29194}, pages = {9}, abstract = {Interactive visualization based on 3D computer graphics is nowadays an indispensable part of any simulation software used in engineering. Nevertheless, the implementation of such visualization software components is often avoided in research projects because it is a challenging and potentially time-consuming task. In this contribution, a novel Java framework for the interactive visualization of engineering models is introduced. It supports the task of implementing engineering visualization software by providing adequate program logic as well as high-level classes for the visual representation of entities typical of engineering models. The presented framework is built on top of the open-source visualization toolkit VTK. In VTK, a visualization model is established by connecting several filter objects in a so-called visualization pipeline. Although designing and implementing a good pipeline layout is demanding, VTK does not support the reuse of pipeline layouts directly. Our framework tailors VTK to engineering applications on two levels. On the first level, it adds new, engineering-model-specific filter classes to VTK. On the second level, ready-made pipeline layouts for certain aspects of engineering models are provided. For instance, there is a pipeline class for one-dimensional elements like trusses and beams that is capable of showing the elements along with deformations and member forces. In order to facilitate the implementation of a graphical user interface (GUI) for each pipeline class, there exists a reusable Java Swing GUI component that allows the user to configure the appearance of the visualization model. Because of the flexible structure, the framework can be easily adapted and extended to new problem domains.
Currently it is used in (i) an object-oriented p-version finite element code for design optimization, (ii) an agent-based monitoring system for dam structures and (iii) the simulation of destruction processes by controlled explosives based on multibody dynamics. Application examples from all three domains illustrate that the approach presented is powerful as well as versatile.}, subject = {Architektur }, language = {en} } @inproceedings{BartelsZimmermann, author = {Bartels, Jan-Hendrik and Zimmermann, J{\"u}rgen}, title = {MINIMIZING THE TOTAL DISCOUNTED COST OF DISMANTLING A NUCLEAR POWER PLANT}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2920}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29200}, pages = {9}, abstract = {For economic, technical or political reasons, about 100 nuclear power plants all over the world have been disconnected from the grid to date. All these power stations are still awaiting their complete dismantling, which, for a single reactor, costs up to one billion euros and lasts up to 15 years. In our contribution we present a resource-constrained project scheduling approach minimizing the total discounted cost of dismantling a nuclear power plant. A project of dismantling a nuclear power plant can be subdivided into a number of disassembling activities. The execution of these activities requires time and scarce resources like manpower, special equipment or storage facilities for the contaminated material arising from the dismantling. Moreover, we have to respect several minimum and maximum time lags (temporal constraints) between the start times of the different activities. Finally, each disassembling activity can be processed in two alternative execution modes, which lead to different disbursements and determine the resource requirements of the considered activity. The optimization problem is to determine a start time and an execution mode for each activity such that the discounted cost of the project is minimal, no temporal constraint is violated, and the activities' resource requirements do not exceed the availability of any scarce resource at any point in time. In our contribution we introduce an appropriate multi-mode project scheduling model with minimum and maximum time lags as well as renewable and cumulative resources for the described optimization problem. Furthermore, we show that the considered optimization problem is NP-hard in the strong sense. For small problem instances, optimal solutions can be obtained from a relaxation-based enumeration approach which is incorporated into a branch-and-bound algorithm. In order to be able to solve large problem instances, we also propose a truncated version of the devised branch-and-bound algorithm.}, subject = {Architektur }, language = {en} } @inproceedings{BauerKandlerWeiss, author = {Bauer, Marek and Kandler, A. and Weiß, Hendrik}, title = {MODEL OF TRAM LINE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2921}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29217}, pages = {11}, abstract = {From the passengers' perspective, punctuality is one of the most important features of tram operations. Unfortunately, in most cases this requirement is only insufficiently fulfilled. In this paper we present a simulation model for tram operation with a special focus on punctuality.
The aim is to obtain a helpful tool for designing timetables and for analyzing the effects of changing the priorities for trams at traffic lights or the kind of track separation. A realization of tram operations is assumed to be a sequence of running times between successive stops and times spent by the tram at the stops. In this paper the running time is modeled as the sum of its mean value and a zero-mean random variable. With the help of multiple regression we find that the average running time is a function of the length of the sections and the number of intersections. The random component is modeled by a sum of two independent zero-mean random variables. One of these variables describes the disturbance caused by the process of waiting at an intersection and the other the disturbance caused by the process of driving. The time spent at a stop is assumed to be a random variable, too. Its distribution is estimated from given measurements of these stop times for different tram lines in Krak{\'o}w. Finally, a special case of the introduced model is considered and numerical results are presented. This paper is associated with the CIVITAS-CARAVEL project "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauerRichter, author = {Bauer, Marek and Richter, Matthias}, title = {STATISTICAL ANALYSIS OF TIME LOST BY TRAMS BEFORE DEPARTURE FROM STOPS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2922}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29226}, pages = {18}, abstract = {The ride of a tram along its line, defined by a timetable, consists of the travel times along the subsequent sections and the times spent by the tram at the stops. In the paper, statistical data collected in the city of Krakow is presented and evaluated. Under Polish conditions, the time spent by trams at stops makes up a remarkable 30 \% of the total time of tram line operation. Moreover, this time is characterized by large variability. The time spent by a tram at a stop consists of the alighting and boarding time and the time lost at the stop after alighting and boarding have ended but before departure. The alighting and boarding time itself usually depends on the random number of alighting and boarding passengers and also on the number of passengers inside the vehicle. However, the time spent by the tram at the stop after alighting and boarding have ended is the effect of certain random events, mainly the impossibility of departing from the stop caused by the lack of priorities for public transport vehicles. The main focus of the talk lies on the description and the modelling of these effects. This paper is associated with the CIVITAS-CARAVEL project "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme.
The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauriedelDonathKoenig, author = {Bauriedel, Christian and Donath, Dirk and K{\"o}nig, Reinhard}, title = {COMPUTER-SUPPORTED SIMULATIONS FOR URBAN PLANNING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2923}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29235}, pages = {10}, abstract = {The idea of a simulation program to support urban planning is explained: four different, clearly defined development paths can be calculated for the rebuilding of a shrinking town. Aided by self-organization principles, a complex system can be created. The dynamics are based on the action patterns of individual actors, whose behaviour in turn cyclically depends on the generated structure. Global influences, which control the development, can be divided into a spatial, a socioeconomic, and an organizational-juridical level. The simulation model should offer conclusions on new planning strategies, especially in the context of the creation process of rebuilding measures. An example of a transportation system is shown by means of prototypes for the visualisation of the dynamic development process.}, subject = {Architektur }, language = {en} } @inproceedings{BeranDlask, author = {Beran, V{\'a}clav and Dlask, Petr}, title = {CONSTRUCTION SPEED AND CASH FLOW OPTIMISATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2926}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29269}, pages = {10}, abstract = {Practical examples show that the cost flow can be improved and that the total amount of money spent in construction and further use may be cut significantly. The calculation is based on spreadsheets, which are nowadays very easy to develop on most PCs. Construction works are a field where the evaluation of cash flow can and should be applied. Decisions about cash flow in construction are decisions with long-term impact and long-term memory. Mistakes from the distant past have a massive impact on the present situation and far into the economic future of the activities concerned. Two approaches exist: the just-in-time (JIT) approach and the life-cycle cost (LCC) approach. The calculation example shows the dynamic results for the production speed as opposed to a stable flow of production over the duration of the activities. More sophisticated rescheduling towards an optimal solution might bring extra profit in return. The technologies and organizational processes for industrial buildings, railway and road reconstruction, public utilities and housing developments involve assembly procedures that are very appropriate for this purpose; complicated research, development and innovation projects are likewise well suited to these kinds of applications.
Large investments and all publicly invested money may be spent more efficiently if an optimised speed strategy can be calculated.}, subject = {Architektur }, language = {en} } @inproceedings{BeranHromada, author = {Beran, V{\'a}clav and Hromada, E.}, title = {SOFTWARE FOR PROJECT RELIABILITY ESTIMATION AND RISK EVALUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2925}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29255}, pages = {16}, abstract = {The contribution presents a model that is able to simulate construction duration and cost for a building project. This model predicts a set of expected project costs and a duration schedule depending on input parameters such as production speed, scope of work, time schedule, bonding conditions and the maximum and minimum deviations from the scope of work and production speed. The simulation model is able to calculate, on the basis of an input level of probability, the adequate construction cost and time duration of a project. Conversely, it can also determine the adequate level of probability for a given construction cost and activity durations. Among the interpretive outputs of the application software is the compilation of a presumed dynamic progress chart. This progress chart represents the expected scenario of development of a building project with the mapping of potential time dislocations for particular activities. The calculation of a presumed dynamic progress chart is based on an algorithm which calculates mean values as partial results of the simulated building project. Construction cost and time models are, in many ways, useful tools in project management. Clients are able to make proper decisions about the time and cost schedules of their investments. Consequently, building contractors are able to schedule predicted project cost and duration before any decision is finalized.}, subject = {Architektur }, language = {en} } @inproceedings{Bilchuk, author = {Bilchuk, Irina}, title = {GEOMETRIC IDENTIFICATION OF OBJECTS IN CIVIL ENGINEERING APPLICATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2927}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29274}, pages = {21}, abstract = {Objects for civil engineering applications can be identified by their reference in memory, their alphanumeric name or their geometric location. Particularly in graphical user interfaces, it is common to identify objects geometrically by selection with the mouse. As the number of geometric objects in a graphical user interface grows, it becomes increasingly important to perform the basic operations add, search and remove for geometric objects with great efficiency. Guttman proposed the Region-Tree (R-tree) for geometric identification in an environment which uses pages on disc as its data structure. Minimal bounding rectangles are used to structure the data in such a way that neighborhood relations can be described effectively. The literature shows that the parameters which influence the efficiency of R-trees have been studied extensively, but without conclusive results. The goal of the research reported in this paper is to reliably determine the parameters which significantly influence the efficiency of R-trees for geometric identification in technical drawings.
In order to make this investigation conclusive, it must be performed with the best available software technology. Therefore, object-oriented software for the method has been developed. This implementation is tested with technical drawings containing many thousands of geometric objects. These drawings are created automatically by a stochastic generator which is incorporated into a test bed consisting of an editor and a visualiser. This test bed is used to obtain statistics for the main factors which affect the efficiency of R-trees. The investigation shows that the following main factors affecting the efficiency can be identified reliably: the number of geometric objects in the drawing, the minimum and maximum number of children of a node of the tree, and the maximum width and height of the minimal bounding rectangles of the geometric objects relative to the size of the drawing.}, subject = {Architektur }, language = {en} } @inproceedings{BockGuerlebeck, author = {Bock, Sebastian and G{\"u}rlebeck, Klaus}, title = {A Coupled Ritz-Galerkin Approach Using Holomorphic and Anti-holomorphic Functions}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2928}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29281}, pages = {14}, abstract = {The contribution focuses on the development of a basic computational scheme that provides a suitable calculation environment for the coupling of analytical near-field solutions with standard numerical procedures in the far field of the singularity. The proposed calculation scheme uses classical methods of complex function theory, which can be generalized to 3-dimensional problems by using the framework of hypercomplex analysis. The adapted approach is mainly based on the factorization of the Laplace operator by the Cauchy-Riemann operator, where exact solutions of the respective differential equation are constructed by using an orthonormal basis of holomorphic and anti-holomorphic functions.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeKnockDeSchepper, author = {Brackx, Fred and De Knock, B. and De Schepper, Hennie}, title = {A MULTI--DIMENSIONAL HILBERT TRANSFORM IN ANISOTROPIC CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2929}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29297}, pages = {15}, abstract = {In earlier research, generalized multidimensional Hilbert transforms have been constructed in m-dimensional Euclidean space, in the framework of Clifford analysis. Clifford analysis, centred around the notion of monogenic functions, may be regarded as a direct and elegant generalization to higher dimensions of the theory of holomorphic functions in the complex plane. The considered Hilbert transforms, usually obtained as a part of the boundary value of an associated Cauchy transform in m+1 dimensions, might be characterized as isotropic, since the metric in the underlying space is the standard Euclidean one. In this paper we adopt the idea of a so-called anisotropic Clifford setting, which leads to the introduction of a metric-dependent m-dimensional Hilbert transform showing, at least formally, the same properties as the isotropic one.
Since the Hilbert transform is an important tool in signal analysis, this metric-dependent setting has the advantage of allowing the adjustment of the co-ordinate system to possible preferential directions in the signals to be analyzed. A striking result to be mentioned is that the associated anisotropic (m+1)-dimensional Cauchy transform is no longer uniquely determined, but may stem from a diversity of (m+1)-dimensional "mother" metrics.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperDeSchepperetal., author = {Brackx, Fred and De Schepper, Hennie and De Schepper, Nele and Sommen, Frank}, title = {HERMITIAN CLIFFORD-HERMITE WAVELETS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2931}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29313}, pages = {13}, abstract = {The one-dimensional continuous wavelet transform is a successful tool for signal and image analysis, with applications in physics and engineering. Clifford analysis offers an appropriate framework for taking wavelets to higher dimensions. In the usual orthogonal case Clifford analysis focusses on monogenic functions, i.e. null solutions of the rotation-invariant, vector-valued Dirac operator ∂, defined in terms of an orthogonal basis for the quadratic space R^m underlying the construction of the Clifford algebra R_{0,m}. An intrinsic feature of this function theory is that it encompasses all dimensions at once, as opposed to a tensorial approach with products of one-dimensional phenomena. This has allowed for a very specific construction of higher dimensional wavelets and the development of the corresponding theory, based on generalizations of classical orthogonal polynomials on the real line, such as the radial Clifford-Hermite polynomials introduced by Sommen. In this paper, we pass to the Hermitian Clifford setting, i.e. we let the same set of generators produce the complex Clifford algebra C_{2n} (with even dimension), which we equip with a Hermitian conjugation and a Hermitian inner product. Hermitian Clifford analysis then focusses on the null solutions of two mutually conjugate Hermitian Dirac operators which are invariant under the action of the unitary group. In this setting we construct new Clifford-Hermite polynomials, starting in a natural way from a Rodrigues formula which now involves both Dirac operators mentioned. Due to the specific features of the Hermitian setting, four different types of polynomials are obtained, two types of even degree and two types of odd degree. These polynomials are used to introduce a new continuous wavelet transform, after thorough investigation of all necessary properties of the involved polynomials, the mother wavelet and the associated family of wavelet kernels.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperSommen, author = {Brackx, Fred and De Schepper, Nele and Sommen, Frank}, title = {Clifford-Hermite and Two-Dimensional Clifford-Gabor Filters For Early Vision}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2930}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29303}, pages = {22}, abstract = {Image processing has been much inspired by human vision, in particular with regard to early vision.
The latter refers to the earliest stage of visual processing responsible for the measurement of local structures such as points, lines, edges and textures in order to facilitate the subsequent interpretation of these structures in higher stages (known as high-level vision) of the human visual system. This low-level visual computation is carried out by cells of the primary visual cortex. The receptive field profiles of these cells can be interpreted as the impulse responses of the cells, which are then considered as filters. According to the Gaussian derivative theory, the receptive field profiles of the human visual system can be approximated quite well by derivatives of Gaussians. Two mathematical models suggested for these receptive field profiles are, on the one hand, the Gabor model and, on the other hand, the Hermite model, which is based on the analysis filters of the Hermite transform. The Hermite filters are derivatives of Gaussians, while Gabor filters, which are defined as harmonic modulations of Gaussians, provide a good approximation to these derivatives. It is important to note that, even though the Gabor model is more widely used than the Hermite model, the latter offers some advantages, such as forming an orthogonal basis and having a better match to experimental physiological data. In our earlier research both filter models, Gabor and Hermite, have been developed in the framework of Clifford analysis. Clifford analysis offers a direct, elegant and powerful generalization to higher dimensions of the theory of holomorphic functions in the complex plane. In this paper we present the construction of the Hermite and Gabor filters, both in the classical and in the Clifford analysis framework. We also generalize the concept of complex Gaussian derivative filters to the Clifford analysis setting. Moreover, we present further properties of the Clifford-Gabor filters, such as their relationship with other types of Gabor filters and their localization in the spatial and frequency domains, formalized by the uncertainty principle.}, subject = {Architektur }, language = {en} } @inproceedings{BraunesDonath, author = {Braunes, J{\"o}rg and Donath, Dirk}, title = {COMPUTERGEST{\"U}TZTE PLANUNG IM BESTAND VON DER DIGITALEN BESTANDSERFASSUNG ZUR PLANUNGSUNTERST{\"U}TZUNG IM CAAD}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2933}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29338}, pages = {10}, abstract = {F{\"u}r eine gesicherte Planung im Bestand ist eine F{\"u}lle verschiedenster Informationen zu ber{\"u}cksichtigen, welche oft erst w{\"a}hrend des Planungs- oder Bauprozesses gewonnen werden. Voraussetzung hierf{\"u}r bildet immer eine Bestandserfassung. Zwar existieren Computerprogramme zur Unterst{\"u}tzung der Bestandserfassung, allerdings handelt es sich hierbei ausschließlich um Insell{\"o}sungen. Der Export der aufgenommenen Daten in ein Planungssystem bedingt Informationsverluste. Trotz der potentiellen M{\"o}glichkeit aktueller CAAD/BIM Systeme zur Verwaltung von Bestandsdaten sind diese vorrangig f{\"u}r die Neubauplanung konzipiert. Die durchg{\"a}ngige Bearbeitung von Sanierungsprojekten von der Erfassung des Bestandes {\"u}ber die Entwurfs- und Genehmigungsplanung bis zur Ausf{\"u}hrungsplanung innerhalb eines CAAD/BIM Systems wird derzeit nicht ad{\"a}quat unterst{\"u}tzt.
An der Professur Informatik in der Architektur (InfAR) der Fakult{\"a}t Architektur der Bauhaus-Universit{\"a}t Weimar entstanden im Rahmen des DFG-Sonderforschungsbereichs 524 "Werkzeuge und Konstruktionen f{\"u}r die Revitalisierung von Bauwerken" in den letzten Jahren Konzepte und Prototypen zur fachlich orientierten Unterst{\"u}tzung der Planung im Bestand. Der Fokus lag dabei auf der Erfassung aller planungsrelevanten Bestandsdaten und deren Abbildung in einem dynamischen Bauwerksmodell. Aufbauend auf diesen Forschungsarbeiten befasst sich der Artikel mit der kontextbezogenen Weiterverwendung und gezielten Bereitstellung von Bestandsdaten im Prozess des Planens im Bestand und der Integration von Konzepten der planungsrelevanten Bestandserfassung in markt{\"u}bliche CAAD/BIM Systeme.}, subject = {Architektur }, language = {de} } @inproceedings{BrossmannMueller, author = {Broßmann, Marko and M{\"u}ller, Karl-Heinz}, title = {STOCHASTISCHE ANALYSE VON STAHLBETONBALKEN IM GRENZZUSTAND DER ADAPTION UNTER BER{\"U}CKSICHTIGUNG DER STEIFIGKEITSDEGRADATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2934}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29341}, pages = {20}, abstract = {Am Beispiel eines 3-feldrigen Durchlauftr{\"a}gers wird die Versagenswahrscheinlichkeit von wechselnd belasteten Stahlbetonbalken bez{\"u}glich des Grenzzustandes der Adaption (Einspielen, shakedown) untersucht. Die Adaptionsanalyse erfolgt unter Ber{\"u}cksichtigung der beanspruchungsabh{\"a}ngigen Degradation der Biegesteifigkeit infolge Rissbildung. Die damit verbundene mechanische Problemstellung kann auf die Adaptionsanalyse linear elastisch - ideal plastischer Balkentragwerke mit unbekannter aber begrenzter Biegesteifigkeit zur{\"u}ckgef{\"u}hrt werden. Die Versagenswahrscheinlichkeit wird unter Ber{\"u}cksichtigung stochastischer Tragwerks- und Belastungsgr{\"o}ßen berechnet. Tragwerkseigenschaften und st{\"a}ndige Lasten gelten als zeitunabh{\"a}ngige Zufallsgr{\"o}ßen. Zeitlich ver{\"a}nderliche Lasten werden als nutzungsdauerbezogene Extremwerte POISSONscher Rechteck-Pulsprozesse unter Ber{\"u}cksichtigung zeitlicher {\"U}berlagerungseffekte modelliert, so dass die Versagenswahrscheinlichkeit ebenfalls eine nutzungsdauerbezogene Gr{\"o}ße ist. Die mechanischen Problemstellungen werden numerisch mit der mathematischen Optimierung gel{\"o}st. Die Versagenswahrscheinlichkeit wird auf statistischem Weg mit der Monte-Carlo-Methode gesch{\"a}tzt.}, subject = {Architektur }, language = {de} } @inproceedings{BultheelJansenMaesetal., author = {Bultheel, Adhemar and Jansen, M. and Maes, J. and Van Aerschot, W. and Vanraes, E.}, title = {SUBDIVIDE AND CONQUER RESOLUTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2909}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29091}, pages = {47}, abstract = {This contribution will be freewheeling in the domain of signal, image and surface processing and touch briefly upon some topics that have been close to the heart of people in our research group. A lot of the research in this domain that has been carried out worldwide over the last 20 years deals with multiresolution. Multiresolution allows a function (in the broadest sense) to be represented at different levels of detail.
This was applied not only to signals and images but also to solving all kinds of complex numerical problems. Since wavelets came into play in the 1980s, this idea has been applied and generalized by many researchers. Therefore we use this as the central idea throughout this text. Wavelets, subdivision and hierarchical bases are the appropriate tools to obtain these multiresolution effects. We shall introduce some of the concepts in a rather informal way and show that the same concepts will work in one, two and three dimensions. The applications in the three cases are, however, quite different, and thus one wants to achieve very different goals when dealing with signals, images or surfaces. Because completeness in our treatment is impossible, we have chosen to describe two case studies after introducing some concepts in signal processing. These case studies are still the subject of current research. The first one attempts to solve a problem in image processing: how to approximate an edge in an image efficiently by subdivision. The method is based on normal offsets. The second case is the use of Powell-Sabin splines to give a smooth multiresolution representation of a surface. In this context we also illustrate the general method of construction of a spline wavelet basis using a lifting scheme.}, subject = {Architektur }, language = {en} } @inproceedings{CacaoConstalesKrausshar, author = {Cacao, Isabel and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {BESSEL FUNCTIONS AND HIGHER DIMENSIONAL DIRAC TYPE EQUATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2936}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29366}, pages = {8}, abstract = {In this paper we study the structure of the solutions to higher dimensional Dirac type equations generalizing the known λ-hyperholomorphic functions, where λ is a complex parameter. The structure of the solutions to the system of partial differential equations (D - λ)f = 0 shows a close connection with Bessel functions of the first kind with complex argument. The more general system of partial differential equations that is considered in this paper combines Dirac and Euler operators and emphasizes the role of the Bessel functions. However, contrary to the simplest case, one now gets Bessel functions of arbitrary complex order.}, subject = {Architektur }, language = {en} } @inproceedings{ChangChang, author = {Chang, Wei-Tsang and Chang, Teng-Wen}, title = {TIME-BASED FORM TRANSFORMATION WITH FOLDING SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2937}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29371}, pages = {10}, abstract = {Design activity can be treated computationally as a state transition. In stepwise processing, in-between form-states are not easily observed. However, in this research a time-based concept is introduced and applied in order to bridge the gap. In architecture, folding is one method of form manipulation, and architects also want to search for alternatives by this operation. Besides, the folding operation has to be defined and parameterized before the time factor is involved as a variable of folding.
As a result, time-based transformation provides sequential form states and redirects design activity.}, subject = {Architektur }, language = {en} } @inproceedings{ConstalesKrausshar, author = {Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {ON THE NAVIER-STOKES EQUATION WITH FREE CONVECTION IN STRIP DOMAINS AND 3D TRIANGULAR CHANNELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2938}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29387}, pages = {12}, abstract = {The Navier-Stokes equations and related ones can be treated very elegantly with the quaternionic operator calculus developed in a series of works by K. G{\"u}rlebeck, W. Spr{\"o}ßig and others. This study will be extended in this paper. In order to apply the quaternionic operator calculus to solve these types of boundary value problems fully explicitly, one basically needs to evaluate two types of integral operators: the Teodorescu operator and the quaternionic Bergman projector. While the integral kernel of the Teodorescu transform is universal for all domains, the kernel function of the Bergman projector, called the Bergman kernel, depends on the geometry of the domain. With special variants of quaternionic holomorphic multiperiodic functions we obtain explicit formulas for three-dimensional parallel-plate channels, rectangular block domains and regular triangular channels. The explicit knowledge of the integral kernels then makes it possible to evaluate the operator equations in order to determine the solutions of the boundary value problem explicitly.}, subject = {Architektur }, language = {en} } @inproceedings{CruzFalcaoMalonek, author = {Cruz, J. F. and Falc{\~a}o, M. Irene and Malonek, Helmuth Robert}, title = {3D-MAPPINGS AND THEIR APPROXIMATION BY SERIES OF POWERS OF A SMALL PARAMETER}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2940}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29406}, pages = {14}, abstract = {In classical complex function theory the geometric mapping property of conformality is closely linked with complex differentiability. In contrast to the planar case, in higher dimensions the set of conformal mappings is only the set of M{\"o}bius transformations. Unfortunately, the theory of generalized holomorphic functions (for historical reasons they are called monogenic functions) developed on the basis of Clifford algebras does not cover the set of M{\"o}bius transformations in higher dimensions, since M{\"o}bius transformations are not monogenic. But on the other hand, monogenic functions are hypercomplex differentiable functions, and the question arises whether, from this point of view, they can still play a special role for other types of 3D-mappings, for instance quasi-conformal ones. On the occasion of the 16th IKM, 3D-mapping methods based on the application of Bergman's reproducing kernel approach (BKM) were discussed. Almost all authors working before that with BKM in the Clifford setting were only concerned with the general algebraic and functional-analytic background which allows the explicit determination of the kernel in special situations. The main goal of the above-mentioned contribution was a numerical experiment using Maple software specially developed for that purpose.
Since BKM is only one of a great variety of concrete numerical methods developed for mapping problems, our goal is to present an approach to 3D-mappings that is completely different from BKM. In fact, it is an extension of ideas of L. V. Kantorovich to the 3-dimensional case by using reduced quaternions and some suitable series of powers of a small parameter. Whereas in the Clifford case of BKM the recovery of the mapping function itself and its relation to the monogenic kernel function is until now still an open problem, this approach avoids such difficulties and leads to an approximation by monogenic polynomials depending on that small parameter.}, subject = {Architektur }, language = {en} }