@inproceedings{OPUS4-749, title = {Tag des Baubetriebs 2006 - Tagungsbeitr{\"a}ge "Nachtragsmanagement in Praxis und Forschung"}, editor = {{Professur Baubetrieb und Bauverfahren}}, doi = {10.25643/bauhaus-universitaet.749}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-7498}, year = {2006}, abstract = {Die Fachtagung richtete sich an Gesch{\"a}ftsf{\"u}hrer, Projektleiter, Bauleiter und Projektsteuerer in Planung und Ausf{\"u}hrung mit Beitr{\"a}gen zum Nachtrags- und {\"A}nderungsmanagement am Bau, Workflow-Management in der Baupraxis, Integration von Informationsprozessen auf der Basis von Nemetschek Technologien sowie Kompetenzaufbau durch gezielte Weiterbildung.}, subject = {Weimar / Bauhaus-Universit{\"a}t / Professur Baubetrieb und Bauverfahren}, language = {de} } @inproceedings{AibaMaegaitoSuzuki, author = {Aiba, Yoshihisa and Maegaito, Kentaro and Suzuki, Osamu}, title = {Iteration dynamical systems of discrete Laplacians on the plane lattice(I) (Basic properties and computer simulations of the dynamical systems)}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2917}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29178}, pages = {3}, abstract = {In this study we introduce a concept of discrete Laplacian on the plane lattice and consider its iteration dynamical system. At first we discuss some basic properties on the dynamical system to be proved. Next making their computer simulations, we show that we can realize the following phenomena quite well:(1) The crystal of waters (2) The designs of carpets, embroideries (3) The time change of the numbers of families of extinct animals, and (4) The echo systems of life things. Hence we may expect that we can understand the evolutions and self organizations by use of the dynamical systems. 
Here we want to make a stress on the following fact: Although several well known chaotic dynamical systems can describe chaotic phenomena, they have difficulties in the descriptions of the evolutions and self organizations.}, subject = {Architektur }, language = {en} } @inproceedings{BaitschHartmann, author = {Baitsch, Matthias and Hartmann, Dietrich}, title = {A FRAMEWORK FOR THE INTERACTIVE VISUALIZATION OF ENGINEERING MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2919}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29194}, pages = {9}, abstract = {Interactive visualization based on 3D computer graphics nowadays is an indispensable part of any simulation software used in engineering. Nevertheless, the implementation of such visualization software components is often avoided in research projects because it is a challenging and potentially time consuming task. In this contribution, a novel Java framework for the interactive visualization of engineering models is introduced. It supports the task of implementing engineering visualization software by providing adequate program logic as well as high level classes for the visual representation of entities typical for engineering models. The presented framework is built on top of the open source visualization toolkit VTK. In VTK, a visualization model is established by connecting several filter objects in a so called visualization pipeline. Although designing and implementing a good pipeline layout is demanding, VTK does not support the reuse of pipeline layouts directly. Our framework tailors VTK to engineering applications on two levels. On the first level it adds new - engineering model specific - filter classes to VTK. On the second level, ready made pipeline layouts for certain aspects of engineering models are provided. 
For instance there is a pipeline class for one-dimensional elements like trusses and beams that is capable of showing the elements along with deformations and member forces. In order to facilitate the implementation of a graphical user interface (GUI) for each pipeline class, there exists a reusable Java Swing GUI component that allows the user to configure the appearance of the visualization model. Because of the flexible structure, the framework can be easily adapted and extended to new problem domains. Currently it is used in (i) an object-oriented p-version finite element code for design optimization, (ii) an agent based monitoring system for dam structures and (iii) the simulation of destruction processes by controlled explosives based on multibody dynamics. Application examples from all three domains illustrates that the approach presented is powerful as well as versatile.}, subject = {Architektur }, language = {en} } @inproceedings{Bargstaedt2006, author = {Bargst{\"a}dt, Hans-Joachim}, title = {Der Bauvertrag sagt dazu leider nichts! - Was tun?}, doi = {10.25643/bauhaus-universitaet.849}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8498}, year = {2006}, subject = {Weimar / Bauhaus-Universit{\"a}t / Professur Baubetrieb und Bauverfahren}, language = {de} } @inproceedings{BartelsZimmermann, author = {Bartels, Jan-Hendrik and Zimmermann, J{\"u}rgen}, title = {MINIMIZING THE TOTAL DISCOUNTED COST OF DISMANTLING A NUCLEAR POWER PLANT}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2920}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29200}, pages = {9}, abstract = {Due to economical, technical or political reasons all over the world about 100 nuclear power plants have been disconnected until today. All these power stations are still waiting for their complete dismantling which, considering one reactor, causes cost of up to one Bil. 
Euros and lasts up to 15 years. In our contribution we present a resource-constrained project scheduling approach minimizing the total discounted cost of dismantling a nuclear power plant. A project of dismantling a nuclear power plant can be subdivided into a number of disassembling activities. The execution of these activities requires time and scarce resources like manpower, special equipment or storage facilities for the contaminated material arising from the dismantling. Moreover, we have to regard several minimum and maximum time lags (temporal constraints) between the start times of the different activities. Finally, each disassembling activity can be processed in two alternative execution modes, which lead to different disbursements and determine the resource requirements of the considered activity. The optimization problem is to determine a start time and an execution mode for each activity, such that the discounted cost of the project is minimum, and neither the temporal constraints are violated nor the activities' resource requirements exceed the availability of any scarce resource at any point in time. In our contribution we introduce an appropriate multi-mode project scheduling model with minimum and maximum time lags as well as renewable and cumulative resources for the described optimization problem. Furthermore, we show that the considered optimization problem is NP-hard in the strong sense. For small problem instances, optimal solutions can be gained from a relaxation based enumeration approach which is incorporated into a branch and bound algorithm. 
In order to be able to solve large problem instances, we also propose a truncated version of the devised branch and bound algorithm.}, subject = {Architektur }, language = {en} } @inproceedings{Bauch2006, author = {Bauch, Ullrich}, title = {Transparenz im {\"A}nderungsmanagement - ein Vorteil f{\"u}r alle Vertragspartner?}, doi = {10.25643/bauhaus-universitaet.846}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8460}, year = {2006}, subject = {Weimar / Bauhaus-Universit{\"a}t / Professur Baubetrieb und Bauverfahren}, language = {de} } @inproceedings{BauerKandlerWeiss, author = {Bauer, Marek and Kandler, A. and Wei{\ss}, Hendrik}, title = {MODEL OF TRAM LINE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2921}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29217}, pages = {11}, abstract = {From passenger's perspective punctuality is one of the most important features of trams operations. Unfortunately in most cases this feature is only insufficiently fulfilled. In this paper we present a simulation model for trams operation with special focus on punctuality. The aim is to get a helpful tool for designing time-tables and for analyzing the effects by changing priorities for trams in traffic lights respectively the kind of track separation. A realization of trams operations is assumed to be a sequence of running times between successive stops and times spent by tram at the stops. In this paper the running time is modeled by the sum of its mean value and a zero-mean random variable. With the help of multiple regression we find out that the average running time is a function depending on the length of the sections and the number of intersections. The random component is modeled by a sum of two independent zero-mean random variables. 
One of these variables describes the disturbance caused by the process of waiting at an intersection and the other the disturbance caused by the process of driving. The time spent at a stop is assumed to be a random variable, too. Its distribution is estimated from given measurements of these stop times for different tram lines in Krak{\'o}w. Finally a special case of the introduced model is considered and numerical results are presented. This paper is involved with CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauerRichter, author = {Bauer, Marek and Richter, Matthias}, title = {STATISTICAL ANALYSIS OF TIME LOST BY TRAMS BEFORE DEPARTURE FROM STOPS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2922}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29226}, pages = {18}, abstract = {The ride of the tram along the line, defined by a time-table, consists of the travel time between the subsequent sections and the time spent by tram on the stops. In the paper, statistical data collected in the city of Krakow is presented and evaluated. In Polish conditions, for trams the time spent on stops makes up the remarkable amount of 30 \% of the total time of tram line operation. Moreover, this time is characterized by large variability. The time spent by tram on a stop consists of alighting and boarding time and time lost by tram on stop after alighting and boarding time ending, but before departure. 
Alighting and boarding time itself usually depends on the random number of alighting and boarding passengers and also on the number of passengers which are inside the vehicle. However, the time spent by tram on stop after alighting and boarding time ending is an effect of certain random events, mainly because of impossibility of departure from stop, caused by lack of priorities for public transport vehicles. The main focus of the talk lies on the description and the modelling of these effects. This paper is involved with CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauriedelDonathKoenig, author = {Bauriedel, Christian and Donath, Dirk and K{\"o}nig, Reinhard}, title = {COMPUTER-SUPPORTED SIMULATIONS FOR URBAN PLANNING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2923}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29235}, pages = {10}, abstract = {The idea about a simulation program to support urban planning is explained: Four different, clearly defined developing paths can be calculated for the rebuilding of a shrinking town. Aided by self-organization principles, a complex system can be created. The dynamics based on the action patterns of single actors, whose behaviour cyclically depends on the generated structure. Global influences, which control the development, can be divided at a spatial, socioeconomic, and organizational-juridical level. The simulation model should offer conclusions on new planning strategies, especially in the context of the creation process of rebuilding measures. 
An example of a transportation system is shown by means of prototypes for the visualisation of the dynamic development process.}, subject = {Architektur }, language = {en} } @inproceedings{BeranDlask, author = {Beran, V{\'a}clav and Dlask, Petr}, title = {CONSTRUCTION SPEED AND CASH FLOW OPTIMISATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2926}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29269}, pages = {10}, abstract = {Practical examples show that the improvement in cost flow and total amount of money spent in construction and further use may be cut significantly. The calculation is based on spreadsheets calculation, very easy to develop on most PC's nowadays. Construction works are a field where the evaluation of Cash Flow can be and should be applied. Decisions about cash flow in construction are decisions with long-term impact and long-term memory. Mistakes from the distant past have a massive impact on situations in the present and into the far economic future of economic activities. Two approaches exist. The Just-in-Time (JIT) approach and life cycle costs (LCC) approach. The calculation example shows the dynamic results for the production speed in opposition to stable flow of production in duration of activities. More sophisticated rescheduling in optimal solution might bring in return extra profit. In the technologies and organizational processes for industrial buildings, railways and road reconstruction, public utilities and housing developments there are assembly procedures that are very appropriate for the given purpose, complicated research-, development-, innovation-projects are all very good aspects of these kinds of applications. 
The investors of large investments and all public invested money may be spent more efficiently if an optimisation speed-strategy can be calculated.}, subject = {Architektur }, language = {en} } @inproceedings{BeranHromada, author = {Beran, V{\'a}clav and Hromada, E.}, title = {SOFTWARE FOR PROJECT RELIABILITY ESTIMATION AND RISK EVALUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2925}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29255}, pages = {16}, abstract = {The contribution presents a model that is able to simulate construction duration and cost for a building project. This model predicts set of expected project costs and duration schedule depending on input parameters such as production speed, scope of work, time schedule, bonding conditions and maximum and minimum deviations from scope of work and production speed. The simulation model is able to calculate, on the basis of input level of probability, the adequate construction cost and time duration of a project. The reciprocal view attends to finding out the adequate level of probability for construction cost and activity durations. Among interpretive outputs of the application software belongs the compilation of a presumed dynamic progress chart. This progress chart represents the expected scenario of development of a building project with the mapping of potential time dislocations for particular activities. The calculation of a presumed dynamic progress chart is based on an algorithm, which calculates mean values as a partial result of the simulated building project. Construction cost and time models are, in many ways, useful tools in project management. Clients are able to make proper decisions about the time and cost schedules of their investments. 
Consequently, building contractors are able to schedule predicted project cost and duration before any decision is finalized.}, subject = {Architektur }, language = {en} } @inproceedings{Bilchuk, author = {Bilchuk, Irina}, title = {GEOMETRIC IDENTIFICATION OF OBJECTS IN CIVIL ENGINEERING APPLICATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2927}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29274}, pages = {21}, abstract = {Objects for civil engineering applications can be identified with their reference in memory, their alpha-numeric name or their geometric location. Particularly in graphic user interfaces, it is common to identify objects geometrically by selection with the mouse. As the number of geometric objects in a graphic user interface grows, it becomes increasingly more important to treat the basic operations add, search and remove for geometric objects with great efficiency. Guttmann has proposed the Region-Tree (R-tree) for geometric identification in an environment which uses pages on disc as data structure. Minimal bounding rectangles are used to structure the data in such a way that neighborhood relations can be described effectively. The literature shows that the parameters which influence the efficiency of the R-trees have been studied extensively, but without conclusive results. The goal of the research which is reported in this paper is to determine reliably the parameters which significantly influence the efficiency of R-trees for geometric identification in technical drawings. In order to make this investigation conclusive, it must be performed with the best available software technology. Therefore an object-oriented software for the method is developed. This implementation is tested with technical drawings containing many thousands of geometric objects. 
These drawings are created automatically by a stochastic generator which is incorporated into a test bed consisting of an editor and a visualisor. This test bed is used to obtain statistics for the main factors which affect the efficiency of R-trees. The investigation shows that the following main factors which affect the efficiency can be identified reliably : number of geometric objects on the drawing the minimum und maximum number of children of a node of the tree the maximum width and height of the minimal bounding rectangles of the geometric objects relative to the size of the drawing.}, subject = {Architektur }, language = {en} } @incollection{Bimber2006, author = {Bimber, Oliver}, title = {Projector-Based Augmentation}, series = {Emerging Technologies of Augmented Reality: Interfaces \& Design}, booktitle = {Emerging Technologies of Augmented Reality: Interfaces \& Design}, doi = {10.25643/bauhaus-universitaet.735}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-7353}, year = {2006}, abstract = {Projector-based augmentation approaches hold the potential of combining the advantages of well-establishes spatial virtual reality and spatial augmented reality. Immersive, semi-immersive and augmented visualizations can be realized in everyday environments - without the need for special projection screens and dedicated display configurations. Limitations of mobile devices, such as low resolution and small field of view, focus constrains, and ergonomic issues can be overcome in many cases by the utilization of projection technology. Thus, applications that do not require mobility can benefit from efficient spatial augmentations. Examples range from edutainment in museums (such as storytelling projections onto natural stone walls in historical buildings) to architectural visualizations (such as augmentations of complex illumination simulations or modified surface materials in real building structures). 
This chapter describes projector-camera methods and multi-projector techniques that aim at correcting geometric aberrations, compensating local and global radiometric effects, and improving focus properties of images projected onto everyday surfaces.}, subject = {Erweiterte Realit{\"a}t }, language = {en} } @article{BimberGrundhoeferZollmannetal.2006, author = {Bimber, Oliver and Grundh{\"o}fer, Anselm and Zollmann, Stefanie and Kolster, Daniel}, title = {Digital Illumination for Augmented Studios}, doi = {10.25643/bauhaus-universitaet.857}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8576}, year = {2006}, abstract = {Virtual studio technology plays an important role for modern television productions. Blue-screen matting is a common technique for integrating real actors or moderators into computer generated sceneries. Augmented reality offers the possibility to mix real and virtual in a more general context. This article proposes a new technological approach for combining real studio content with computergenerated information. Digital light projection allows a controlled spatial, temporal, chrominance and luminance modulation of illumination - opening new possibilities for TV studios.}, subject = {Studiotechnik}, language = {en} } @phdthesis{Blickling2006, author = {Blickling, Arno}, title = {Spezifikation des Bau-Solls durch interaktive Modellierung auf virtuellen Baustellen}, doi = {10.25643/bauhaus-universitaet.790}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20061105-8311}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {Heutige Methoden zur Soll-Spezifikation von Bauleistungen (Kostenermittlung und zeitliche Ablaufplanung) gehen von einer abstrahierten und vereinfachten Betrachtung der Zusammenh{\"a}nge bei Bauprojekten aus. Leistungsverzeichnisse, Kostenermittlungen und Bauzeitpl{\"a}ne orientieren sich nur indirekt an der Geometrie des Bauwerks und der Baustelle. 
Die dabei verwendeten Medien wie Papier, 2D-Dateien, digitale Leistungsbeschreibungen oder 3D-Darstellungen lassen die Suche nach Informationen auf der Baustelle zu einem zeitaufw{\"a}ndigen und in Anbetracht existierender Medientechnologien ineffizienten Prozess werden. Interaktive virtuelle Umgebungen erlauben die Aufl{\"o}sung starrer Zusammenh{\"a}nge durch interaktive Eingriffe des Anwenders und visualisieren komplexe bauproduktionstechnische Vorg{\"a}nge. Das Konzept der visuellen interaktiven Simulation der Bauproduktion sieht vor, die Soll-Spezifikation anhand eines interaktiven 3D-Modells zu entwickeln, um r{\"a}umliche Ver{\"a}nderungen und parallele Prozesse auf der virtuellen Baustelle im Rahmen der Entscheidungsfindung zum Bauablauf besser ber{\"u}cksichtigen zu k{\"o}nnen. Verlangt man einen hohen Grad an Interaktivit{\"a}t mit dem 3D-Modell, dann bieten sich Computerspieltechnologien sehr gut zu Verifikationszwecken an. Die visuelle interaktive Simulation der Bauproduktion ist damit als eine 3D-modellbasierte Methode der Prozessmodellierung zu verstehen, die Entscheidungen als Input ben{\"o}tigt und die Kostenermittlung sowie die zeitliche Ablaufplanung als Output liefert.}, subject = {Virtuelle Realit{\"a}t}, language = {de} } @inproceedings{BockGuerlebeck, author = {Bock, Sebastian and G{\"u}rlebeck, Klaus}, title = {A Coupled Ritz-Galerkin Approach Using Holomorphic and Anti-holomorphic Functions}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2928}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29281}, pages = {14}, abstract = {The contribution focuses on the development of a basic computational scheme that provides a suitable calculation environment for the coupling of analytical near-field solutions with numerical standard procedures in the far-field of the singularity. 
The proposed calculation scheme uses classical methods of complex function theory, which can be generalized to 3-dimensional problems by using the framework of hypercomplex analysis. The adapted approach is mainly based on the factorization of the Laplace operator by the Cauchy-Riemann operator, where exact solutions of the respective differential equation are constructed by using an orthonormal basis of holomorphic and anti-holomorphic functions.}, subject = {Architektur }, language = {en} } @article{Both2006, author = {von Both, Petra}, title = {Integration von Informationsprozessen auf der Basis von Nemetschek Technologien}, doi = {10.25643/bauhaus-universitaet.851}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8511}, year = {2006}, subject = {Weimar / Bauhaus-Universit{\"a}t / Professur Baubetrieb und Bauverfahren}, language = {de} } @inproceedings{BrackxDeKnockDeSchepper, author = {Brackx, Fred and De Knock, B. and De Schepper, Hennie}, title = {A MULTI--DIMENSIONAL HILBERT TRANSFORM IN ANISOTROPIC CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2929}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29297}, pages = {15}, abstract = {In earlier research, generalized multidimensional Hilbert transforms have been constructed in m-dimensional Euclidean space, in the framework of Clifford analysis. Clifford analysis, centred around the notion of monogenic functions, may be regarded as a direct and elegant generalization to higher dimension of the theory of the holomorphic functions in the complex plane. The considered Hilbert transforms, usually obtained as a part of the boundary value of an associated Cauchy transform in m+1 dimensions, might be characterized as isotropic, since the metric in the underlying space is the standard Euclidean one. 
In this paper we adopt the idea of a so-called anisotropic Clifford setting, which leads to the introduction of a metric dependent m-dimensional Hilbert transform, showing, at least formally, the same properties as the isotropic one. The Hilbert transform being an important tool in signal analysis, this metric dependent setting has the advantage of allowing the adjustment of the co-ordinate system to possible preferential directions in the signals to be analyzed. A striking result to be mentioned is that the associated anisotropic (m+1)-dimensional Cauchy transform is no longer uniquely determined, but may stem from a diversity of (m+1)-dimensional "mother" metrics.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperDeSchepperetal., author = {Brackx, Fred and De Schepper, Hennie and De Schepper, Nele and Sommen, Frank}, title = {HERMITIAN CLIFFORD-HERMITE WAVELETS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2931}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29313}, pages = {13}, abstract = {The one-dimensional continuous wavelet transform is a successful tool for signal and image analysis, with applications in physics and engineering. Clifford analysis offers an appropriate framework for taking wavelets to higher dimension. In the usual orthogonal case Clifford analysis focusses on monogenic functions, i.e. null solutions of the rotation invariant vector valued Dirac operator ∂, defined in terms of an orthogonal basis for the quadratic space Rm underlying the construction of the Clifford algebra R0,m. An intrinsic feature of this function theory is that it encompasses all dimensions at once, as opposed to a tensorial approach with products of one-dimensional phenomena. 
This has allowed for a very specific construction of higher dimensional wavelets and the development of the corresponding theory, based on generalizations of classical orthogonal polynomials on the real line, such as the radial Clifford-Hermite polynomials introduced by Sommen. In this paper, we pass to the Hermitian Clifford setting, i.e. we let the same set of generators produce the complex Clifford algebra C2n (with even dimension), which we equip with a Hermitian conjugation and a Hermitian inner product. Hermitian Clifford analysis then focusses on the null solutions of two mutually conjugate Hermitian Dirac operators which are invariant under the action of the unitary group. In this setting we construct new Clifford-Hermite polynomials, starting in a natural way from a Rodrigues formula which now involves both Dirac operators mentioned. Due to the specific features of the Hermitian setting, four different types of polynomials are obtained, two types of even degree and two types of odd degree. These polynomials are used to introduce a new continuous wavelet transform, after thorough investigation of all necessary properties of the involved polynomials, the mother wavelet and the associated family of wavelet kernels.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperSommen, author = {Brackx, Fred and De Schepper, Nele and Sommen, Frank}, title = {Clifford-Hermite and Two-Dimensional Clifford-Gabor Filters For Early Vision}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2930}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29303}, pages = {22}, abstract = {Image processing has been much inspired by the human vision, in particular with regard to early vision. 
The latter refers to the earliest stage of visual processing responsible for the measurement of local structures such as points, lines, edges and textures in order to facilitate subsequent interpretation of these structures in higher stages (known as high level vision) of the human visual system. This low level visual computation is carried out by cells of the primary visual cortex. The receptive field profiles of these cells can be interpreted as the impulse responses of the cells, which are then considered as filters. According to the Gaussian derivative theory, the receptive field profiles of the human visual system can be approximated quite well by derivatives of Gaussians. Two mathematical models suggested for these receptive field profiles are on the one hand the Gabor model and on the other hand the Hermite model which is based on analysis filters of the Hermite transform. The Hermite filters are derivatives of Gaussians, while Gabor filters, which are defined as harmonic modulations of Gaussians, provide a good approximation to these derivatives. It is important to note that, even if the Gabor model is more widely used than the Hermite model, the latter offers some advantages like being an orthogonal basis and having better match to experimental physiological data. In our earlier research both filter models, Gabor and Hermite, have been developed in the framework of Clifford analysis. Clifford analysis offers a direct, elegant and powerful generalization to higher dimension of the theory of holomorphic functions in the complex plane. In this paper we expose the construction of the Hermite and Gabor filters, both in the classical and in the Clifford analysis framework. We also generalize the concept of complex Gaussian derivative filters to the Clifford analysis setting. 
Moreover, we present further properties of the Clifford-Gabor filters, such as their relationship with other types of Gabor filters and their localization in the spatial and in the frequency domain formalized by the uncertainty principle.}, subject = {Architektur }, language = {en} } @phdthesis{Brandstetter2006, author = {Brandstetter, Thomas}, title = {Kr{\"a}fte messen. Die Maschine von Marly und die Kultur der Technik 1680-1840}, doi = {10.25643/bauhaus-universitaet.760}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20060702-7984}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {Die Arbeit besch{\"a}ftigt sich mit der Entstehung eines {\"o}konomischen Kraftmaßes am Beispiel der Maschine von Marly im Zeitraum von ca. 1680 bis 1840. Die Leitthese der Dissertation besagt, dass vom 17. zum 19. Jahrhundert eine grundlegende Transformation des Maschinenbegriffs stattfand, die als {\"U}bergang vom Substanzbegriff zum Funktionsbegriff der Maschine bezeichnet werden kann. Im 17. Jahrhundert wurden mechanische Apparate als in sich geschlossene, selbstbez{\"u}gliche Strukturen aufgefasst. Als anschaulich erfahrbare Objekte konnten sie als Bildgeber dienen, die mittels des Verfahrens der Strukturanalogie Erkl{\"a}rungsmuster f{\"u}r verschiedenste Ph{\"a}nomene (K{\"o}rper, Staat, Welt) boten. Demzufolge galten sie als selbstevident: sie waren erkl{\"a}rend und mussten selbst nicht erkl{\"a}rt werden. Ihr etwaiger Zweck und ihre Einbettung in gesellschaftliche Zusammenh{\"a}nge spielten dabei keine Rolle. Wie anhand der Beschreibungen und Darstellungen aus jener Zeit nachgewiesen werden kann, wurde die Maschine von Marly innerhalb dieser Episteme als architektonisches Objekt wahrgenommen, bei dem vor allem das Zusammenspiel der einzelnen Elemente Aufmerksamkeit erregte. Wie andere Maschinen auch stand sie unter dem Primat der Sichtbarkeit. 
Man war davon {\"u}berzeugt, dass die Eigenschaften einer Maschine von der strukturellen Anordnung ihrer Bauteile abhingen und glaubte, ihre Qualit{\"a}t an ihrer Gestalt ablesen zu k{\"o}nnen. Ab der Mitte des 18. Jahrhunderts tauchte die Maschine von Marly in den Schriften physiokratischer Autoren auf. Zuerst diente sie dort als Beispiel f{\"u}r die Verschwendungssucht Louis' XIV. und als Metapher f{\"u}r eine schlechte Einrichtung des Staates. Doch zunehmend begann man, sie auch in ihrer Faktizit{\"a}t als technisch-politisches Objekt zu begreifen. Man kritisierte ihre aktuelle Nutzung und schlug andere M{\"o}glichkeiten ihrer Verwendung vor, etwa die Bew{\"a}sserung von Feldern oder die st{\"a}dtische Trinkwasserversorgung. Damit war die Maschine von Marly nicht l{\"a}nger ein Modell f{\"u}r die Einrichtung des Staates, das nur am Maßstab der immanenten Perfektion beurteilt werden konnte. Vielmehr war sie nun ein Instrument der Regierung, das sich als Teil eines staatlich verfassten Gemeinwesens verantworten musste. Als solches wurde sie auch zu einem bevorzugten Gegenstand aufkl{\"a}rerischer Reformprojekte. Das zeigt sich besonders deutlich am Wettbewerb, den die Pariser Akademie der Wissenschaften 1784-1786 organisiert hatte und der Vorschl{\"a}ge zur Verbesserung oder Ersetzung der Maschine von Marly zum Gegenstand hatte. Die Auswertung der mehr als 100 eingereichten Projekte und Memoranden erm{\"o}glicht einen einzigartigen Blick auf die Hoffnungen und W{\"u}nsche, die Ende des 18. Jahrhunderts an die Erfindung technischer Ger{\"a}te gekoppelt waren. Um 1800 kann man die allm{\"a}hliche Entstehung eines Funktionsbegriffs der Maschine bemerken. Lazare Carnots Essai sur les machines en g{\´e}n{\´e}ral, der eine in der Sprache der Algebra artikulierte Definition der Maschine beinhaltete, trug maßgeblich dazu bei, die Anschaulichkeit zugunsten eines operativen Symbolismus zu delegitimieren. 
Erst dadurch war die Formulierung eines Effizienzkalk{\"u}ls m{\"o}glich. Erg{\"a}nzt wurde diese Formalisierung durch den Diskurs der Industrialisierung, in dem technische Apparate zunehmend als Produktionsmittel verstanden wurden. Die Maschine von Marly war ein wichtiger Schauplatz f{\"u}r die Entstehung eines {\"o}konomischen Kraftmaßes. Nicht nur wurden dort Experimente mit verschiedenen Messinstrumenten (Dynamometern) durchgef{\"u}hrt, auch diente sie Joseph Montgolfier als Beispiel um zu beweisen, dass Kraft als Geldwert ausgedr{\"u}ckt werden k{\"o}nne. In den ersten Jahrzehnten des 19. Jahrhundert wurden Maschinen schließlich relational als Positionen innerhalb eines nationalen Produktionssystems definiert. Sie galten als Krafttransformatoren, bei denen ein bestimmter Input von ‚force motrice' einen entsprechenden Output von ‚travail utile' ergeben w{\"u}rde. Ihre vornehmlichste Aufgabe war die m{\"o}glichst effiziente Ausnutzung der Kraftressourcen. Den vorl{\"a}ufigen Endpunkt erreichte die Entstehung des {\"o}konomischen Kraftmaßes um 1830 mit der Formulierung des Begriffs der ‚mechanischen Arbeit'.}, subject = {Technik / Geschichte}, language = {de} } @inproceedings{BraunesDonath, author = {Braunes, J{\"o}rg and Donath, Dirk}, title = {COMPUTERGEST{\"U}TZTE PLANUNG IM BESTAND VON DER DIGITALEN BESTANDSERFASSUNG ZUR PLANUNGSUNTERST{\"U}TZUNG IM CAAD}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2933}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29338}, pages = {10}, abstract = {F{\"u}r eine gesicherte Planung im Bestand, sind eine F{\"u}lle verschiedenster Informationen zu ber{\"u}cksichtigen, welche oft erst w{\"a}hrend des Planungs- oder Bauprozesses gewonnen werden. Voraussetzung hierf{\"u}r bildet immer eine Bestandserfassung. 
Zwar existieren Computerprogramme zur Unterst{\"u}tzung der Bestandserfassung, allerdings handelt es sich hierbei ausschließlich um Insell{\"o}sungen. Der Export der aufgenommenen Daten in ein Planungssystem bedingt Informationsverluste. Trotz der potentiellen M{\"o}glichkeit aktueller CAAD/BIM Systeme zur Verwaltung von Bestandsdaten, sind diese vorrangig f{\"u}r die Neubauplanung konzipiert. Die durchg{\"a}ngige Bearbeitung von Sanierungsprojekten von der Erfassung des Bestandes {\"u}ber die Entwurfs- und Genehmigungsplanung bis zur Ausf{\"u}hrungsplanung innerhalb eines CAAD/BIM Systems wird derzeit nicht ad{\"a}quat unterst{\"u}tzt. An der Professur Informatik in der Architektur (InfAR) der Fakult{\"a}t Architektur der Bauhaus-Universit{\"a}t Weimar entstanden im Rahmen des DFG Sonderforschungsbereich 524 "Werkzeuge und Konstruktionen f{\"u}r die Revitalisierung von Bauwerken" in den letzten Jahren Konzepte und Prototypen zur fachlich orientierten Unterst{\"u}tzung der Planung im Bestand. Der Fokus lag dabei in der Erfassung aller planungsrelevanter Bestandsdaten und der Abbildung dieser in einem dynamischen Bauwerksmodell. 
Aufbauend auf diesen Forschungsarbeiten befasst sich der Artikel mit der kontextbezogenen Weiterverwendung und gezielten Bereitstellung von Bestandsdaten im Prozess des Planens im Bestand und der Integration von Konzepten der planungsrelevanten Bestandserfassung in markt{\"u}bliche CAAD/BIM Systeme.}, subject = {Architektur }, language = {de} } @inproceedings{BrossmannMueller, author = {Broßmann, Marko and M{\"u}ller, Karl-Heinz}, title = {STOCHASTISCHE ANALYSE VON STAHLBETONBALKEN IM GRENZZUSTAND DER ADAPTION UNTER BER{\"U}CKSICHTIGUNG DER STEIFIGKEITSDEGRADATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2934}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29341}, pages = {20}, abstract = {Am Beispiel eines 3-feldrigen Durchlauftr{\"a}gers wird die Versagenswahrscheinlichkeit von wechselnd belasteten Stahlbetonbalken bez{\"u}glich des Grenzzustandes der Adaption (Einspielen, shakedown) untersucht. Die Adaptionsanalyse erfolgt unter Ber{\"u}cksichtigung der beanspruchungsabh{\"a}ngigen Degradation der Biegesteifigkeit infolge Rissbildung. Die damit verbundene mechanische Problemstellung kann auf die Adaptionsanalyse linear elastisch - ideal plastischer Balkentragwerke mit unbekannter aber begrenzter Biegesteifigkeit zur{\"u}ckgef{\"u}hrt werden. Die Versagenswahrscheinlichkeit wird unter Ber{\"u}cksichtigung stochastischer Tragwerks- und Belastungsgr{\"o}ßen berechnet. Tragwerkseigenschaften und st{\"a}ndige Lasten gelten als zeitunabh{\"a}ngige Zufallsgr{\"o}ßen. Zeitlich ver{\"a}nderliche Lasten werden als nutzungsdauerbezogene Extremwerte POISSONscher Rechteck-Pulsprozesse unter Ber{\"u}cksichtigung zeitlicher {\"U}berlagerungseffekte modelliert, so dass die Versagenswahrscheinlichkeit ebenfalls eine nutzungsdauerbezogene Gr{\"o}ße ist. Die mechanischen Problemstellungen werden numerisch mit der mathematischen Optimierung gel{\"o}st. 
Die Versagenswahrscheinlichkeit wird auf statistischem Weg mit der Monte-Carlo-Methode gesch{\"a}tzt.}, subject = {Architektur }, language = {de} } @phdthesis{Bubner2006, author = {Bubner, Andr{\´e}}, title = {Datenmodelle zur Bearbeitung von Ingenieuraufgaben am Beispiel von Wohnh{\"a}usern in Stahlbauweise}, doi = {10.25643/bauhaus-universitaet.808}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20070423-8580}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {Modelle bilden die Grundlage der Planung. Sie repr{\"a}sentieren die zur Bearbeitung erforderlichen Eigenschaften eines Bauwerks in einer an die spezifische Aufgabe angepassten Form. Zwischen den verschiedenen zur Abbildung des Bauwerks eingesetzten Modellen bestehen fachliche Zusammenh{\"a}nge bez{\"u}glich der darin abgebildeten Aspekte. Diese Abh{\"a}ngigkeiten werden in der praktischen Planungsbearbeitung gegenw{\"a}rtig auf Grundlage von Erfahrungswerten, normativen Vorgaben und vereinfachenden Annahmen ber{\"u}cksichtigt. Die detailliertere Modellierung von Bauwerkseigenschaften f{\"u}hrt zu einer engeren Verzahnung der verschiedenen Modelle. Um eine fachliche Inselbildung zu vermeiden, ist eine entsprechend angepasste Abbildung der zwischen den einzelnen Modellen bestehenden Beziehungen erforderlich. Mit den steigenden Anspr{\"u}chen an eine Bearbeitung von Ingenieuraufgaben gewinnt eine {\"u}ber den Zweck der Bereitstellung ausgew{\"a}hlter Informationen zum Bauwerk und der Unterst{\"u}tzung eines Datenaustauschs zwischen verschiedenen Fachplanern hinausgehende datentechnische Abbildung an Bedeutung. Dies setzt eine Diskussion der Anforderungen an eine solche Beschreibung aus fachlicher Sicht voraus. 
Die Untersuchung der fachlichen Anforderungen wird am Beispiel von Wohnh{\"a}usern in Stahlbauweise gef{\"u}hrt.}, subject = {Modellierung}, language = {de} } @article{BucherFrangopol, author = {Bucher, Christian and Frangopol, D. M.}, title = {Optimization of lifetime maintenance strategies for deteriorating structures considering probabilities of violating safety, condition, and cost thresholds}, series = {Probabilistic Engineering Mechanics}, journal = {Probabilistic Engineering Mechanics}, pages = {1--8}, abstract = {Optimization of lifetime maintenance strategies for deteriorating structures considering probabilities of violating safety, condition, and cost thresholds}, subject = {Angewandte Mathematik}, language = {en} } @inproceedings{BultheelJansenMaesetal., author = {Bultheel, Adhemar and Jansen, M. and Maes, J. and Van Aerschot, W. and Vanraes, E.}, title = {SUBDIVIDE AND CONQUER RESOLUTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2909}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29091}, pages = {47}, abstract = {This contribution will be freewheeling in the domain of signal, image and surface processing and touch briefly upon some topics that have been close to the heart of people in our research group. A lot of the research of the last 20 years in this domain that has been carried out world wide is dealing with multiresolution. Multiresolution allows to represent a function (in the broadest sense) at different levels of detail. This was not only applied in signals and images but also when solving all kinds of complex numerical problems. Since wavelets came into play in the 1980's, this idea was applied and generalized by many researchers. Therefore we use this as the central idea throughout this text. Wavelets, subdivision and hierarchical bases are the appropriate tools to obtain these multiresolution effects. 
We shall introduce some of the concepts in a rather informal way and show that the same concepts will work in one, two and three dimensions. The applications in the three cases are however quite different, and thus one wants to achieve very different goals when dealing with signals, images or surfaces. Because completeness in our treatment is impossible, we have chosen to describe two case studies after introducing some concepts in signal processing. These case studies are still the subject of current research. The first one attempts to solve a problem in image processing: how to approximate an edge in an image efficiently by subdivision. The method is based on normal offsets. The second case is the use of Powell-Sabin splines to give a smooth multiresolution representation of a surface. In this context we also illustrate the general method of construction of a spline wavelet basis using a lifting scheme.}, subject = {Architektur }, language = {en} } @inproceedings{CacaoConstalesKrausshar, author = {Cacao, Isabel and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {BESSEL FUNCTIONS AND HIGHER DIMENSIONAL DIRAC TYPE EQUATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2936}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29366}, pages = {8}, abstract = {In this paper we study the structure of the solutions to higher dimensional Dirac type equations generalizing the known λ-hyperholomorphic functions, where λ is a complex parameter. The structure of the solutions to the system of partial differential equations (D- λ) f=0 show a close connection with Bessel functions of first kind with complex argument. The more general system of partial differential equations that is considered in this paper combines Dirac and Euler operators and emphasizes the role of the Bessel functions. 
However, contrary to the simplest case, one gets now Bessel functions of any arbitrary complex order.}, subject = {Architektur }, language = {en} } @inproceedings{ChangChang, author = {Chang, Wei-Tsang and Chang, Teng-Wen}, title = {TIME-BASED FORM TRANSFORMATION WITH FOLDING SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2937}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29371}, pages = {10}, abstract = {Design activity could be treated as state transition computationally. In stepwise processing, in-between form-states are not easily observed. However, in this research time-based concept is introduced and applied in order to bridge the gap. In architecture, folding is one method of form manipulation and architects also want to search for alternatives by this operation. Besides, folding operation has to be defined and parameterized before time factor is involved as a variable of folding. As a result, time-based transformation provides sequential form states and redirects design activity.}, subject = {Architektur }, language = {en} } @inproceedings{ConstalesKrausshar, author = {Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {ON THE NAVIER-STOKES EQUATION WITH FREE CONVECTION IN STRIP DOMAINS AND 3D TRIANGULAR CHANNELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2938}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29387}, pages = {12}, abstract = {The Navier-Stokes equations and related ones can be treated very elegantly with the quaternionic operator calculus developed in a series of works by K. Guerlebeck, W. Sproeossig and others. This study will be extended in this paper. 
In order to apply the quaternionic operator calculus to solve these types of boundary value problems fully explicitly, one basically needs to evaluate two types of integral operators: the Teodorescu operator and the quaternionic Bergman projector. While the integral kernel of the Teodorescu transform is universal for all domains, the kernel function of the Bergman projector, called the Bergman kernel, depends on the geometry of the domain. With special variants of quaternionic holomorphic multiperiodic functions we obtain explicit formulas for three dimensional parallel plate channels, rectangular block domains and regular triangular channels. The explicit knowledge of the integral kernels makes it then possible to evaluate the operator equations in order to determine the solutions of the boundary value problem explicitly.}, subject = {Architektur }, language = {en} } @inproceedings{CruzFalcaoMalonek, author = {Cruz, J. F. and Falc{\~a}o, M. Irene and Malonek, Helmuth Robert}, title = {3D-MAPPINGS AND THEIR APPROXIMATION BY SERIES OF POWERS OF A SMALL PARAMETER}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2940}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29406}, pages = {14}, abstract = {In classical complex function theory the geometric mapping property of conformality is closely linked with complex differentiability. In contrast to the planar case, in higher dimensions the set of conformal mappings is only the set of M{\"o}bius transformations. Unfortunately, the theory of generalized holomorphic functions (by historical reasons they are called monogenic functions) developed on the basis of Clifford algebras does not cover the set of M{\"o}bius transformations in higher dimensions, since M{\"o}bius transformations are not monogenic. 
But on the other side, monogenic functions are hypercomplex differentiable functions and the question arises if from this point of view they can still play a special role for other types of 3D-mappings, for instance, for quasi-conformal ones. On the occasion of the 16th IKM 3D-mapping methods based on the application of Bergman's reproducing kernel approach (BKM) have been discussed. Almost all authors working before that with BKM in the Clifford setting were only concerned with the general algebraic and functional analytic background which allows the explicit determination of the kernel in special situations. The main goal of the abovementioned contribution was the numerical experiment by using a Maple software specially developed for that purpose. Since BKM is only one of a great variety of concrete numerical methods developed for mapping problems, our goal is to present a complete different from BKM approach to 3D-mappings. In fact, it is an extension of ideas of L. V. Kantorovich to the 3-dimensional case by using reduced quaternions and some suitable series of powers of a small parameter. Whereas until now in the Clifford case of BKM the recovering of the mapping function itself and its relation to the monogenic kernel function is still an open problem, this approach avoids such difficulties and leads to an approximation by monogenic polynomials depending on that small parameter.}, subject = {Architektur }, language = {en} } @book{DamischHeilmannBocketal., author = {Damisch, Hubert and Heilmann, Thomas and Bock, Wolfgang and Reichert, Ram{\´o}n and Vogl, Joseph and Bergermann, Ulrike and Leonhard, Karin and Weiberg, Birk and Kirchmann, Kay and Bexte, Peter and Lauper, Anja and Brandstetter, Thomas and Kuni, Verena and Kursell, Julia and Sch{\"a}fer, Armin and Witzgall, Susanne and Becker, Lutz}, title = {Wolken}, editor = {Engell, Lorenz and Siegert, Bernhard and Vogl, Joseph}, publisher = {Verl. 
der Bauhaus-Universit{\"a}t}, address = {Weimar}, isbn = {3-86068-267-9}, doi = {10.25643/bauhaus-universitaet.1870}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130318-18708}, pages = {199}, abstract = {Aufsatzsammlung der Reihe: Archiv f{\"u}r Mediengeschichte 2005. Die Beitr{\"a}ge liefern unterschiedliche Ausschnitte aus einer Medien- und Wissensgeschichte der Wolke und sehen die Wolke als ein Motiv f{\"u}r die Selbstinterpretation zeitgen{\"o}ssischer Kultur.}, subject = {Wolke}, language = {de} } @inproceedings{DeaconvanRooyen, author = {Deacon, Michael-John and van Rooyen, G.C.}, title = {DISTRIBUTED COLLABORATION: ENGINEERING PRACTICE REQUIREMENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2941}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29410}, pages = {8}, abstract = {Designing a structure follows a pattern of creating a structural design concept, executing a finite element analysis and developing a design model. A project was undertaken to create computer support for executing these tasks within a collaborative environment. This study focuses on developing a software architecture that integrates the various structural design aspects into a seamless functional collaboratory that satisfies engineering practice requirements. The collaboratory is to support both homogeneous collaboration i.e. between users operating on the same model and heterogeneous collaboration i.e. between users operating on different model types. Collaboration can take place synchronously or asynchronously, and the information exchange is done either at the granularity of objects or at the granularity of models. The objective is to determine from practicing engineers which configurations they regard as best and what features are essential for working in a collaborative environment. 
Based on the suggestions of these engineers a specification of a collaboration configuration that satisfies engineering practice requirements will be developed.}, subject = {Architektur }, language = {en} } @inproceedings{DoganArditiGunaydin, author = {Dogan, Sevgi Zeynep and Arditi, D. and Gunaydin, H. Murat}, title = {COMPARISON OF ANN AND CBR MODELS FOR EARLY COST PREDICTION OF STRUCTURAL SYSTEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2942}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29421}, abstract = {Reasonably accurate cost estimation of the structural system is quite desirable at the early stages of the design process of a construction project. However, the numerous interactions among the many cost-variables make the prediction difficult. Artificial neural networks (ANN) and case-based reasoning (CBR) are reported to overcome this difficulty. This paper presents a comparison of CBR and ANN augmented by genetic algorithms (GA) conducted by using spreadsheet simulations. GA was used to determine the optimum weights for the ANN and CBR models. The cost data of twenty-nine actual cases of residential building projects were used as an example application. Two different sets of cases were randomly selected from the data set for training and testing purposes. Prediction rates of 84\% in the GA/CBR study and 89\% in the GA/ANN study were obtained. The advantages and disadvantages of the two approaches are discussed in the light of the experiments and the findings. It appears that GA/ANN is a more suitable model for this example of cost estimation where the prediction of numerical values is required and only a limited number of cases exist. 
The integration of GA into CBR and ANN in a spreadsheet format is likely to improve the prediction rates.}, subject = {Architektur }, language = {en} } @inproceedings{DudekRichter, author = {Dudek, Mariusz and Richter, Matthias}, title = {UNTERSUCHUNGEN ZUR ZUVERL{\"A}SSIGKEIT DES STRAßENBAHNNETZES IN KRAKAU}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2943}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29432}, pages = {19}, abstract = {Der Begriff der Zuverl{\"a}ssigkeit spielt eine zentrale Rolle bei der Bewertung von Verkehrsnetzen. Aus der Sicht der Nutzer des {\"o}ffentlichen Personennahverkehrs ({\"O}PNV) ist eines der wichtigsten Kriterien zur Beurteilung der Qualit{\"a}t des Liniennetzes, ob es m{\"o}glich ist, mit einer großen Sicherheit das Reiseziel in einer vorgegebenen Zeit zu erreichen. Im Vortrag soll dieser Zuverl{\"a}ssigkeitsbegriff mathematisch gefasst werden. Dabei wird zun{\"a}chst auf den {\"u}blichen Begriff der Zuverl{\"a}ssigkeit eines Netzes im Sinne paarweiser Zusammenhangswahrscheinlichkeiten eingegangen. Dieser Begriff wird erweitert durch die Betrachtung der Zuverl{\"a}ssigkeit unter Einbeziehung einer maximal zul{\"a}ssigen Reisezeit. In vergangenen Arbeiten hat sich die Ring-Radius-Struktur als bew{\"a}hrtes Modell f{\"u}r die theoretische Beschreibung von Verkehrsnetzen erwiesen. Diese {\"U}berlegungen sollen nun durch Einbeziehung realer Verkehrsnetzstrukturen erweitert werden. Als konkretes Beispiel dient das Straßenbahnnetz von Krakau. Hier soll insbesondere untersucht werden, welche Auswirkungen ein geplanter Ausbau des Netzes auf die Zuverl{\"a}ssigkeit haben wird. This paper is involved with CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. 
The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {de} } @inproceedings{DzwigonHempel, author = {Dzwigon, Wieslaw and Hempel, Lorenz}, title = {ZUR SYNCHRONISATION VON LINIEN IM {\"O}PNV}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2944}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29442}, pages = {12}, abstract = {Wir betrachten im {\"O}PNV ({\"O}ffentlichen Personennahverkehr) diejenige Situation, daß zwei Bus- oder Straßenbahnlinien gemeinsame Haltestellen haben. Ziel unserer Untersuchungen ist es, f{\"u}r beide Linien einen solchen Fahrplan zu finden, der f{\"u}r die Fahrg{\"a}ste m{\"o}glichst viel Bequemlichkeit bietet. Die Bedarfsstruktur - die Anzahl von Personen, die die beiden Linien benutzen - setzt dabei gewisse Beschr{\"a}nkungen f{\"u}r die Taktzeiten der beiden Linien. Die verbleibenden Entscheidungsfreiheiten sollen im Sinne der Zielstellung ausgenutzt werden. Im Vortrag wird folgenden Fragen nachgegangen: - nach welchen Kriterien kann man die "Bequemlichkeit" oder die "Synchronisationsg{\"u}te" messen? - wie kann man die einzelnen "Synchronisationsmaße" berechnen ? - wie kann man die verbleibenden Entscheidungsfreiheiten nutzen, um eine m{\"o}glichst gute Synchronisation zu erreichen ? 
Die Ergebnisse werden dann auf einige Beispiele angewandt und mit den bereitgestellten Methoden L{\"o}sungsvorschl{\"a}ge unterbreitet.}, subject = {Architektur }, language = {de} } @inproceedings{EbertLenzen, author = {Ebert, Carsten and Lenzen, Armin}, title = {OUTPUT-ONLY ANALYSIS FOR EXPERIMENTAL DAMAGE DETECTION OF A TIED-ARCH BRIDGE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2945}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29452}, pages = {13}, abstract = {In civil engineering it is very difficult and often expensive to excite constructions such as bridges and buildings with an impulse hammer or shaker. This problem can be avoided with the output-only method as special feature of stochastic system identification. The permanently existing ambient noise (e.g. wind, traffic, waves) is sufficient to excite the structures in their operational conditions. The output-only method is able to estimate the observable part of a state-space-model which contains the dynamic characteristics of the measured mechanical system. Because of the assumption that the ambient excitation is white there is no requirement to measure the input. Another advantage of the output-only method is the possibility to get high detailed models by a special method, called polyreference setup. To pretend the availability of a much larger set of sensors the data from varying sensor locations will be collected. Several successive data sets are recorded with sensors at different locations (moving sensors) and fixed locations (reference sensors). The covariance functions of the reference sensors are bases to normalize the moving sensors. The result of the following subspace-based system identification is a high detailed black-box-model that contains the weighting function including the well-known dynamic parameters eigenfrequencies and mode shapes of the mechanical system. 
Emphasis of this lecture is the presentation of an extensive damage detection experiment. A 53-year old prestressed concrete tied-arch-bridge in H{\"u}nxe (Germany) was deconstructed in 2005. Preliminary numerous vibration measurements were accomplished. The first experiment for system modification was an additional support near the bridge bearing of one main girder. During a further experiment one hanger from one tied arch was cut through as an induced damage. Some first outcomes of the described experiments will be presented.}, subject = {Architektur }, language = {en} } @inproceedings{EblingScheuermann, author = {Ebling, Julia and Scheuermann, G.}, title = {TEMPLATE MATCHING ON VECTOR FIELDS USING CLIFFORD ALGEBRA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2946}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29464}, pages = {25}, abstract = {Due to the amount of flow simulation and measurement data, automatic detection, classification and visualization of features is necessary for an inspection. Therefore, many automated feature detection methods have been developed in recent years. However, only one feature class is visualized afterwards in most cases, and many algorithms have problems in the presence of noise or superposition effects. In contrast, image processing and computer vision have robust methods for feature extraction and computation of derivatives of scalar fields. Furthermore, interpolation and other filter can be analyzed in detail. An application of these methods to vector fields would provide a solid theoretical basis for feature extraction. The authors suggest Clifford algebra as a mathematical framework for this task. Clifford algebra provides a unified notation for scalars and vectors as well as a multiplication of all basis elements. 
The Clifford product of two vectors provides the complete geometric information of the relative positions of these vectors. Integration of this product results in Clifford correlation and convolution which can be used for template matching of vector fields. For frequency analysis of vector fields and the behavior of vector-valued filters, a Clifford Fourier transform has been derived for 2D and 3D. Convolution and other theorems have been proved, and fast algorithms for the computation of the Clifford Fourier transform exist. Therefore the computation of Clifford convolution can be accelerated by computing it in Clifford Fourier domain. Clifford convolution and Fourier transform can be used for a thorough analysis and subsequent visualization of flow fields.}, subject = {Architektur }, language = {en} } @inproceedings{EckardtKoenke, author = {Eckardt, Stefan and K{\"o}nke, Carsten}, title = {ADAPTIVE SIMULATION OF THE DAMAGE BEHAVIOR OF CONCRETE USING HETEROGENEOUS MULTISCALE MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2947}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29478}, pages = {15}, abstract = {In this paper an adaptive heterogeneous multiscale model, which couples two substructures with different length scales into one numerical model is introduced for the simulation of damage in concrete. In the presented approach the initiation, propagation and coalescence of microcracks is simulated using a mesoscale model, which explicitly represents the heterogeneous material structure of concrete. The mesoscale model is restricted to the damaged parts of the structure, whereas the undamaged regions are simulated on the macroscale. As a result an adaptive enlargement of the mesoscale model during the simulation is necessary. 
In the first part of the paper the generation of the heterogeneous mesoscopic structure of concrete, the finite element discretization of the mesoscale model, the applied isotropic damage model and the cohesive zone model are briefly introduced. Furthermore the mesoscale simulation of a uniaxial tension test of a concrete prism is presented and own obtained numerical results are compared to experimental results. The second part is focused on the adaptive heterogeneous multiscale approach. Indicators for the model adaptation and for the coupling between the different numerical models will be introduced. The transfer from the macroscale to the mesoscale and the adaptive enlargement of the mesoscale substructure will be presented in detail. A nonlinear simulation of a realistic structure using an adaptive heterogeneous multiscale model is presented at the end of the paper to show the applicability of the proposed approach to large-scale structures.}, subject = {Architektur }, language = {en} } @inproceedings{Eickelkamp, author = {Eickelkamp, Jens Peter}, title = {LIQUIDIT{\"A}TSPLANUNG VON BAUPROJEKTEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2948}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29483}, pages = {12}, abstract = {Die Liquidit{\"a}tsplanung von Bauunternehmen XE "Liquidit{\"a}tsplanung" gilt als ein wesentliches Steuerungs-, Kontroll- sowie Informationsinstrument f{\"u}r interne und externe Adressaten und {\"u}bt eine Entscheidungsunterst{\"u}tzungsfunktion aus. Da die einzelnen Bauprojekte einen wesentlichen Anteil an den Gesamtkosten des Unternehmens ausmachen, besitzen diese auch einen erheblichen Einfluß auf die Liquidit{\"a}t und die Zahlungsf{\"a}higkeit der Bauunternehmung. 
Dem folgend ist es in der Baupraxis eine {\"u}bliche Verfahrensweise, die Liquidit{\"a}tsplanung zuerst projektbezogen zu erstellen und anschließend auf Unternehmensebene zu verdichten. Ziel der Ausf{\"u}hrungen ist es, die Zusammenh{\"a}nge von Arbeitskalkulation XE "Arbeitskalkulation" , Ergebnisrechnung XE "Ergebnisrechnung" und Finanzrechnung XE "Finanzrechnung" in Form eines deterministischen XE "Erkl{\"a}rungsmodells" Planungsmodells auf Projektebene darzustellen. Hierbei soll das Verst{\"a}ndnis und die Bedeutung der Verkn{\"u}pfungen zwischen dem technisch-orientierten Bauablauf und dessen Darstellung im Rechnungs- und Finanzwesen herausgestellt werden. Die Vorg{\"a}nge aus der Bauabwicklung, das heißt die Abarbeitung der Bauleistungsverzeichnispositionen und deren zeitliche Darstellung in einem Bauzeitenplan sind periodisiert in Gr{\"o}ßen der Betriebsbuchhaltung (Leistung, Kosten) zu transformieren und anschließend in der Finanzrechnung (Einzahlungen., Auszahlungen) nach Kreditoren und Debitoren aufzuschl{\"u}sseln.}, subject = {Architektur }, language = {de} } @inproceedings{EiermannErnstUllmann, author = {Eiermann, Michael and Ernst, O. and Ullmann, Elisabeth}, title = {SOLUTION STRATEGIES FOR STOCHASTIC FINITE ELEMENT DISCRETIZATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2949}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29493}, pages = {11}, abstract = {We consider efficient numerical methods for the solution of partial differential equations with stochastic coefficients or right hand side. The discretization is performed by the stochastic finite element method (SFEM). Separation of spatial and stochastic variables in the random input data is achieved via a Karhunen-Lo{\`e}ve expansion or Wiener's polynomial chaos expansion. 
We discuss solution strategies for the Galerkin system that take advantage of the special structure of the system matrix. For stochastic coefficients linear in a set of independent random variables we employ Krylov subspace recycling techniques after having decoupled the large SFEM stiffness matrix.}, subject = {Architektur }, language = {en} } @inproceedings{EngelkeSchuster, author = {Engelke, Gerald and Schuster, Otmar}, title = {OPENING THE RESERVE OF ECONOMIC EFFICIENCY IN LOGISTICAL AND FACILITY MANAGEMENT SERVICES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3017}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30177}, pages = {8}, abstract = {In many branches companies often lose the visibility of their human and technical resources of their field service. On the one hand the people in the fieldservice are often free like kings on the other hand they do not take part of the daily communication in the central office and suffer under the lacking involvement in the decisions inside the central office. The result is inefficiency. Reproaches in both directions follow. With the radio systems and then mobile phones the ditch began to dry up. But the solutions are far from being productive.}, subject = {Architektur }, language = {en} } @inproceedings{ErikssonKettunen, author = {Eriksson, Sirkka-Liisa and Kettunen, Jarkko}, title = {HYPERMONOGENIC POLYNOMIALS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2950}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29509}, pages = {22}, abstract = {It is well know that the power function is not monogenic. There are basically two ways to include the power function into the set of solutions: The hypermonogenic functions or holomorphic Cliffordian functions. L. 
Pernas has found out the dimension of the space of homogenous holomorphic Cliffordian polynomials of degree m, but his approach did not include a basis. It is known that the hypermonogenic functions are included in the space of holomorphic Cliffordian functions. As our main result we show that we can construct a basis for the right module of homogeneous holomorphic Cliffordian polynomials of degree m using hypermonogenic polynomials and their derivatives. To that end we first recall the function spaces of monogenic, hypermonogenic and holomorphic Cliffordian functions and give the results needed in the proof of our main theorem. We list some basic polynomials and their properties for the various function spaces. In particular, we consider recursive formulas, rules of differentiation and properties of linear independency for the polynomials.}, subject = {Architektur }, language = {en} } @inproceedings{ErlemannHartmann, author = {Erlemann, Kai and Hartmann, Dietrich}, title = {PARALLELIZATION OF A MICROSCOPIC TRAFFIC SIMULATION SYSTEM USING MPIJAVA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2951}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29516}, pages = {8}, abstract = {Traffic simulation is a valuable tool for the design and evaluation of road networks. Over the years, the level of detail to which urban and freeway traffic can be simulated has increased steadily, shifting from a merely qualitative macroscopic perspective to a very detailed microscopic view, where the behavior of individual vehicles is emulated realistically. With the improvement of behavioral models, however, the computational complexity has also steadily increased, as more and more aspects of real-life traffic have to be considered by the simulation environment. 
Despite the constant increase in computing power of modern personal computers, microscopic simulation stays computationally expensive, limiting the maximum network size than can be simulated on a single-processor computer in reasonable time. Parallelization can distribute the computing load from a single computer system to a cluster of several computing nodes. To this end, the exisiting simulation framework had to be adapted to allow for a distributed approach. As the simulation is ultimately targeted to be executed in real-time, incorporating real traffic data, only a spatial partition of the simulation was considered, meaning the road network has to be partitioned into subnets of comparable complexity, to ensure a homogenous load balancing. The partition process must also ensure, that the division between subnets does only occur in regions, where no strong interaction between the separated road segments occurs (i.e. not in the direct vicinity of junctions). In this paper, we describe a new microscopic reasoning voting strategy, and discuss in how far the increasing computational costs of these more complex behaviors lend themselves to a parallelized approach. 
We show the parallel architecture employed, the communication between computing units using MPIJava, and the benefits and pitfalls of adapting a single computer application to be used on a multi-node computing cluster.}, subject = {Architektur }, language = {en} } @inproceedings{Eschenbruch2006, author = {Eschenbruch, Klaus}, title = {{\"A}nderungsmanagementsysteme bei komplexen Projekten - Paradigmenwechsel beim Claimmanagement}, doi = {10.25643/bauhaus-universitaet.847}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8470}, year = {2006}, subject = {Weimar / Bauhaus-Universit{\"a}t / Professur Baubetrieb und Bauverfahren}, language = {de} } @inproceedings{EygelaarvanRooyen, author = {Eygelaar, Anton and van Rooyen, G.C.}, title = {ENGINEERING PROCESS MODEL SPECIFICATION AND RESOURCE LEVELING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2952}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29529}, pages = {18}, abstract = {The use of process models in the analysis, optimization and simulation of processes has proven to be extremely beneficial in the instances where they could be applied appropriately. However, the Architecture/Engineering/Construction (AEC) industries present unique challenges that complicate the modeling of their processes. A simple Engineering process model, based on the specification of Tasks, Datasets, Persons and Tools, and certain relations between them, have been developed, and its advantages over conventional techniques have been illustrated. Graph theory is used as the mathematical foundation mapping Tasks, Datasets, Persons and Tools to vertices and the relations between them to edges forming a directed graph. The acceptance of process modeling in AEC industries not only depends on the results it can provide, but the ease at which these results can be attained. 
Specifying a complex AEC process model is a dynamic exercise that is characterized by many modifications over the process model's lifespan. This article looks at reducing specification complexity, reducing the probability for erroneous input and allowing consistent model modification. Furthermore, the problem of resource leveling is discussed. Engineering projects are often executed with limited resources and determining the impact of such restrictions on the sequence of Tasks is important. Resource Leveling concerns itself with these restrictions caused by limited resources. This article looks at using Task shifting strategies to find a near-optimal sequence of Tasks that guarantees consistent Dataset evolution while resolving resource restrictions.}, subject = {Architektur }, language = {en} } @inproceedings{FalcaoCruzMalonek, author = {Falc{\~a}o, M. Irene and Cruz, J. F. and Malonek, Helmuth Robert}, title = {REMARKS ON THE GENERATION OF MONOGENIC FUNCTIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2939}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29390}, pages = {18}, abstract = { In this paper we consider three different methods for generating monogenic functions. The first one is related to Fueter's well known approach to the generation of monogenic quaternion-valued functions by means of holomorphic functions, the second one is based on the solution of hypercomplex differential equations and finally the third one is a direct series approach, based on the use of special homogeneous polynomials. We illustrate the theory by generating three different exponential functions and discuss some of their properties. 
Formula que se usa em preprints e artigos da nossa UI\&D (acho demasiado completo): Partially supported by the R\\&D unit \emph{Matem\'atica a Aplica\c\~es} (UIMA) of the University of Aveiro, through the Portuguese Foundation for Science and Technology (FCT), co-financed by the European Community fund FEDER.}, subject = {Architektur }, language = {en} } @inproceedings{Faustino, author = {Faustino, Nelson}, title = {FISCHER DECOMPOSITION FOR DIFFERENCE DIRAC OPERATORS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2955}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29551}, pages = {10}, abstract = {We establish the basis of a discrete function theory starting with a Fischer decomposition for difference Dirac operators. Discrete versions of homogeneous polynomials, Euler and Gamma operators are obtained. As a consequence we obtain a Fischer decomposition for the discrete Laplacian. For the sake of simplicity we consider in the first part only Dirac operators which contain only forward or backward finite differences. Of course, these Dirac operators do not factorize the classic discrete Laplacian. 
Therefore, we will consider a different definition of a difference Dirac operator in the quaternionic case which do factorizes the discrete Laplacian.}, subject = {Architektur }, language = {en} } @misc{Fuss2006, type = {Master Thesis}, author = {Fuß, Michael}, title = {Erstellung eines geeigneten Web-Content Management-Systems zugeschnitten auf die Anforderungen der Internetpr{\"a}sentation von Lehr- und Forschungseinrichtungen.}, doi = {10.25643/bauhaus-universitaet.743}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-7436}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {Die heute erh{\"a}ltlichen Web-Content Management-Systeme (WCMS) verf{\"u}gen {\"u}ber ein umfangreiches und breit gef{\"a}chertes Angebot an Funktionen, die weit {\"u}ber die, zur Redaktion und zum Management von Internetpr{\"a}sentationen, not-wendigen Grundanforderungen hinausgehen. Das macht diese Systeme in ih-ren Einsatz sehr flexibel und deckt vielf{\"a}ltige Anforderungen der Endanwender ab. Andererseits steigt durch die dadurch bedingte Komplexit{\"a}t der Arbeitsauf-wand erheblich und die Bedien- und Benutzerfreundlichkeit sinkt. Gerade f{\"u}r kleinere Internetpr{\"a}sentationen, die ohne aufwendige Interaktionsm{\"o}glichkeiten aber auf h{\"a}ufig wechselndem Informationsangeboten aufwarten, w{\"a}re dies in seiner Grundfunktionalit{\"a}t reduziertes System vorteilhaft. Ein solches reduziertes Web-Content Management-System soll w{\"a}hrend der Diplomarbeit entworfen und beispielhaft implementiert werden. Als Ausgangs- und Orientierungspunkt soll hierzu die Internetpr{\"a}sentation der Professur Informations- und Wissensverarbeitung dienen. Zur softwaretechnischen Umsetzung sind PHP und MySQL in Verbindung mit regul{\"a}ren HTML und CSS zu be-nutzen. F{\"u}r das weitere Vorgehen m{\"u}ssen zun{\"a}chst die Struktur und der Aufbau der Internetpr{\"a}sentation der Professur analysiert, strukturiert und formalisiert werden. 
Anschließend sind die am h{\"a}ufigsten professionell genutzten Webcontent-Managementsysteme (TYPO3 und weitere siehe www.opensourcecms.com) hinsichtlich der durch sie angebotenen Grundfunktionalit{\"a}ten und der verwen-deten Templates und Vorlagen zu untersuchen. Die aus dieser Analyse resultierenden Ergebnisse sind Ausgangspunkt f{\"u}r die Anforderungsdefinition des zu erstellenden Mini-WCMS. Anschließend ist eine prototypische Implementierung des theoretisch entstan-denen Systems, zugeschnitten auf die speziellen Bed{\"u}rfnisse der Professur, vorzunehmen und hinsichtlich seiner Eignung zu diskutieren.}, subject = {Content Management}, language = {de} } @inproceedings{GalffyBaitschWellmannJelicetal., author = {Galffy, Mozes and Baitsch, Matthias and Wellmann Jelic, Andres and Hartmann, Dietrich}, title = {LIFETIME-ORIENTED OPTIMIZATION OF BRIDGE TIE RODS EXPOSED TO VORTEX-INDUCED ACROSS-WIND VIBRATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2956}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29561}, pages = {12}, abstract = {In recent years, damages in welded connections plates of vertical tie rods of several arched steel bridges have been reported. These damages are due to fatigue caused by wind-induced vibrations. In the present study, such phenomena are examined, and the corresponding lifetime of a reference bridge in M{\"u}nster-Hiltrup, Germany, is estimated, based on the actual shape of the connection plate. Also, the results obtained are compared to the expected lifetime of a connection plate, whose geometry has been optimized separately. The structural optimization, focussing on the shape of the cut at the hanger ends, has been carried out using evolution strategies. 
The oscillation amplitudes have been computed by means of the Newmark-Wilson time-step method, using an appropriate load model, which has been validated by on-site experiments on the selected reference bridge. Corresponding stress-amplitudes are evaluated by multiplying the oscillation amplitudes with a stress concentration factor. This factor has been computed on the basis of a finite element model of the system "hanger-welding-connection plate", applying solid elements, according to the notch stress approach. The damage estimation takes into account the stochastics of the exciting wind process, as well as the stochastics of the material parameters (fatigue strength) given in terms of Woehler-curves. The shape optimization results in a substantial increase of the estimated hanger lifetime. The comparison of the lifetimes of the bulk plate and of the welding revealed that, in the optimized structure, the welding, being the most sensitive part in the original structure, shows much more resistance against potential damages than the bulk material.}, subject = {Architektur }, language = {en} } @phdthesis{Gehrmann2006, author = {Gehrmann, Hans-Joachim}, title = {Mathematische Modellierung und experimentelle Untersuchungen zur Pyrolyse von Abf{\"a}llen in Drehrohrsystemen}, doi = {10.25643/bauhaus-universitaet.772}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20060806-8119}, school = {Bauhaus-Universit{\"a}t Weimar}, year = {2006}, abstract = {F{\"u}r die Optimierung eines bereits bestehenden Prozesses, z.B. im Hinblick auf den maximal m{\"o}glichen Durchsatz bei gleich bleibender Qualit{\"a}t der Pyrolyseprodukte oder f{\"u}r die Einstellung der Betriebsparameter bei einem unbekannten Einsatzstoff, kann ein mathematisches Modell eine erste Absch{\"a}tzung f{\"u}r die Einstellung betrieblicher Parameter, wie z.B. Temperaturprofile im Gas und Feststoff, geben. 
Dar{\"u}ber hinaus kann man mit einem Modell f{\"u}r neu zu konzipierende Anlagen konstruktive Parameter ermitteln oder {\"u}berpr{\"u}fen. In dem hier dargestellten vereinfachten Modellansatz werden u. a. die Umsatzvorg{\"a}nge f{\"u}r ein Partikelkollektiv mit Hilfe von Summenparametern aus Untersuchungen an einer Thermowaage und erg{\"a}nzend im Drehrohr ermittelt. Das Prozessmodell basiert auf einem Reaktormodell, das das Verweilzeitverhalten des Einsatzstoffes im Reaktor beschreibt und einem Basismodell, bestehend aus Massen- und Energiebilanzen f{\"u}r Solid und Gas sowie Ans{\"a}tzen zur Trocknung und zum Umsatz. Im Hinblick auf die Verf{\"u}gbarkeit von stoffspezifischen Daten von Abf{\"a}llen sind insbesondere zur Berechnung des Verweilzeitverhaltens und des Umsatzes im Heißbetrieb vereinfachende Ans{\"a}tze durch die Bildung von Summenparametern hilfreich. Das Prozessmodell wurde schrittweise validiert: Zun{\"a}chst wurde in Kaltversuchen ein Summenparameter, der u.a. die unbekannten Reibungsverh{\"a}ltnisse im Drehrohr ber{\"u}cksichtigt, durch Vergleich von Experiment und Rechnung f{\"u}r Sand ermittelt. F{\"u}r heterogene Abfallgemische kann dieser Materialfaktor zwar f{\"u}r Kaltversuche bestimmt werden (soweit dies f{\"u}r Abf{\"a}lle m{\"o}glich ist), im Heißbetrieb {\"a}ndern sich jedoch alle wesentlichen Stoffparameter wie Partikeldurchmesser, Sch{\"u}ttdichte und Sch{\"u}ttwinkel sowie die Reibungsverh{\"a}ltnisse. F{\"u}r diesen Fall wird der Materialfaktor zu Eins gesetzt und die wesentlichen Stoffgr{\"o}ßen umsatzabh{\"a}ngig modelliert. Dazu ist die Kenntnis der Sch{\"u}ttdichten, statischen Sch{\"u}ttwinkel und mittleren Partikeldurchmesser vom Abfall und Koks aus dem Abfall notwendig. Die mit diesen Stoffdaten berechnete Verweilzeit wurde in einem Heißversuch bei der Pyrolyse von Brennstoff aus M{\"u}ll- (BRAM) Pellets mit einem Fehler von ca. 20 \% erreicht. 
Das Basismodell wurde zun{\"a}chst ohne Umsatz an Messergebnisse mit Sand im Drehrohr unter Variation von Temperaturen und Massenstrom angepasst bevor mit diesem Modell die Pyrolyse von einem homogenen Einsatzstoff (Polyethylen mit Sand) im Drehrohr berechnet wurde. Hier konnte bereits gezeigt werden, dass mit diesem vereinfachten Modellansatz gute Ergebnisse beim Vergleich von Modell und Experiment erzielt werden k{\"o}nnen. Im n{\"a}chsten Schritt wurde der Sand angefeuchtet, um die Teilmodelle der Trocknung unterhalb und bei Siedetemperatur zu validieren. Die Mess- und Modellierungsergebnisse stimmen gut miteinander {\"u}berein. F{\"u}r ein Abfallgemisch aus BRAM-Pellets konnte der Verlauf der Solidtemperaturen unter der Ber{\"u}cksichtigung variabler Stoffwerte des Solids und eines Verschmutzungsfaktors, der den Belag des Drehrohres mit anklebendem Pellets bis zur Verkokung ber{\"u}cksichtigt, gut wiedergegeben werden. Die Gastemperaturen k{\"o}nnen in erster N{\"a}herung ausreichend genau durch das mathematische Modell beschrieben werden. Mit diesem vereinfachten mathematischen Modellansatz steht nun ein Hilfsmittel zur Auslegung und Optimierung von indirekt beheizten Drehrohren zur Verf{\"u}gung, um bei einem neuen Einsatzstoff mit Daten aus experimentellen Basisuntersuchungen, die Temperaturverl{\"a}ufe im Feststoff und Gas sowie die Gaszusammensetzung in Abh{\"a}ngigkeit der wesentlichen Einflussgr{\"o}ßen abzusch{\"a}tzen.}, subject = {Pyrolyse}, language = {de} }