@inproceedings{OPUS4-2457, title = {International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom and Werner, Frank}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2457}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150916-24571}, pages = {434}, abstract = {The 19th International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering will be held at the Bauhaus University Weimar from 4th till 6th July 2012. Architects, computer scientists, mathematicians, and engineers from all over the world will meet in Weimar for an interdisciplinary exchange of experiences, to report on their results in research, development and practice and to discuss. The conference covers a broad range of research areas: numerical analysis, function theoretic methods, partial differential equations, continuum mechanics, engineering applications, coupled problems, computer sciences, and related topics. Several plenary lectures in aforementioned areas will take place during the conference. We invite architects, engineers, designers, computer scientists, mathematicians, planners, project managers, and software developers from business, science and research to participate in the conference!}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{OPUS4-2451, title = {International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2451}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150828-24515}, pages = {230}, abstract = {The 20th International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering will be held at the Bauhaus University Weimar from 20th till 22nd July 2015. Architects, computer scientists, mathematicians, and engineers from all over the world will meet in Weimar for an interdisciplinary exchange of experiences, to report on their results in research, development and practice and to discuss. The conference covers a broad range of research areas: numerical analysis, function theoretic methods, partial differential equations, continuum mechanics, engineering applications, coupled problems, computer sciences, and related topics. Several plenary lectures in aforementioned areas will take place during the conference. 
We invite architects, engineers, designers, computer scientists, mathematicians, planners, project managers, and software developers from business, science and research to participate in the conference!}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AbbasMorgenthal, author = {Abbas, Tajammal and Morgenthal, Guido}, title = {Model combinations for assessing the flutter stability of suspension bridges}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2757}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170306-27574}, pages = {11}, abstract = {Long-span cable-supported bridges are prone to aerodynamic instabilities caused by wind, and this phenomenon is usually a major design criterion. If the wind speed exceeds the critical flutter speed of the bridge, this constitutes an Ultimate Limit State. The prediction of the flutter boundary therefore requires accurate and robust models. This paper aims at studying various combinations of models to predict the flutter phenomenon. Since flutter is a coupling of aerodynamic forcing with a structural dynamics problem, different types and classes of models can be combined to study the interaction. Here, both numerical approaches and analytical models are utilised and coupled in different ways to assess the prediction quality of the hybrid model. Models for aerodynamic forces employed are the analytical Theodorsen expressions for the motion-induced aerodynamic forces of a flat plate and Scanlan derivatives as a meta model. Further, Computational Fluid Dynamics (CFD) simulations using the Vortex Particle Method (VPM) were used to cover numerical models. The structural representations were dimensionally reduced to two-degree-of-freedom section models calibrated from global models as well as a fully three-dimensional Finite Element (FE) model. A two-degree-of-freedom system was analysed analytically as well as numerically. Generally, all models were able to predict the flutter phenomenon and relatively close agreement was found for the particular bridge. In conclusion, the model choice for a given practical analysis scenario will be discussed in the context of the analysis findings.}, subject = {Angewandte Mathematik}, language = {en} } @inproceedings{AbuAbedMilbradt, author = {Abu Abed, Wassim and Milbradt, Peter}, title = {UNDERSTANDING THE ASPECT OF FUZZINESS IN INTERPOLATION METHODS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2872}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28726}, pages = {22}, abstract = {Fuzzy functions are suitable to deal with uncertainties and fuzziness in a closed form maintaining the informational content. This paper tries to understand, elaborate, and explain the problem of interpolating crisp and fuzzy data using continuous fuzzy valued functions. Two main issues are addressed here. The first covers how the fuzziness, induced by the reduction and deficit of information, i.e.
the discontinuity of the interpolated points, can be evaluated considering the used interpolation method and the density of the data. The second issue deals with the need to differentiate between impreciseness and hence fuzziness only in the interpolated quantity, impreciseness only in the location of the interpolated points and impreciseness in both the quantity and the location. In this paper, a brief background of the concept of fuzzy numbers and of fuzzy functions is presented. The numerical side of computing with fuzzy numbers is concisely demonstrated. The problem of fuzzy polynomial interpolation, the interpolation on meshes and mesh free fuzzy interpolation is investigated. The integration of the previously noted uncertainty into a coherent fuzzy valued function is discussed. Several sets of artificial and original measured data are used to examine the mentioned fuzzy interpolations.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AhmadZabelKoenke, author = {Ahmad, Sofyan and Zabel, Volkmar and K{\"o}nke, Carsten}, title = {WAVELET-BASED INDICATORS FOR RESPONSE SURFACE MODELS IN DAMAGE IDENTIFICATION OF STRUCTURES}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2758}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170306-27588}, pages = {14}, abstract = {In this paper, wavelet energy damage indicator is used in response surface methodology to identify the damage in simulated filler beam railway bridge. The approximate model is addressed to include the operational and surrounding condition in the assessment. The procedure is split into two stages, the training and detecting phase. During training phase, a so-called response surface is built from training data using polynomial regression and radial basis function approximation approaches. The response surface is used to detect the damage in structure during detection phase. The results show that the response surface model is able to detect moderate damage in one of bridge supports while the temperatures and train velocities are varied.}, subject = {Angewandte Mathematik}, language = {en} } @inproceedings{AhmedElSayedRashwanKamal, author = {Ahmed El-Sayed, Ahmed and Rashwan, R. A. and Kamal, A.}, title = {HADAMARD GAPS IN WEIGHTED LOGARITHMIC BLOCH SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2827}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28275}, pages = {20}, abstract = {We give a sufficient and a necessary condition for an analytic function "f" on the unit disk "D" with Hadamard gap to belong to a class of weighted logarithmic Bloch space as well as to the corresponding little weighted logarithmic Bloch space under some conditions posed on the defined weight function. 
Also, we study the relations between the class of weighted logarithmic Bloch functions and some other classes of analytic functions with the help of analytic functions in the Hadamard gap class.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AibaMaegaitoSuzuki, author = {Aiba, Yoshihisa and Maegaito, Kentaro and Suzuki, Osamu}, title = {Iteration dynamical systems of discrete Laplacians on the plane lattice(I) (Basic properties and computer simulations of the dynamical systems)}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2917}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29178}, pages = {3}, abstract = {In this study we introduce a concept of discrete Laplacian on the plane lattice and consider its iteration dynamical system. First, we discuss some basic properties of the dynamical system to be proved. Next, by means of computer simulations, we show that we can realize the following phenomena quite well: (1) the crystals of water, (2) the designs of carpets and embroideries, (3) the time change of the numbers of families of extinct animals, and (4) the ecosystems of living things. Hence we may expect that we can understand evolution and self-organization by use of these dynamical systems. Here we want to stress the following fact: although several well-known chaotic dynamical systems can describe chaotic phenomena, they have difficulties in describing evolution and self-organization.}, subject = {Architektur }, language = {en} } @inproceedings{AlYasiriGuerlebeck, author = {Al-Yasiri, Zainab and G{\"u}rlebeck, Klaus}, title = {ON BOUNDARY VALUE PROBLEMS FOR P-LAPLACE AND P-DIRAC EQUATIONS}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2792}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-27928}, pages = {8}, abstract = {The p-Laplace equation is a nonlinear generalization of the Laplace equation. This generalization is often used as a model problem for special types of nonlinearities. The p-Laplace equation can be seen as a bridge between very general nonlinear equations and the linear Laplace equation. The aim of this paper is to solve the p-Laplace equation for 2 < p < 3 and to find strong solutions. The idea is to apply a hypercomplex integral operator and spatial function theoretic methods to transform the p-Laplace equation into the p-Dirac equation.
This equation will be solved iteratively by using a fixed point theorem.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AlaladeKafleWuttkeetal., author = {Alalade, Muyiwa and Kafle, Binod and Wuttke, Frank and Lahmer, Tom}, title = {CALIBRATION OF CYCLIC CONSTITUTIVE MODELS FOR SOILS BY OSCILLATING FUNCTIONS}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2793}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-27932}, pages = {6}, abstract = {In order to minimize the probability of foundation failure resulting from cyclic action on structures, researchers have developed various constitutive models to simulate the foundation response and soil interaction as a result of these complex cyclic loads. The efficiency and effectiveness of these models are largely influenced by the cyclic constitutive parameters. Although a lot of research is being carried out on these relatively new models, little or no detail exists in the literature about the model-based identification of the cyclic constitutive parameters. This could be attributed to the difficulties and complexities of the inverse modeling of such complex phenomena. A variety of optimization strategies are available for the solution of sum-of-least-squares problems, as usually formulated in the field of model calibration. However, for the back analysis (calibration) of the soil response to oscillatory load functions, this paper gives insight into the model calibration challenges and also puts forward a method for the inverse modeling of cyclically loaded foundation response such that high-quality solutions are obtained with minimum computational effort. Therefore, model responses are produced which adequately describe what would otherwise be experienced in the laboratory or field.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AlmamouGebhardtBocketal., author = {Almamou, Abd Albasset and Gebhardt, Thomas and Bock, Sebastian and Hildebrand, J{\"o}rg and Schwarz, Willfried}, title = {QUALITY CONTROL OF CONSTRUCTED MODELS USING 3D POINT CLOUD}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2794}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-27944}, pages = {9}, abstract = {Over the last decade, the technology of constructing buildings has developed dramatically, especially with the huge growth of CAD tools that help in modeling buildings, bridges, roads and other construction objects.
Often quality control and size accuracy in the factory or on the construction site are based on manual measurements of discrete points. These measured points of the realized object or a part of it will be compared with the points of the corresponding CAD model to see whether and where the construction element fits into the respective CAD model. This process is very complicated and difficult even when using modern measuring technology. This is due to the complicated shape of the components, the large amount of manually detected measured data and the high cost of manual processing of measured values. However, by using a modern 3D scanner one gets information about the whole constructed object and one can make a complete comparison against the CAD model. It gives an idea of the overall quality of the objects. In this paper, we present a case study of controlling the quality of measurement during the construction phase of a steel bridge by using 3D point cloud technology. Preliminary results show that an early detection of mismatching between the real element and the CAD model could save a lot of time, effort and, obviously, expenses.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{AmezianeLasserre2003, author = {Ameziane, Farid and Lasserre, Stephane}, title = {SYSBAT - An Application to the Building Production Based on Computer Supported Cooperative Work}, doi = {10.25643/bauhaus-universitaet.274}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2748}, year = {2003}, abstract = {Our proposed solution is to enable partners of a construction project to share all the technical data produced and handled during the building production process by building a system through the use of internet technology. The system links distributed databases and allows building partners to access remotely and manipulate specific information. It provides an updated building representation that is being enriched and refined all along the building production process. A recent collaboration with Nemetschek France (subsidiary company of Nemetschek AG, AEC CAD software leader) focuses on a building product repository available in a web context. The aim is to help building project actors to choose a technical solution that fits their professional needs, and to maintain our information system with up-to-date information. It starts with the possibility to build online building product catalogs, in order to link Allplan CAD entities with building technical features. This paper presents the conceptual approaches on which our information system is built. Starting from a general organization diagram, we focus on the product and the description branches of construction works (including the latest IFC model specifications). Our aim is to add decisional support to the construction works selection process. To do so, we consider the actor's role in the system and the pieces of information each one needs to achieve a given task.}, subject = {Bauplanung}, language = {en} } @inproceedings{BaitschHartmann, author = {Baitsch, Matthias and Hartmann, Dietrich}, title = {A FRAMEWORK FOR THE INTERACTIVE VISUALIZATION OF ENGINEERING MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2919}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29194}, pages = {9}, abstract = {Interactive visualization based on 3D computer graphics nowadays is an indispensable part of any simulation software used in engineering.
Nevertheless, the implementation of such visualization software components is often avoided in research projects because it is a challenging and potentially time consuming task. In this contribution, a novel Java framework for the interactive visualization of engineering models is introduced. It supports the task of implementing engineering visualization software by providing adequate program logic as well as high level classes for the visual representation of entities typical for engineering models. The presented framework is built on top of the open source visualization toolkit VTK. In VTK, a visualization model is established by connecting several filter objects in a so called visualization pipeline. Although designing and implementing a good pipeline layout is demanding, VTK does not support the reuse of pipeline layouts directly. Our framework tailors VTK to engineering applications on two levels. On the first level it adds new - engineering model specific - filter classes to VTK. On the second level, ready made pipeline layouts for certain aspects of engineering models are provided. For instance there is a pipeline class for one-dimensional elements like trusses and beams that is capable of showing the elements along with deformations and member forces. In order to facilitate the implementation of a graphical user interface (GUI) for each pipeline class, there exists a reusable Java Swing GUI component that allows the user to configure the appearance of the visualization model. Because of the flexible structure, the framework can be easily adapted and extended to new problem domains. Currently it is used in (i) an object-oriented p-version finite element code for design optimization, (ii) an agent based monitoring system for dam structures and (iii) the simulation of destruction processes by controlled explosives based on multibody dynamics. Application examples from all three domains illustrates that the approach presented is powerful as well as versatile.}, subject = {Architektur }, language = {en} } @inproceedings{BargstaedtTarigan, author = {Bargst{\"a}dt, Hans-Joachim and Tarigan, Rina Sari}, title = {RULE BASED EXPANSION OF STANDARD CONSTRUCTION PROCESSES}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2822}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28229}, pages = {6}, abstract = {The paper introduces a systematic construction management approach, supporting expansion of a specified construction process, both automatically and semi-automatically. Throughout the whole design process, many requirements must be taken into account in order to fulfil demands defined by clients. In implementing those demands into a design concept up to the execution plan, constraints such as site conditions, building code, and legal framework are to be considered. However, complete information, which is needed to make a sound decision, is not yet acquired in the early phase. Decisions are traditionally taken based on experience and assumptions. 
Due to a vast number of appropriate available solutions, particularly in building projects, it is necessary to make those decisions traceable. This is important in order to be able to reconstruct considerations and assumptions taken, should there be any changes in the project's objectives in the future. The research will be carried out by means of building information modelling, where rules deriving from standard logics of construction management knowledge will be applied. The knowledge comprises a comprehensive interaction amongst the bidding process, cost estimation, construction site preparation as well as specific project logistics - which are usually still considered separately. By means of these rules, favourable decision-making regarding prefabrication and in-situ implementation can be justified. Modifications depending on the available information within the current design stage will consistently be traceable.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BartelsZimmermann, author = {Bartels, Jan-Hendrik and Zimmermann, J{\"u}rgen}, title = {MINIMIZING THE TOTAL DISCOUNTED COST OF DISMANTLING A NUCLEAR POWER PLANT}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2920}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29200}, pages = {9}, abstract = {For economic, technical or political reasons, about 100 nuclear power plants all over the world have been disconnected to date. All these power stations are still waiting for their complete dismantling, which, for a single reactor, costs up to one billion euros and lasts up to 15 years. In our contribution we present a resource-constrained project scheduling approach minimizing the total discounted cost of dismantling a nuclear power plant. A project of dismantling a nuclear power plant can be subdivided into a number of disassembling activities. The execution of these activities requires time and scarce resources like manpower, special equipment or storage facilities for the contaminated material arising from the dismantling. Moreover, we have to observe several minimum and maximum time lags (temporal constraints) between the start times of the different activities. Finally, each disassembling activity can be processed in two alternative execution modes, which lead to different disbursements and determine the resource requirements of the considered activity. The optimization problem is to determine a start time and an execution mode for each activity, such that the discounted cost of the project is minimum, and neither the temporal constraints are violated nor the activities' resource requirements exceed the availability of any scarce resource at any point in time. In our contribution we introduce an appropriate multi-mode project scheduling model with minimum and maximum time lags as well as renewable and cumulative resources for the described optimization problem. Furthermore, we show that the considered optimization problem is NP-hard in the strong sense. For small problem instances, optimal solutions can be gained from a relaxation-based enumeration approach which is incorporated into a branch-and-bound algorithm.
In order to be able to solve large problem instances, we also propose a truncated version of the devised branch-and-bound algorithm.}, subject = {Architektur }, language = {en} } @inproceedings{BauerDudekRichter, author = {Bauer, Marek and Dudek, Mariusz and Richter, Matthias}, title = {RELIABILITY OF TRAM - NETWORK SECTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2828}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28281}, pages = {16}, abstract = {We investigate aspects of tram-network section reliability, which form part of a model of the reliability of the whole city tram network. Here, one of the main points of interest is the character of the chronological development of the disturbances (namely the differences between the scheduled time of departure and the real time of departure) on subsequent sections during tram line operation. These developments were observed in comprehensive measurements done in Krakow during the rebuilding of one of the main transportation nodes (Rondo Mogilskie). The building activities caused big disturbances in tram line operation, with effects extending to neighboring sections. In a second part, the stochastic character of the section running time will be analyzed in more detail. Sections with only one beginning stop as well as sections with two or three beginning stops located at different streets of an intersection will be taken into consideration. The possibility of combining results from sections with two beginning stops into one set will be checked with suitable statistical tests which compare the means of the two samples. The section running time may depend on the gap between two consecutive trams and on the deviation from the schedule. This dependence will be described by a multiple regression formula. The main measurements were done in the city center of Krakow in two stages: before and after big changes in the tramway infrastructure.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BauerKandlerWeiss, author = {Bauer, Marek and Kandler, A. and Weiß, Hendrik}, title = {MODEL OF TRAM LINE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2921}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29217}, pages = {11}, abstract = {From the passenger's perspective, punctuality is one of the most important features of tram operations. Unfortunately, in most cases this feature is only insufficiently fulfilled. In this paper we present a simulation model for tram operation with special focus on punctuality. The aim is to get a helpful tool for designing time-tables and for analyzing the effects of changing the priorities for trams at traffic lights or the kind of track separation. A realization of tram operations is assumed to be a sequence of running times between successive stops and times spent by the tram at the stops. In this paper the running time is modeled by the sum of its mean value and a zero-mean random variable. With the help of multiple regression we find out that the average running time is a function depending on the length of the sections and the number of intersections. The random component is modeled by a sum of two independent zero-mean random variables.
One of these variables describes the disturbance caused by the process of waiting at an intersection and the other the disturbance caused by the process of driving. The time spent at a stop is assumed to be a random variable, too. Its distribution is estimated from given measurements of these stop times for different tram lines in Krak{\'o}w. Finally, a special case of the introduced model is considered and numerical results are presented. This paper is associated with the CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauerRichter, author = {Bauer, Marek and Richter, Matthias}, title = {STATISTICAL ANALYSIS OF TIME LOST BY TRAMS BEFORE DEPARTURE FROM STOPS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2922}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29226}, pages = {18}, abstract = {The ride of the tram along the line, defined by a time-table, consists of the travel time between the subsequent sections and the time spent by the tram at the stops. In the paper, statistical data collected in the city of Krakow is presented and evaluated. Under Polish conditions, the time spent by trams at stops makes up a remarkable 30 \% of the total time of tram line operation. Moreover, this time is characterized by large variability. The time spent by a tram at a stop consists of the alighting and boarding time and the time lost by the tram at the stop after alighting and boarding have ended, but before departure. The alighting and boarding time itself usually depends on the random number of alighting and boarding passengers and also on the number of passengers who are inside the vehicle. However, the time spent by the tram at the stop after alighting and boarding have ended is an effect of certain random events, mainly the impossibility of departing from the stop, caused by the lack of priorities for public transport vehicles. The main focus of the talk lies on the description and the modelling of these effects. This paper is associated with the CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{BauerRichterWeiss, author = {Bauer, Marek and Richter, Matthias and Weiß, Hendrik}, title = {SIMULATION MODEL OF TRAM ROUTE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2829}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28295}, pages = {19}, abstract = {From the passenger's perspective, punctuality is one of the most important features of tram route operation. We present a stochastic simulation model with special focus on determining important factors of influence. The statistical analysis is based on large samples (sample size is nearly 2000) accumulated from comprehensive measurements on eight tram routes in Cracow.
For the simulation, we are not only interested in average values but also in stochastic characteristics like the variance and other properties of the distribution. A realization of tram operations is assumed to be a sequence of running times between successive stops and times spent by the tram at the stops, divided into passenger alighting and boarding times and times waiting for the possibility of departure. The running time depends on the kind of track separation including the priorities at traffic lights, the length of the section and the number of intersections. For every type of section, a linear mixed regression model describes the average running time and its variance as functions of the length of the section and the number of intersections. The regression coefficients are estimated by the iteratively re-weighted least squares method. The alighting and boarding time mainly depends on the type of vehicle, the number of passengers alighting and boarding, and the occupancy of the vehicle. For the distribution of the time waiting for the possibility of departure, suitable distributions like the Gamma distribution and the Lognormal distribution are fitted.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BauriedelDonathKoenig, author = {Bauriedel, Christian and Donath, Dirk and K{\"o}nig, Reinhard}, title = {COMPUTER-SUPPORTED SIMULATIONS FOR URBAN PLANNING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2923}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29235}, pages = {10}, abstract = {The idea of a simulation program to support urban planning is explained: four different, clearly defined development paths can be calculated for the rebuilding of a shrinking town. Aided by self-organization principles, a complex system can be created. The dynamics are based on the action patterns of single actors, whose behaviour in turn depends cyclically on the generated structure. Global influences, which control the development, can be divided into a spatial, a socioeconomic, and an organizational-juridical level. The simulation model should offer conclusions on new planning strategies, especially in the context of the creation process of rebuilding measures. An example of a transportation system is shown by means of prototypes for the visualisation of the dynamic development process.}, subject = {Architektur }, language = {en} } @inproceedings{BeranDlask, author = {Beran, V{\'a}clav and Dlask, Petr}, title = {CONSTRUCTION SPEED AND CASH FLOW OPTIMISATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2926}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29269}, pages = {10}, abstract = {Practical examples show that the cost flow and the total amount of money spent in construction and further use may be cut significantly. The calculation is based on spreadsheets, which are very easy to develop on most PCs nowadays. Construction works are a field where the evaluation of cash flow can and should be applied. Decisions about cash flow in construction are decisions with long-term impact and long-term memory. Mistakes from the distant past have a massive impact on situations in the present and far into the economic future of economic activities. Two approaches exist: the Just-in-Time (JIT) approach and the life cycle cost (LCC) approach.
The calculation example shows the dynamic results for the production speed in opposition to stable flow of production in duration of activities. More sophisticated rescheduling in optimal solution might bring in return extra profit. In the technologies and organizational processes for industrial buildings, railways and road reconstruction, public utilities and housing developments there are assembly procedures that are very appropriate for the given purpose, complicated research-, development-, innovation-projects are all very good aspects of these kinds of applications. The investors of large investments and all public invested money may be spent more efficiently if an optimisation speed-strategy can be calculated.}, subject = {Architektur }, language = {en} } @inproceedings{BeranHromada, author = {Beran, V{\´a}clav and Hromada, E.}, title = {SOFTWARE FOR PROJECT RELIABILITY ESTIMATION AND RISK EVALUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2925}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29255}, pages = {16}, abstract = {The contribution presents a model that is able to simulate construction duration and cost for a building project. This model predicts set of expected project costs and duration schedule depending on input parameters such as production speed, scope of work, time schedule, bonding conditions and maximum and minimum deviations from scope of work and production speed. The simulation model is able to calculate, on the basis of input level of probability, the adequate construction cost and time duration of a project. The reciprocal view attends to finding out the adequate level of probability for construction cost and activity durations. Among interpretive outputs of the application software belongs the compilation of a presumed dynamic progress chart. This progress chart represents the expected scenario of development of a building project with the mapping of potential time dislocations for particular activities. The calculation of a presumed dynamic progress chart is based on an algorithm, which calculates mean values as a partial result of the simulated building project. Construction cost and time models are, in many ways, useful tools in project management. Clients are able to make proper decisions about the time and cost schedules of their investments. Consequently, building contractors are able to schedule predicted project cost and duration before any decision is finalized.}, subject = {Architektur }, language = {en} } @inproceedings{BerbigMenzelEisenblaetter2003, author = {Berbig, Torsten and Menzel, Karsten and Eisenbl{\"a}tter, Karin}, title = {"Mobile Computing" - Anforderungen \& Einf{\"u}hrungsstratgie aus Sicht der Baupraxis}, doi = {10.25643/bauhaus-universitaet.294}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2948}, year = {2003}, abstract = {Die Sicherung der Wettbewerbsf{\"a}higkeit im Bereich des Bauwesens, insbesondere kleinerer und mittelst{\"a}ndischer Betriebe erfordert ein aktives Handeln als Antwort auf die sich {\"a}ndernde Wettbewerbssituation. Einen wesentlichen Wettbewerbsvorteil k{\"o}nnen kleine unternehmerische Einheiten durch h{\"o}here Flexibilit{\"a}t, schnelle Reaktion auf Kundenw{\"u}nsche oder aktuelle Situationen auf der Baustelle und Marktn{\"a}he erreichen. 
Dazu ist es n{\"o}tig, die Informations- und Kommunikationsstr{\"o}me durch Einsatz standardisierter und kosteng{\"u}nstiger Hard- und Software wie z.B. Handhelds zu unterst{\"u}tzen und insbesondere die existierenden Hindernisse im Informationsfluss zwischen Baustelle und B{\"u}ro zu beseitigen. Am Beispiel der Projekte >IuK - SystemBau< und >eSharing< wird eine Einf{\"u}hrungsstrategie f{\"u}r >Mobile Computing< in kleinen unternehmerischen Einheiten des Bauwesens (KMU) basierend auf einer umfangreichen Anforderungsanalyse vorgestellt. Folgende Aspekte sollen beschrieben werden: durchg{\"a}ngiger Einsatz der Technik unter Beachtung der verschiedenen Qualifikationsniveaus, Einf{\"u}hrungsunterst{\"u}tzung durch Schulungen, Prozessanalyse und m{\"o}gliche Integration in bestehende Software-Umgebungen sowie Feldtests.}, subject = {Bauablauf / Ablaufplanung}, language = {de} } @inproceedings{BertholdMilbradt, author = {Berthold, Tim and Milbradt, Peter}, title = {ARTIFICIAL NEURONAL NETWORKS IN ENVIRONMENTAL ENGINEERING: THEORY AND APPLICATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2830}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28304}, pages = {14}, abstract = {Models in the context of engineering can be classified in process based and data based models. Whereas the process based model describes the problem by an explicit formulation, the data based model is often used, where no such mapping can be found due to the high complexity of the problem. Artificial Neuronal Networks (ANN) is a data based model, which is able to "learn" a mapping from a set of training patterns. This paper deals with the application of ANN in time dependent bathymetric models. A bathymetric model is a geometric representation of the sea bed. Typically, a bathymetry is been measured and afterwards described by a finite set of measured data. Measuring at different time steps leads to a time dependent bathymetric model. To obtain a continuous surface, the measured data has to be interpolated by some interpolation method. Unlike the explicitly given interpolation methods, the presented time dependent bathymetric model using an ANN trains the approximated surface in space and time in an implicit way. The ANN is trained by topographic measured data, which consists of the location (x,y) and time t. In other words the ANN is trained to reproduce the mapping h = f(x,y,t) and afterwards it is able to approximate the topographic height for a given location and date. In a further step, this model is extended to take meteorological parameters into account. This leads to a model of more predictive character.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Bilchuk, author = {Bilchuk, Irina}, title = {GEOMETRIC IDENTIFICATION OF OBJECTS IN CIVIL ENGINEERING APPLICATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2927}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29274}, pages = {21}, abstract = {Objects for civil engineering applications can be identified with their reference in memory, their alpha-numeric name or their geometric location. Particularly in graphic user interfaces, it is common to identify objects geometrically by selection with the mouse. 
As the number of geometric objects in a graphic user interface grows, it becomes increasingly important to treat the basic operations add, search and remove for geometric objects with great efficiency. Guttman has proposed the Region-Tree (R-tree) for geometric identification in an environment which uses pages on disc as the data structure. Minimal bounding rectangles are used to structure the data in such a way that neighborhood relations can be described effectively. The literature shows that the parameters which influence the efficiency of the R-trees have been studied extensively, but without conclusive results. The goal of the research which is reported in this paper is to determine reliably the parameters which significantly influence the efficiency of R-trees for geometric identification in technical drawings. In order to make this investigation conclusive, it must be performed with the best available software technology. Therefore, an object-oriented software implementation of the method is developed. This implementation is tested with technical drawings containing many thousands of geometric objects. These drawings are created automatically by a stochastic generator which is incorporated into a test bed consisting of an editor and a visualiser. This test bed is used to obtain statistics for the main factors which affect the efficiency of R-trees. The investigation shows that the following main factors which affect the efficiency can be identified reliably: the number of geometric objects on the drawing, the minimum and maximum number of children of a node of the tree, and the maximum width and height of the minimal bounding rectangles of the geometric objects relative to the size of the drawing.}, subject = {Architektur }, language = {en} } @inproceedings{BilekHartmann2003, author = {Bilek, Jochen and Hartmann, Dietrich}, title = {Agentenbasiertes Kooperationsmodell zur Unterst{\"u}tzung vernetzter Planungsprozesse in der Tragwerksplanung}, doi = {10.25643/bauhaus-universitaet.279}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-2791}, year = {2003}, abstract = {Die heutige Situation in der Tragwerksplanung ist durch das kooperative Zusammenwirken einer gr{\"o}ßeren Anzahl von Fachleuten verschiedener Disziplinen (Architektur, Tragwerksplanung, etc.) in zeitlich befristeten Projektgemeinschaften gekennzeichnet. Bei der Abstimmung der hierdurch bedingten komplexen, dynamischen und vernetzten Planungsprozesse kommt es dabei h{\"a}ufig zu Planungsm{\"a}ngeln und Qualit{\"a}tseinbußen. Dieser Artikel zeigt auf, wie mit Hilfe der Agententechnologie L{\"o}sungsans{\"a}tze zur Verbesserung der Planungssituation erreicht werden k{\"o}nnen. Hierzu wird ein Agentenmodell f{\"u}r die vernetzt-kooperative Tragwerksplanung vorgestellt und anhand der Planung einer Fußg{\"a}ngerbogenbr{\"u}cke anschaulich demonstriert. Das Agentenmodell erfasst (1) die beteiligten Fachplaner und Organisationen, (2) die tragwerksspezifischen Planungsprozesse, (3) die zugeh{\"o}rigen (Teil-)Produktmodelle und (4) die genutzte (Ingenieur-)Software. Hieraus leiten sich die drei Teilmodelle (1) agentenbasiertes Kooperationsmodell, (2) agentenbasierte Produktmodellintegration und (3) Modell zur agentenbasierten Software-Integration ab.
Der Fokus des Artikels liegt auf der Darstellung des agentenbasierten Kooperationsmodells.}, subject = {Tragwerk}, language = {de} } @inproceedings{BiltchoukPahl2004, author = {Biltchouk, Irina and Pahl, Peter Jan}, title = {Interaction of Data Bases and Graphical Interfaces in Civil Engineering}, doi = {10.25643/bauhaus-universitaet.163}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-1636}, year = {2004}, abstract = {Applications for civil engineering tasks usually contain graphical user interfaces for the engineering processes. Persistent objects of the applications are stored in data bases. The influence of the interaction between a graphical user interface and a data base on the development of a civil engineering application is investigated in this paper. A graphic application for the linear elastic analysis of plane frames, which was previously developed with standard tools of the Java platform, is compared to a redesigned implementation using a generalized data base for persistent objects. The investigation leads to the following results: - A strict distinction between persistent and transient objects influences the class structure of an application, in particular the class structure of a graphical user interface. - The structure of an application depends on the logic for updating references to persistent and transient graphical objects after an application is read from a file. - The complexity of the reference management can usually be handled better by just-in-time referencing associated with String-identifiers rather than by automated referencing associated with Name-identifiers.}, subject = {Baubetrieb}, language = {en} } @inproceedings{BockGuerlebeck, author = {Bock, Sebastian and G{\"u}rlebeck, Klaus}, title = {A Coupled Ritz-Galerkin Approach Using Holomorphic and Anti-holomorphic Functions}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2928}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29281}, pages = {14}, abstract = {The contribution focuses on the development of a basic computational scheme that provides a suitable calculation environment for the coupling of analytical near-field solutions with numerical standard procedures in the far-field of the singularity. The proposed calculation scheme uses classical methods of complex function theory, which can be generalized to 3-dimensional problems by using the framework of hypercomplex analysis.
The adapted approach is mainly based on the factorization of the Laplace operator by the Cauchy-Riemann operator, where exact solutions of the respective differential equation are constructed by using an orthonormal basis of holomorphic and anti-holomorphic functions.}, subject = {Architektur }, language = {en} } @inproceedings{BombasaroBucher, author = {Bombasaro, Emanuel and Bucher, Christian}, title = {INVESTIGATION OF MODELING ERRORS OF DIFFERENT RANDOM FIELD BASED WIND LOAD FORMULATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2831}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28318}, pages = {11}, abstract = {In this paper the influence of changes in the mean wind velocity, the wind profile power-law coefficient, the drag coefficient of the terrain and the structural stiffness is investigated on different complex structural models. This paper gives a short introduction to wind profile models and to the approach by A. G. Davenport to compute the structural reaction to wind-induced vibrations. First, this approach is shown with the help of a simple example (a skyscraper). Using this simple example gives the reader the possibility to study the variance differences when changing one of the above-mentioned parameters on this very easy example and to see the influence of different complex structural models on the result. Furthermore, an approach for estimating the needed discretization level is given. With the help of this knowledge the structural model design methodology can be based on a deeper understanding of the different behavior of the single models.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BrackxDeKnockDeSchepper, author = {Brackx, Fred and De Knock, B. and De Schepper, Hennie}, title = {A MULTI--DIMENSIONAL HILBERT TRANSFORM IN ANISOTROPIC CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2929}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29297}, pages = {15}, abstract = {In earlier research, generalized multidimensional Hilbert transforms have been constructed in m-dimensional Euclidean space, in the framework of Clifford analysis. Clifford analysis, centred around the notion of monogenic functions, may be regarded as a direct and elegant generalization to higher dimension of the theory of holomorphic functions in the complex plane. The considered Hilbert transforms, usually obtained as a part of the boundary value of an associated Cauchy transform in m+1 dimensions, might be characterized as isotropic, since the metric in the underlying space is the standard Euclidean one. In this paper we adopt the idea of a so-called anisotropic Clifford setting, which leads to the introduction of a metric dependent m-dimensional Hilbert transform, showing, at least formally, the same properties as the isotropic one. The Hilbert transform being an important tool in signal analysis, this metric dependent setting has the advantage of allowing the adjustment of the co-ordinate system to possible preferential directions in the signals to be analyzed.
A striking result to be mentioned is that the associated anisotropic (m+1)-dimensional Cauchy transform is no longer uniquely determined, but may stem from a diversity of (m+1)-dimensional "mother" metrics.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperDeSchepperetal., author = {Brackx, Fred and De Schepper, Hennie and De Schepper, Nele and Sommen, Frank}, title = {HERMITIAN CLIFFORD-HERMITE WAVELETS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2931}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29313}, pages = {13}, abstract = {The one-dimensional continuous wavelet transform is a successful tool for signal and image analysis, with applications in physics and engineering. Clifford analysis offers an appropriate framework for taking wavelets to higher dimension. In the usual orthogonal case Clifford analysis focusses on monogenic functions, i.e. null solutions of the rotation invariant vector valued Dirac operator ∂, defined in terms of an orthogonal basis for the quadratic space Rm underlying the construction of the Clifford algebra R0,m. An intrinsic feature of this function theory is that it encompasses all dimensions at once, as opposed to a tensorial approach with products of one-dimensional phenomena. This has allowed for a very specific construction of higher dimensional wavelets and the development of the corresponding theory, based on generalizations of classical orthogonal polynomials on the real line, such as the radial Clifford-Hermite polynomials introduced by Sommen. In this paper, we pass to the Hermitian Clifford setting, i.e. we let the same set of generators produce the complex Clifford algebra C2n (with even dimension), which we equip with a Hermitian conjugation and a Hermitian inner product. Hermitian Clifford analysis then focusses on the null solutions of two mutually conjugate Hermitian Dirac operators which are invariant under the action of the unitary group. In this setting we construct new Clifford-Hermite polynomials, starting in a natural way from a Rodrigues formula which now involves both Dirac operators mentioned. Due to the specific features of the Hermitian setting, four different types of polynomials are obtained, two types of even degree and two types of odd degree. These polynomials are used to introduce a new continuous wavelet transform, after thorough investigation of all necessary properties of the involved polynomials, the mother wavelet and the associated family of wavelet kernels.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperLunaElizararrasetal., author = {Brackx, Fred and De Schepper, Hennie and Luna-Elizararras, Maria Elena and Shapiro, Michael}, title = {INTEGRAL REPRESENTATIONS IN HERMITEAN CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2832}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28326}, pages = {13}, abstract = {Euclidean Clifford analysis is a higher dimensional function theory offering a refinement of classical harmonic analysis. The theory is centered around the concept of monogenic functions, i.e. null solutions of a first order vector valued rotation invariant differential operator called the Dirac operator, which factorizes the Laplacian. 
More recently, Hermitean Clifford analysis has emerged as a new and successful branch of Clifford analysis, offering a further refinement of the Euclidean case; it focusses on the simultaneous null solutions, called Hermitean (or h-) monogenic functions, of two Hermitean Dirac operators which are invariant under the action of the unitary group. In Euclidean Clifford analysis, the Clifford-Cauchy integral formula has proven to be a cornerstone of the function theory, as is the case for the traditional Cauchy formula for holomorphic functions in the complex plane. Previously, a Hermitean Clifford-Cauchy integral formula has been established by means of a matrix approach. This formula reduces to the traditional Martinelli-Bochner formula for holomorphic functions of several complex variables when taking functions with values in an appropriate part of complex spinor space. This means that the theory of Hermitean monogenic functions should also encompass other results of several variable complex analysis as special cases. At present we will elaborate further on the obtained results and refine them, considering fundamental solutions, Borel-Pompeiu representations and the Teodorescu inversion, each of them being developed at different levels, including the global level, handling vector variables, vector differential operators and the Clifford geometric product, as well as the blade level, where variables and differential operators act by means of the dot and wedge products. A rich world of results reveals itself, indeed including well-known formulae from the theory of several complex variables.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BrackxDeSchepperSommen, author = {Brackx, Fred and De Schepper, Nele and Sommen, Frank}, title = {Clifford-Hermite and Two-Dimensional Clifford-Gabor Filters For Early Vision}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2930}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29303}, pages = {22}, abstract = {Image processing has been much inspired by human vision, in particular with regard to early vision. The latter refers to the earliest stage of visual processing responsible for the measurement of local structures such as points, lines, edges and textures in order to facilitate the subsequent interpretation of these structures in higher stages (known as high level vision) of the human visual system. This low level visual computation is carried out by cells of the primary visual cortex. The receptive field profiles of these cells can be interpreted as the impulse responses of the cells, which are then considered as filters. According to the Gaussian derivative theory, the receptive field profiles of the human visual system can be approximated quite well by derivatives of Gaussians. Two mathematical models suggested for these receptive field profiles are on the one hand the Gabor model and on the other hand the Hermite model, which is based on the analysis filters of the Hermite transform. The Hermite filters are derivatives of Gaussians, while Gabor filters, which are defined as harmonic modulations of Gaussians, provide a good approximation to these derivatives. It is important to note that, even if the Gabor model is more widely used than the Hermite model, the latter offers some advantages like being an orthogonal basis and having a better match to experimental physiological data.
In our earlier research both filter models, Gabor and Hermite, have been developed in the framework of Clifford analysis. Clifford analysis offers a direct, elegant and powerful generalization to higher dimension of the theory of holomorphic functions in the complex plane. In this paper we present the construction of the Hermite and Gabor filters, both in the classical and in the Clifford analysis framework. We also generalize the concept of complex Gaussian derivative filters to the Clifford analysis setting. Moreover, we present further properties of the Clifford-Gabor filters, such as their relationship with other types of Gabor filters and their localization in the spatial and in the frequency domain, formalized by the uncertainty principle.}, subject = {Architektur }, language = {en} } @inproceedings{BraunesDonath, author = {Braunes, J{\"o}rg and Donath, Dirk}, title = {COMPUTERGEST{\"U}TZTE PLANUNG IM BESTAND VON DER DIGITALEN BESTANDSERFASSUNG ZUR PLANUNGSUNTERST{\"U}TZUNG IM CAAD}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2933}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29338}, pages = {10}, abstract = {For reliable planning within existing buildings, a wealth of very diverse information has to be taken into account, much of which only becomes available during the planning or construction process. A survey of the existing building is always a prerequisite for this. Although computer programs supporting building surveys exist, they are exclusively stand-alone solutions. Exporting the recorded data into a planning system entails a loss of information. Despite the potential capability of current CAAD/BIM systems to manage building stock data, these systems are primarily designed for the planning of new buildings. The continuous handling of refurbishment projects within one CAAD/BIM system, from the survey of the existing building through design and approval planning to detailed design, is currently not adequately supported. At the Chair of Computer Science in Architecture (InfAR) of the Faculty of Architecture at the Bauhaus-Universit{\"a}t Weimar, concepts and prototypes for the domain-oriented support of planning within existing buildings have been developed in recent years within the DFG Sonderforschungsbereich 524 "Werkzeuge und Konstruktionen f{\"u}r die Revitalisierung von Bauwerken". The focus was on capturing all planning-relevant building stock data and mapping them onto a dynamic building model.
Building on this research, the article deals with the context-related reuse and targeted provision of building stock data in the process of planning within existing buildings, and with the integration of concepts of planning-relevant building surveying into commercially available CAAD/BIM systems.}, subject = {Architektur }, language = {de} } @inproceedings{BrehmZabelBucheretal., author = {Brehm, Maik and Zabel, Volkmar and Bucher, Christian and Ribeiro, D.}, title = {AN AUTOMATIC MODE SELECTION STRATEGY FOR MODEL UPDATING USING THE MODAL ASSURANCE CRITERION AND MODAL STRAIN ENERGIES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2833}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28330}, pages = {18}, abstract = {In the context of finite element model updating using vibration test data, natural frequencies and mode shapes are used as validation criteria. Consequently, the order of natural frequencies and mode shapes is important. As only limited spatial information is available and noise is present in the measurements, the automatic selection of the most likely numerical mode shape corresponding to a measured mode shape is a difficult task. The most common criterion to indicate corresponding mode shapes is the modal assurance criterion. Unfortunately, this criterion fails in certain cases. In this paper, the purely mathematical modal assurance criterion will be enhanced by additional physical information from the numerical model in terms of modal strain energies. A numerical example and a benchmark study with real measured data are presented to show the advantages of the enhanced energy-based criterion in comparison to the traditional modal assurance criterion.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BrossmannMueller, author = {Broßmann, Marko and M{\"u}ller, Karl-Heinz}, title = {STOCHASTISCHE ANALYSE VON STAHLBETONBALKEN IM GRENZZUSTAND DER ADAPTION UNTER BER{\"u}CKSICHTIGUNG DER STEIFIGKEITSDEGRADATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2934}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29341}, pages = {20}, abstract = {Using a three-span continuous girder as an example, the failure probability of reinforced concrete beams under variable loading is investigated with respect to the limit state of adaptation (shakedown). The adaptation analysis takes into account the load-dependent degradation of the bending stiffness due to cracking. The associated mechanical problem can be reduced to the adaptation analysis of linear elastic, ideally plastic beam structures with unknown but bounded bending stiffness. The failure probability is computed taking stochastic structural and load quantities into account. Structural properties and permanent loads are treated as time-independent random variables. Time-varying loads are modelled as service-life-related extreme values of Poisson rectangular pulse processes, taking temporal superposition effects into account, so that the failure probability is likewise a service-life-related quantity. The mechanical problems are solved numerically by means of mathematical optimization.
The failure probability is estimated statistically by means of the Monte Carlo method.}, subject = {Architektur }, language = {de} } @inproceedings{BultheelJansenMaesetal., author = {Bultheel, Adhemar and Jansen, M. and Maes, J. and Van Aerschot, W. and Vanraes, E.}, title = {SUBDIVIDE AND CONQUER RESOLUTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2909}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29091}, pages = {47}, abstract = {This contribution will be freewheeling in the domain of signal, image and surface processing and touch briefly upon some topics that have been close to the heart of people in our research group. A lot of the research in this domain that has been carried out worldwide over the last 20 years deals with multiresolution. Multiresolution makes it possible to represent a function (in the broadest sense) at different levels of detail. This was applied not only to signals and images but also when solving all kinds of complex numerical problems. Since wavelets came into play in the 1980s, this idea was applied and generalized by many researchers. Therefore we use this as the central idea throughout this text. Wavelets, subdivision and hierarchical bases are the appropriate tools to obtain these multiresolution effects. We shall introduce some of the concepts in a rather informal way and show that the same concepts will work in one, two and three dimensions. The applications in the three cases are however quite different, and thus one wants to achieve very different goals when dealing with signals, images or surfaces. Because completeness in our treatment is impossible, we have chosen to describe two case studies after introducing some concepts in signal processing. These case studies are still the subject of current research. The first one attempts to solve a problem in image processing: how to approximate an edge in an image efficiently by subdivision. The method is based on normal offsets. The second case is the use of Powell-Sabin splines to give a smooth multiresolution representation of a surface. In this context we also illustrate the general method of construction of a spline wavelet basis using a lifting scheme.}, subject = {Architektur }, language = {en} } @inproceedings{CacaoConstalesKrausshar, author = {Cacao, Isabel and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {A UNIFIED APPROACH FOR THE TREATMENT OF SOME HIGHER DIMENSIONAL DIRAC TYPE EQUATIONS ON SPHERES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2834}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28343}, pages = {8}, abstract = {Using Clifford analysis methods, we provide a unified approach to obtain explicit solutions of some partial differential equations combining the n-dimensional Dirac and Euler operators, including generalizations of the classical time-harmonic Maxwell equations.
The obtained regular solutions show strong connections between hypergeometric functions and homogeneous polynomials in the kernel of the Dirac operator.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{CacaoConstalesKrausshar, author = {Cacao, Isabel and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {BESSEL FUNCTIONS AND HIGHER DIMENSIONAL DIRAC TYPE EQUATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2936}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29366}, pages = {8}, abstract = {In this paper we study the structure of the solutions to higher dimensional Dirac type equations generalizing the known λ-hyperholomorphic functions, where λ is a complex parameter. The structure of the solutions to the system of partial differential equations (D - λ)f = 0 shows a close connection with Bessel functions of the first kind with complex argument. The more general system of partial differential equations that is considered in this paper combines Dirac and Euler operators and emphasizes the role of the Bessel functions. However, contrary to the simplest case, one now gets Bessel functions of arbitrary complex order.}, subject = {Architektur }, language = {en} } @inproceedings{CastilloPerez, author = {Castillo-P{\´e}rez, Ra{\´u}l}, title = {AN APPLICATION OF FORMAL POWER SERIES FOR THE DEVELOPMENT OF OPTICAL FILTERS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2835}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28354}, pages = {8}, abstract = {The application of a recent method using formal power series is proposed. It is based on a new representation for solutions of Sturm-Liouville equations. This method is used to calculate the transmittance and reflectance coefficients of finite inhomogeneous layers with high accuracy and efficiency. By tailoring the refractive index profile that defines the inhomogeneous medium, it is possible to develop very important applications such as optical filters. A number of profiles were evaluated, and some of them were then selected in order to improve their characteristics by modifying their profiles.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{CastilloPerezCedilloDiazKravchenkoetal., author = {Castillo-P{\´e}rez, Ra{\´u}l and Cedillo-D{\´i}az, A. del C.
and Kravchenko, Vladislav and Oviedo-Galdeano, H.}, title = {COMPUTATION OF THE REFLECTANCE AND TRANSMITTANCE FOR AN INHOMOGENEOUS LAYERED MEDIUM WITH TURNING POINTS USING THE WKB AND SPPS METHODS}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom and Werner, Frank}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2759}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170306-27598}, pages = {16}, abstract = {Electromagnetic wave propagation is currently present in the vast majority of situations which occur in everyday life, whether in mobile communications, DTV, satellite tracking, broadcasting, etc. Because of this, the study of increasingly complex media for the propagation of electromagnetic waves has become necessary in order to optimize resources and increase the capabilities of the devices as required by the growing demand for such services. Within electromagnetic wave propagation, different parameters are considered that characterize it under various circumstances; of particular importance are the reflectance and transmittance. There are several methods for the analysis of the reflectance and transmittance, such as the method of approximation by boundary conditions, the plane wave expansion method (PWE), etc., but this work focuses on the WKB and SPPS methods. The implementation of the WKB method is relatively simple but is found to be efficient only when working at high frequencies. The SPPS method (Spectral Parameter Power Series), based on the theory of pseudoanalytic functions, is used to solve this problem through a new representation for solutions of Sturm-Liouville equations and has recently proven to be a powerful tool for solving different boundary value and eigenvalue problems. Moreover, it has a structure very suitable for numerical implementation, which in this case was carried out in Matlab for the evaluation of both conventional and turning point profiles. The comparison between the two methods allows us to obtain valuable information about their performance, which is useful for determining the validity and appropriateness of their application for solving problems where these parameters are calculated in real-life applications.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{ChangChang, author = {Chang, Wei-Tsang and Chang, Teng-Wen}, title = {TIME-BASED FORM TRANSFORMATION WITH FOLDING SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2937}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29371}, pages = {10}, abstract = {Design activity can be treated computationally as a state transition. In stepwise processing, in-between form states are not easily observed. However, in this research a time-based concept is introduced and applied in order to bridge the gap. In architecture, folding is one method of form manipulation, and architects also want to search for alternatives through this operation.
Besides, the folding operation has to be defined and parameterized before the time factor is involved as a variable of folding. As a result, time-based transformation provides sequential form states and redirects design activity.}, subject = {Architektur }, language = {en} } @inproceedings{ChudobaScholzenHegger, author = {Chudoba, Rostislav and Scholzen, A. and Hegger, Josef}, title = {MICROPLANE MODEL WITH INITIAL AND DAMAGE-INDUCED ANISOTROPY APPLIED TO TEXTILE-REINFORCED CONCRETE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2836}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28367}, pages = {8}, abstract = {The presented material model reproduces the anisotropic characteristics of textile reinforced concrete in a smeared manner. This includes both the initial anisotropy introduced by the textile reinforcement and the anisotropic damage evolution reflecting fine patterns of crack bridges. The model is based on the microplane approach. The direction-dependent representation of the material structure by oriented microplanes provides a flexible way to introduce the initial anisotropy. The microplanes oriented in a yarn direction are associated with modified damage laws that reflect the tension-stiffening effect due to the multiple cracking of the matrix along the yarn.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{ConstalesKrausshar, author = {Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {ON THE KLEIN-GORDON EQUATION ON THE 3-TORUS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2863}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28639}, pages = {10}, abstract = {In this paper we consider the time independent Klein-Gordon equation on some conformally flat 3-tori with given boundary data. We set up an explicit formula for the fundamental solution. We show that we can represent any solution to the homogeneous Klein-Gordon equation on the torus as a finite sum over generalized 3-fold periodic elliptic functions that are in the kernel of the Klein-Gordon operator. Furthermore we prove Cauchy and Green type integral formulas and set up a Teodorescu and Cauchy transform for the toroidal Klein-Gordon operator. These in turn are used to set up explicit formulas for the solution to the inhomogeneous version of the Klein-Gordon equation on the 3-torus.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{ConstalesKrausshar, author = {Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {ON THE NAVIER-STOKES EQUATION WITH FREE CONVECTION IN STRIP DOMAINS AND 3D TRIANGULAR CHANNELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2938}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29387}, pages = {12}, abstract = {The Navier-Stokes equations and related ones can be treated very elegantly with the quaternionic operator calculus developed in a series of works by K. G{\"u}rlebeck, W. Spr{\"o}ßig and others. This study will be extended in this paper.
In order to apply the quaternionic operator calculus to solve these types of boundary value problems fully explicitly, one basically needs to evaluate two types of integral operators: the Teodorescu operator and the quaternionic Bergman projector. While the integral kernel of the Teodorescu transform is universal for all domains, the kernel function of the Bergman projector, called the Bergman kernel, depends on the geometry of the domain. With special variants of quaternionic holomorphic multiperiodic functions we obtain explicit formulas for three-dimensional parallel plate channels, rectangular block domains and regular triangular channels. The explicit knowledge of the integral kernels then makes it possible to evaluate the operator equations in order to determine the solutions of the boundary value problem explicitly.}, subject = {Architektur }, language = {en} } @inproceedings{CruzFalcaoMalonek, author = {Cruz, J. F. and Falc{\~a}o, M. Irene and Malonek, Helmuth Robert}, title = {3D-MAPPINGS AND THEIR APPROXIMATION BY SERIES OF POWERS OF A SMALL PARAMETER}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2940}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29406}, pages = {14}, abstract = {In classical complex function theory the geometric mapping property of conformality is closely linked with complex differentiability. In contrast to the planar case, in higher dimensions the set of conformal mappings is only the set of M{\"o}bius transformations. Unfortunately, the theory of generalized holomorphic functions (for historical reasons they are called monogenic functions) developed on the basis of Clifford algebras does not cover the set of M{\"o}bius transformations in higher dimensions, since M{\"o}bius transformations are not monogenic. But on the other hand, monogenic functions are hypercomplex differentiable functions, and the question arises whether, from this point of view, they can still play a special role for other types of 3D-mappings, for instance for quasi-conformal ones. On the occasion of the 16th IKM, 3D-mapping methods based on the application of Bergman's reproducing kernel approach (BKM) were discussed. Almost all authors who had previously worked with BKM in the Clifford setting were only concerned with the general algebraic and functional analytic background which allows the explicit determination of the kernel in special situations. The main goal of the aforementioned contribution was the numerical experiment, using Maple software specially developed for that purpose. Since BKM is only one of a great variety of concrete numerical methods developed for mapping problems, our goal is to present an approach to 3D-mappings that is completely different from BKM. In fact, it is an extension of ideas of L. V. Kantorovich to the 3-dimensional case by using reduced quaternions and some suitable series of powers of a small parameter.
Whereas in the Clifford case of BKM the recovery of the mapping function itself and its relation to the monogenic kernel function has until now remained an open problem, this approach avoids such difficulties and leads to an approximation by monogenic polynomials depending on that small parameter.}, subject = {Architektur }, language = {en} } @inproceedings{DeAguinaga, author = {De Aguinaga, Jos{\´e} Guillermo}, title = {INFLUENCE OF DIFFERENT DATA TYPES FOR THE ESTIMATION OF HYDROMECHANICAL PARAMETERS FOR A WATER RETAINING DAM USING SYNTHETIC DATA}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom and Werner, Frank}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2760}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170306-27607}, pages = {12}, abstract = {The present research analyses the prediction error obtained under different data availability scenarios to determine which measurements contribute to an improvement of the model prognosis and which do not. A fully coupled 2D hydromechanical model of a water retaining dam is taken as an example. Here, the mean effective stress in the porous skeleton is reduced due to an increase in pore water pressure under drawdown conditions. Relevant model parameters are ranked by scaled sensitivities, Particle Swarm Optimization is applied to determine the optimal parameter values, and model validation is performed to determine the magnitude of the forecast error. We compare the predictions of the optimized models with results from a forward run of the reference model to obtain actual prediction errors. The analyses presented here were performed on 31 data sets of 100 observations of varying data types. Calibrating with multiple information types instead of only one brings better calibration results and an improved model prognosis. However, when using several types of information, the number of observations has to be increased to be able to cover a representative part of the model domain; otherwise a compromise between data availability and domain coverage proves best. Which type of calibration information contributes to the best prognoses could not be determined in advance, because the error in the model prognosis does not depend on the calibration error but on the parameter error, which unfortunately cannot be determined in reality since we do not know the true parameter values. Excellent calibration fits with parameter values near the limits of physically reasonable values provided the highest prognosis errors.
In contrast, models which included excess pore pressure values for calibration provided the best prognosis, independent of the calibration fit.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DeBieSommen, author = {De Bie, Hendrik and Sommen, Frank}, title = {VECTOR AND BIVECTOR FOURIER TRANSFORMS IN CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2837}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28371}, pages = {11}, abstract = {In the past, several types of Fourier transforms in Clifford analysis have been studied. In this paper, first an overview of these different transforms is given. Next, a new equation in a Clifford algebra is proposed, the solutions of which will act as kernels of a new class of generalized Fourier transforms. Two solutions of this equation are studied in more detail, namely a vector-valued solution and a bivector-valued solution, as well as the associated integral transforms.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DeSchepperBrackxSommen, author = {De Schepper, Nele and Brackx, Fred and Sommen, Frank}, title = {THE FOURIER-BESSEL TRANSFORM}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2838}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28387}, pages = {18}, abstract = {In this paper we devise a new multi-dimensional integral transform within the Clifford analysis setting, the so-called Fourier-Bessel transform. It appears that in the two-dimensional case, it coincides with the Clifford-Fourier and cylindrical Fourier transforms introduced earlier. We show that this new integral transform satisfies operational formulae which are similar to those of the classical tensorial Fourier transform. Moreover, the L2-basis elements consisting of generalized Clifford-Hermite functions appear to be eigenfunctions of the Fourier-Bessel transform.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DeaconvanRooyen, author = {Deacon, Michael-John and van Rooyen, G.C.}, title = {DISTRIBUTED COLLABORATION: ENGINEERING PRACTICE REQUIREMENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2941}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29410}, pages = {8}, abstract = {Designing a structure follows a pattern of creating a structural design concept, executing a finite element analysis and developing a design model. A project was undertaken to create computer support for executing these tasks within a collaborative environment. This study focuses on developing a software architecture that integrates the various structural design aspects into a seamless functional collaboratory that satisfies engineering practice requirements. The collaboratory is to support both homogeneous collaboration, i.e. between users operating on the same model, and heterogeneous collaboration, i.e. between users operating on different model types. Collaboration can take place synchronously or asynchronously, and the information exchange is done either at the granularity of objects or at the granularity of models.
The objective is to determine from practicing engineers which configurations they regard as best and what features are essential for working in a collaborative environment. Based on the suggestions of these engineers, a specification of a collaboration configuration that satisfies engineering practice requirements will be developed.}, subject = {Architektur }, language = {en} } @inproceedings{DeebZabel, author = {Deeb, Maher and Zabel, Volkmar}, title = {THE APPLICATION OF POD CURVES TO DAMAGE DETECTION BASED ON PARTIAL MODELS- A NUMERICAL AND EXPERIMENTAL STUDY}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 04 - 06 2012, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom and Werner, Frank}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2761}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170306-27615}, pages = {18}, abstract = {Non-destructive techniques for damage detection have become the focus of engineering interest in the last few years. However, applying these techniques to large complex structures like civil engineering buildings still has some limitations since these types of structures are unique and the methodologies often need a large number of specimens for reliable results. For this reason, cost and time can greatly influence the final results. Model Assisted Probability Of Detection (MAPOD) has taken its place among the ranks of damage identification techniques, especially with advances in computer capacity and modeling tools. Nevertheless, the essential condition for a successful MAPOD is having a reliable model in advance. This condition opens the door to model assessment and model quality problems. In this work, an approach is proposed that uses Partial Models (PM) to compute the Probability Of damage Detection (POD). A simply supported beam that can be structurally modified and tested under laboratory conditions is taken as an example. The study includes both experimental and numerical investigations, the application of vibration-based damage detection approaches and a comparison of the results obtained based on tests and simulations. Eventually, a proposal for a methodology to assess the reliability and the robustness of the models is given.}, subject = {Angewandte Informatik}, language = {en} }
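The MAPOD entry above revolves around computing probability-of-detection (POD) curves with model support. As a purely illustrative aside, the following minimal Python sketch shows the generic idea of estimating a POD curve by Monte Carlo sampling of a noisy damage indicator against a detection threshold; the indicator model, noise level and threshold are hypothetical assumptions chosen for illustration only and are not taken from the cited paper.

    # Minimal Monte Carlo POD sketch (hypothetical indicator model, not the authors' method).
    import numpy as np

    rng = np.random.default_rng(0)

    def damage_indicator(damage_size, n_samples, noise_std=0.05):
        # Assumed linear indicator-versus-damage-size relation with additive Gaussian noise.
        return 0.8 * damage_size + rng.normal(0.0, noise_std, size=n_samples)

    def pod_curve(damage_sizes, threshold=0.1, n_samples=2000):
        # POD at each damage size = fraction of noisy indicator samples above the threshold.
        return np.array([np.mean(damage_indicator(a, n_samples) > threshold)
                         for a in damage_sizes])

    sizes = np.linspace(0.0, 0.5, 11)  # hypothetical damage sizes (arbitrary units)
    for a, p in zip(sizes, pod_curve(sizes)):
        print(f"damage size {a:.2f} -> estimated POD {p:.3f}")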