@inproceedings{DzwigonHempel, author = {Dzwigon, Wieslaw and Hempel, Lorenz}, title = {ZUR SYNCHRONISATION VON LINIEN IM {\"O}PNV}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2944}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29442}, pages = {12}, abstract = {Wir betrachten im {\"O}PNV ({\"O}ffentlichen Personennahverkehr) diejenige Situation, daß zwei Bus- oder Straßenbahnlinien gemeinsame Haltestellen haben. Ziel unserer Untersuchungen ist es, f{\"u}r beide Linien einen solchen Fahrplan zu finden, der f{\"u}r die Fahrg{\"a}ste m{\"o}glichst viel Bequemlichkeit bietet. Die Bedarfsstruktur - die Anzahl von Personen, die die beiden Linien benutzen - setzt dabei gewisse Beschr{\"a}nkungen f{\"u}r die Taktzeiten der beiden Linien. Die verbleibenden Entscheidungsfreiheiten sollen im Sinne der Zielstellung ausgenutzt werden. Im Vortrag wird folgenden Fragen nachgegangen: - nach welchen Kriterien kann man die "Bequemlichkeit" oder die "Synchronisationsg{\"u}te" messen? - wie kann man die einzelnen "Synchronisationsmaße" berechnen? - wie kann man die verbleibenden Entscheidungsfreiheiten nutzen, um eine m{\"o}glichst gute Synchronisation zu erreichen? Die Ergebnisse werden dann auf einige Beispiele angewandt und mit den bereitgestellten Methoden L{\"o}sungsvorschl{\"a}ge unterbreitet.}, subject = {Architektur }, language = {de} } @inproceedings{Markwardt, author = {Markwardt, Klaus}, title = {WAVELET ANALYSIS AND FREQUENCY BAND DECOMPOSITIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2989}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29895}, pages = {22}, abstract = {In many applications such as parameter identification of oscillating systems in civil engineering, speech processing, image processing and others we are interested in the frequency content of a signal locally in time. As a start, wavelet analysis provides a time-scale decomposition of signals, but this wavelet transform can be connected with an appropriate time-frequency decomposition. For instance, in Matlab pseudo-frequencies of wavelet scales are defined as frequency centers of the corresponding bands. These frequency bands overlap more or less, depending on the choice of the biorthogonal wavelet system. Such a definition of the frequency center is possible and useful, because different frequencies predominate at different dyadic scales of a wavelet decomposition or rather at different nodes of a wavelet packet decomposition tree. The goal of this work is to offer better algorithms for characterising frequency band behaviour and for calculating frequency centers of orthogonal and biorthogonal wavelet systems. This will be done with some product formulas in the frequency domain. Now the connecting procedures are more analytically based, better connected with wavelet theory and more assessable. These procedures do not need any time approximation of the wavelet and scaling functions. The method only works in the case of biorthogonal wavelet systems, where scaling functions and wavelets are defined over discrete filters. But this is the practically essential case, because it is connected with fast algorithms (FWT, Mallat Algorithm). At the end, corresponding to the wavelet transform, some closed formulas for pure oscillations are given.
They can generally be used to compare the application of different wavelets in the FWT regarding its frequency behaviour.}, subject = {Architektur }, language = {en} } @inproceedings{SchleinkoferSchaefervanTreecketal., author = {Schleinkofer, Matthias and Sch{\"a}fer, T. and van Treeck, Christoph and Rank, Ernst}, title = {VOM LASERSCAN ZUM PLANUNGSTAUGLICHEN PRODUKTMODELL}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3015}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30159}, pages = {18}, abstract = {Im Bereich der Altbausanierung und der Bestandserfassung im Bauwesen ist es h{\"a}ufig notwendig, bestehende Pl{\"a}ne hinsichtlich des Bauwerkszustandes zu aktualisieren oder, wenn diese Pl{\"a}ne nicht (mehr) zug{\"a}nglich sind, g{\"a}nzlich neue Planunterlagen des Ist-Zustandes zu erstellen. Ein komfortabler Weg, diese Bauwerksdaten zu erheben, er{\"o}ffnet die Technologie der Laservermessung. Der vorliegende Artikel stellt in diesem Zusammenhang Ans{\"a}tze zur Teilautomatisierung der Generierung eines dreidimensionalen Computermodells eines Bauwerkes vor. Als Ergebnis wird ein Volumenmodell bereitgestellt, in dem zun{\"a}chst die geometrischen und topologischen Informationen {\"u}ber Fl{\"a}chen, Kanten und Punkte im Sinne eines B-rep Modells beschrieben sind. Die Objekte dieses Volumenmodells werden mit Verfahren aus dem Bereich der k{\"u}nstlichen Intelligenz analysiert und in Bauteilklassen systematisch kategorisiert. Die Kenntnis der Bauteilsemantik erlaubt es somit, aus den Daten ein Bauwerks-Produktmodell abzuleiten und dieses einzelnen Fachplanern - etwa zur Erstellung eines Energiepasses - zug{\"a}nglich zu machen. Der Aufsatz zeigt den erfolgreichen Einsatz virtueller neuronaler Netze im Bereich der Bestandserfassung anhand eines komplexen Beispiels.}, subject = {Architektur }, language = {de} } @inproceedings{SampaioHenriquesStuderetal., author = {Sampaio, Alcinia Zita and Henriques, Pedro and Studer, P. and Luizi, Rui}, title = {VIRTUAL REALITY TECHNOLOGY TO REPRESENT CONSTRUCTION ACTIVITIES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30090}, pages = {9}, abstract = {The use of virtual reality techniques in the development of educational applications brings new perspectives to the teaching of subjects related to the field of civil construction in the Civil Engineering domain. In order to obtain models, which would be able to visually simulate the construction process of two types of construction work, the research turned to the techniques of geometric modelling and virtual reality. The applications developed for this purpose are concerned with the construction of a cavity wall and a bridge. These models make it possible to view the physical evolution of the work, to follow the planned construction sequence and to visualize details of the form of every component of the works. They also support the study of the type and method of operation of the equipment necessary for these construction procedures. These models have been used to distinct advantage as educational aids in first-degree courses in Civil Engineering. Normally, three-dimensional geometric models, which are used to present architectural and engineering works, show only their final form, not allowing the observation of their physical evolution.
The visual simulation of the construction process needs to be able to produce changes to the geometry of the project dynamically. In the present study, two engineering construction work models were created, from which it was possible to obtain three-dimensional models corresponding to different states of their form, simulating distinct stages in their construction. Virtual reality technology was applied to the 3D models. Virtual reality capacities allow the interactive real-time viewing of 3D building models and facilitate the process of visualizing, evaluating and communicating.}, subject = {Architektur }, language = {en} } @inproceedings{SmithGarloffWerkle, author = {Smith, Andrew Paul and Garloff, J{\"u}rgen and Werkle, Horst}, title = {VERIFIED SOLUTION OF FINITE ELEMENT MODELS WITH UNCERTAIN NODE LOCATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2901}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-29010}, pages = {15}, abstract = {We consider a structural truss problem where all of the physical model parameters are uncertain: not just the material values and applied loads, but also the positions of the nodes are assumed to be inexact but bounded and are represented by intervals. Such uncertainty may typically arise from imprecision during the process of manufacturing or construction, or round-off errors. In this case the application of the finite element method results in a system of linear equations with numerous interval parameters which cannot be solved conventionally. Applying a suitable variable substitution, an iteration method for the solution of a parametric system of linear equations is firstly employed to obtain initial bounds on the node displacements. Thereafter, an interval tightening (pruning) technique is applied, firstly on the element forces and secondly on the node displacements, in order to obtain tight guaranteed enclosures for the interval solutions for the forces and displacements.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DeBieSommen, author = {De Bie, Hendrik and Sommen, Frank}, title = {VECTOR AND BIVECTOR FOURIER TRANSFORMS IN CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2837}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28371}, pages = {11}, abstract = {In the past, several types of Fourier transforms in Clifford analysis have been studied. In this paper, first an overview of these different transforms is given. Next, a new equation in a Clifford algebra is proposed, the solutions of which will act as kernels of a new class of generalized Fourier transforms. 
Two solutions of this equation are studied in more detail, namely a vector-valued solution and a bivector-valued solution, as well as the associated integral transforms.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Siekierski, author = {Siekierski, Wojciech}, title = {VARIATION OF ROTATIONAL RESTRAINT IN GRID DECK CONNECTION DUE TO CORROSION DAMAGE AND STRENGTHENING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3021}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30217}, pages = {8}, abstract = {An approach to the assessment of the rotational restraint of the stringer-to-crossbeam connection in the deck of a 100-year-old steel truss bridge is presented. The sensitivity of the rotational restraint coefficient of the connection to corrosion damage and strengthening is analyzed. Two criteria for the assessment of the rotational restraint coefficient are applied: a static and a kinematic one. The former is based on the bending moment distribution in the considered member, the latter on the member rotation at the given joint. A 2D finite element model is described: webs and flanges are modeled with shell elements, while rivets in the connection are modeled with a system of beam and spring elements. The method of rivet modeling is verified by T-stub connection test results published in the literature. FEM analyses proved that the recorded extent of corrosion damage does not alter the initial rotational restraint of the stringer-to-crossbeam connection. Strengthening of the stringer midspan influences the midspan bending moment and the stringer end rotation in different ways. Usually, restoring a member's load-bearing capacity means strengthening its critical regions (where the highest stress levels occur). This alters the flexural stiffness distribution over the member length and influences the rotational restraint at its connection to other members. The impact depends on the criterion chosen for the assessment of the rotational restraint coefficient.}, subject = {Architektur }, language = {en} } @inproceedings{HapurneNistor, author = {Hapurne, Tania Mariana and Nistor, S.}, title = {USING MODERN TECHNOLOGIES TO UPGRADE EDILITARY URBAN INFRASTRUCTURE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2965}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29650}, pages = {6}, abstract = {Adopting the European laws concerning environmental protection will require sustained efforts of the authorities and communities from Romania; implementing modern solutions will become a fast and effective option for the improvement of the functioning systems, in order to prevent disasters. As a part of the urban infrastructure, the drainage networks of pluvial and residual waters are included in the plan of promoting the systems which protect the environmental quality, with the purpose of integrated and adaptive management. The paper presents a distributed control system for the sewer network of the town of Iasi. The unsatisfactory technical state of the actual sewer system is exposed, focusing on objectives related to the implementation of the control system. The proposed distributed control system of the Iasi drainage network is based on the implementation of hierarchic control theory for diagnosis, sewer planning and management. Two control levels are proposed: coordinating and local execution.
The configuration of the distributed control system, including data acquisition and conversion equipment, interface characteristics, local data bus, data communication network and station configuration, is described in detail. The project aims to be a useful instrument for the local authorities in preventing and reducing the impact of future natural disasters on urban areas by means of modern technologies.}, subject = {Architektur }, language = {en} } @inproceedings{LehnerHartmann, author = {Lehner, Karlheinz and Hartmann, Dietrich}, title = {USING INTERVAL ANALYSIS FOR STRUCTURAL ENGINEERING PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2984}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29844}, pages = {10}, abstract = {Interval analysis extends the concept of computing with real numbers to computing with real intervals. As a consequence, some interesting properties appear, such as the delivery of guaranteed results or confirmed global values. The former property is given in the sense that unknown numerical values are known to lie in a computed interval. The latter property states that the global minimum value, for example, of a given function is also known to be contained in an interval (or a finite set of intervals). Depending upon the amount of computation effort invested in the calculation, we can often find tight bounds on these enclosing intervals. The downside of interval analysis, however, is the mathematically correct, but often very pessimistic size of the interval result. This is particularly due to the so-called dependency effect, where a single variable is used multiple times in one calculation. Applying interval analysis to structural analysis problems, the dependency has a great influence on the quality of numerical results. In this paper, a brief background of interval analysis is presented, and it is shown how it can be applied to the solution of structural analysis problems. A discussion of possible improvements as well as an outlook on parallel computing is also given.}, subject = {Architektur }, language = {en} } @inproceedings{DudekRichter, author = {Dudek, Mariusz and Richter, Matthias}, title = {UNTERSUCHUNGEN ZUR ZUVERL{\"A}SSIGKEIT DES STRAßENBAHNNETZES IN KRAKAU}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2943}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29432}, pages = {19}, abstract = {Der Begriff der Zuverl{\"a}ssigkeit spielt eine zentrale Rolle bei der Bewertung von Verkehrsnetzen. Aus der Sicht der Nutzer des {\"o}ffentlichen Personennahverkehrs ({\"O}PNV) ist eines der wichtigsten Kriterien zur Beurteilung der Qualit{\"a}t des Liniennetzes, ob es m{\"o}glich ist, mit einer großen Sicherheit das Reiseziel in einer vorgegebenen Zeit zu erreichen. Im Vortrag soll dieser Zuverl{\"a}ssigkeitsbegriff mathematisch gefasst werden. Dabei wird zun{\"a}chst auf den {\"u}blichen Begriff der Zuverl{\"a}ssigkeit eines Netzes im Sinne paarweiser Zusammenhangswahrscheinlichkeiten eingegangen. Dieser Begriff wird erweitert durch die Betrachtung der Zuverl{\"a}ssigkeit unter Einbeziehung einer maximal zul{\"a}ssigen Reisezeit. In vergangenen Arbeiten hat sich die Ring-Radius-Struktur als bew{\"a}hrtes Modell f{\"u}r die theoretische Beschreibung von Verkehrsnetzen erwiesen.
Diese {\"U}berlegungen sollen nun durch Einbeziehung realer Verkehrsnetzstrukturen erweitert werden. Als konkretes Beispiel dient das Straßenbahnnetz von Krakau. Hier soll insbesondere untersucht werden, welche Auswirkungen ein geplanter Ausbau des Netzes auf die Zuverl{\"a}ssigkeit haben wird. This paper is involved with CIVITAS-CARAVEL project: "Clean and better transport in cites". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {de} } @inproceedings{AbuAbedMilbradt, author = {Abu Abed, Wassim and Milbradt, Peter}, title = {UNDERSTANDING THE ASPECT OF FUZZINESS IN INTERPOLATION METHODS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2872}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28726}, pages = {22}, abstract = {Fuzzy functions are suitable to deal with uncertainties and fuzziness in a closed form maintaining the informational content. This paper tries to understand, elaborate, and explain the problem of interpolating crisp and fuzzy data using continuous fuzzy valued functions. Two main issues are addressed here. The first covers how the fuzziness, induced by the reduction and deficit of information i.e. the discontinuity of the interpolated points, can be evaluated considering the used interpolation method and the density of the data. The second issue deals with the need to differentiate between impreciseness and hence fuzziness only in the interpolated quantity, impreciseness only in the location of the interpolated points and impreciseness in both the quantity and the location. In this paper, a brief background of the concept of fuzzy numbers and of fuzzy functions is presented. The numerical side of computing with fuzzy numbers is concisely demonstrated. The problem of fuzzy polynomial interpolation, the interpolation on meshes and mesh free fuzzy interpolation is investigated. The integration of the previously noted uncertainty into a coherent fuzzy valued function is discussed. Several sets of artificial and original measured data are used to examine the mentioned fuzzy interpolations.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{ZimmermannBartels, author = {Zimmermann, J{\"u}rgen and Bartels, Jan-Hendrik}, title = {TREE-BASED METHODS FOR RESOURCE INVESTMENT AND RESOURCE LEVELLING PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3040}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30405}, pages = {6}, abstract = {The execution of project activities generally requires the use of (renewable) resources like machines, equipment or manpower. The resource allocation problem consists in assigning time intervals to the execution of the project activities while taking into account temporal constraints between activities emanating from technological or organizational requirements and costs incurred by the resource allocation. If the total procurement cost of the different renewable resources has to be minimized we speak of a resource investment problem. If the cost depends on the smoothness of the resource utilization over time the underlying problem is called a resource levelling problem. 
In this paper we consider a new tree-based enumeration method for solving resource investment and resource levelling problems, exploiting some fundamental properties of spanning trees. The enumeration scheme is embedded in a branch-and-bound procedure using a workload-based lower bound and a depth-first search. Preliminary computational results show that the proposed procedure is promising for instances with up to 30 activities.}, subject = {Architektur }, language = {en} } @inproceedings{KhanCongKarstenetal., author = {Khan, Farhan Manzoor Ahmed and Cong, ZiXiang and Karsten, Menzel and Stack, Paul}, title = {TRACKING OCCUPANTS AND INVENTORY ITEMS IN BUILDINGS USING RADIO FREQUENCY IDENTIFICATION (RFID) TECHNOLOGY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2856}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28562}, pages = {13}, abstract = {In order to make control decisions, Smart Buildings need to collect data from multiple sources and bring it to a central location, such as the Building Management System (BMS). This needs to be done in a timely and automated fashion. Besides data being gathered from different energy-using elements, information on occupant behaviour is also important for a building's requirement analysis. In this paper, the parameter of Occupant Density was considered to help determine the behaviour of occupants towards a building space. Through this parameter, support for building energy consumption and requirements based on occupant needs and demands was provided. The demonstrator presented provides information on the number of people present in a particular building space at any time, giving the space density. Such collections of density data made over a certain period of time represent occupant behaviour towards the building space, giving its usage patterns. Similarly, inventory items were tracked and monitored for moving out or being brought into a particular read zone. For both people and inventory items, this was achieved using small, low-cost, passive Ultra-High Frequency (UHF) Radio Frequency Identification (RFID) tags. Occupants were given the tags in the form factor of a credit card to be carried at all times. A central database was built where occupant and inventory information for a particular building space was maintained for monitoring and providing central data access.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{HuhntGielsdorf, author = {Huhnt, Wolfgang and Gielsdorf, F.}, title = {TOPOLOGICAL INFORMATION AS LEADING INFORMATION IN BUILDING PRODUCT MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2911}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29110}, pages = {11}, abstract = {Digital models of buildings are widely used in civil engineering. In these models, geometric information is used as leading information. Engineers are used to having geometric information, and, for instance, it is state of the art to specify a point by its three coordinates. However, the traditional approaches have disadvantages. Geometric information is over-determined. Thus, more geometric information is specified and stored than needed. In addition, engineers already deal with topological information. A denotation of objects in buildings is of a topological nature.
It has to be answered whether approaches where topological information takes the leading role would be more efficient in civil engineering. This paper presents such an approach. Topological information is modelled independently of geometric information. It is used for denoting the objects of a building. Geometric information is associated with topological information so that geometric information "weights" a topology. The concept presented in this paper has already been used in surveying existing buildings. Experiences in the use of this concept showed that the amount of geometric information required for a complete specification of a building could be reduced by a factor of up to 100. Further research will show how this concept can be used in planning processes.}, subject = {Architektur }, language = {en} } @inproceedings{WolkowiczRuthStahr, author = {Wolkowicz, Christian and Ruth, J{\"u}rgen and Stahr, Alexander}, title = {TOOL TO CHECK TOPOLOGY AND GEOMETRY FOR SPATIAL STRUCTURES ON BASIS OF THE EXTENDED MAXWELL'S RULE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3037}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30370}, pages = {8}, abstract = {One of the simplest principles in the design of light-weight structures is to avoid bending. This can be achieved by dissolving girders into members acting purely in axial tension or compression. The employment of cables for the tensioned members leads to even lighter structures which are called cable-strut structures. They constitute a subclass of spatial structures. To give fast information about the general feasibility of an architectural concept employing cable-strut structures is a challenging task due to their sophisticated mechanical behavior. In this regard it is essential to check whether the structure is stable and whether pre-stress can be applied. This paper presents a tool using the spreadsheet software Microsoft (MS) Excel which can give such information. Therefore it is not necessary to purchase special software, and the corresponding time-consuming training effort is much lower. The tool was developed on the basis of the extended Maxwell's rule, which besides topology also considers the geometry of the structure. For this the rank of the node equilibrium matrix is crucial. The significance and determination of the rank and the implementation of the corresponding algorithms in MS Excel are described in the following. The presented tool is able to support the structural designer in an early stage of the project in finding a feasible architectural concept for cable-strut structures. As examples for the application of the software tool, two special cable-strut structures, so-called tensegrity structures, were examined for their mechanical behavior.}, subject = {Architektur }, language = {en} } @inproceedings{ChangChang, author = {Chang, Wei-Tsang and Chang, Teng-Wen}, title = {TIME-BASED FORM TRANSFORMATION WITH FOLDING SPACE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2937}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29371}, pages = {10}, abstract = {Design activity could be treated computationally as state transition. In stepwise processing, in-between form-states are not easily observed. However, in this research a time-based concept is introduced and applied in order to bridge the gap.
In architecture, folding is one method of form manipulation, and architects also want to search for alternatives by this operation. Besides, the folding operation has to be defined and parameterized before the time factor is involved as a variable of folding. As a result, time-based transformation provides sequential form states and redirects design activity.}, subject = {Architektur }, language = {en} } @inproceedings{Heuer, author = {Heuer, Andreas}, title = {THREE-DIMENSIONAL MODELING OF CONCRETE WITH DAMAGE AND PLASTICITY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2967}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29671}, pages = {15}, abstract = {The concrete is modeled as a material with damage and plasticity, whereby the viscoplastic and the viscoelastic behaviour depend on the rate of the total strains. Due to the damage behaviour the compliance tensor develops different properties in tension and compression. Various yield surfaces, flow rules and damage rules have been tested with respect to their usability in a concrete model. One three-dimensional yield surface was developed by the author from a failure surface based on the Willam--Warnke five-parameter model. Only one general uni-axial stress-strain relation is used for the numeric control of the yield surface. From that curve all necessary parameters for different strengths of concrete and different strain rates can be derived by affine transformations. For the flow rule in the compression zone a non-associated inelastic potential is used, in the tension zone a Rankine potential. Due to the time-dependent formulation, the symmetry of the system equations is maintained in spite of the usage of non-associated potentials for the derivation of the inelastic strains. In the case of quasi-static computations a simple viscoplastic law is used that rests on an approach due to Perzyna. The principle of equality of dissipation power in the uni-axial and the three-axial state of stress is used. It is modified by a factor that depends on the actual stress ratio and, in comparison with the Kupfer experiments, it implies strains that are more realistic. The implementation of the concrete model is conducted in a mixed hybrid finite element. Examples at the structural level are introduced for verification of the concrete model.}, subject = {Architektur }, language = {en} } @inproceedings{Klingert, author = {Klingert, Maik}, title = {THE USAGE OF IMAGE PROCESSING METHODS FOR INTERPRETATION OF THERMOGRAPHY DATA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2977}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29777}, pages = {13}, abstract = {For the assessment of old buildings, thermographic analysis aided by infrared cameras is nowadays employed in a wide range of applications. Image processing and evaluation can be economically practicable only if the image evaluation can also be automated to the largest extent. For that reason methods of computer vision are presented in this paper to evaluate thermal images. To detect typical thermal image elements, such as thermal bridges and lintels, in thermal images and gray value images respectively, methods of digital image processing have been applied, for which numerical procedures are available to transform, modify and encode images.
At the same time, image processing can be regarded as a multi-stage process. In order to be able to accomplish the process of image analysis from image formation through perfecting and segmentation to categorization, appropriate functions must be implemented. For this purpose, different measuring procedures and methods for automated detection and evaluation have been tested.}, subject = {Architektur }, language = {en} } @inproceedings{KnauerDammeierMeffert, author = {Knauer, Uwe and Dammeier, T. and Meffert, Beate}, title = {THE STRUCTURE OF ROAD TRAFFIC SCENES AS REVEALED BY UNSUPERVISED ANALYSIS OF THE TIME AVERAGED OPTICAL FLOW}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2978}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29780}, pages = {9}, abstract = {The Lucas-Kanade tracker has proven to be an efficient and accurate method for calculation of the optical flow. However, this algorithm can reliably track only suitable image features like corners and edges. Therefore, the optical flow can only be calculated for a few points in each image, resulting in sparse optical flow fields. Accumulation of these vectors over time is a suitable method to retrieve a dense motion vector field. However, the accumulation process limits application of the proposed method to fixed camera setups. Here, a histogram based approach is favored to allow more than a single typical flow vector per pixel. The resulting vector field can be used to detect roads and prescribed driving directions which constrain object movements. The motion structure can be modeled as a graph. The nodes represent entry and exit points for road users as well as crossings, while the edges represent typical paths.}, subject = {Architektur }, language = {en} } @inproceedings{GrobConstalesKrausshar, author = {Grob, Dennis and Constales, Denis and Kraußhar, Rolf S{\"o}ren}, title = {THE HYPERCOMPLEX SZEG{\"O} KERNEL METHOD FOR 3D MAPPING PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2846}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28464}, pages = {7}, abstract = {In this paper we present rudiments of a higher dimensional analogue of the Szeg{\"o} kernel method to compute 3D mappings from elementary domains onto the unit sphere. This is a formal construction which provides us with a good substitution of the classical conformal Riemann mapping. We give explicit numerical examples and discuss a comparison of the results with those obtained alternatively by the Bergman kernel method.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{LavickaDelangheSoucek, author = {Lavicka, Roman and Delanghe, Richard and Soucek, Vladimir}, title = {THE HOWE DUALITY FOR HODGE SYSTEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2866}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28669}, pages = {11}, abstract = {In this note, we describe quite explicitly the Howe duality for Hodge systems and connect it with the well-known facts of harmonic analysis and Clifford analysis. In Section 2, we recall briefly the Fisher decomposition and the Howe duality for harmonic analysis. 
In Section 3, the well-known fact that Clifford analysis is a real refinement of harmonic analysis is illustrated by the Fisher decomposition and the Howe duality for the space of spinor-valued polynomials in the Euclidean space under the so-called L-action. On the other hand, for Clifford algebra valued polynomials, we can consider another action, called in Clifford analysis the H-action. In the last section, we recall the Fisher decomposition for the H-action obtained recently. As the Dirac equation plays the prominent role in Clifford analysis, in this case the basic set of equations is formed by the Hodge system. Moreover, analysis of Hodge systems can be viewed even as a refinement of Clifford analysis. In this note, we describe the Howe duality for the H-action. In particular, in Proposition 1, we recognize the Howe dual partner of the orthogonal group O(m) in this case as the Lie superalgebra sl(2|1). Furthermore, Theorem 2 gives the corresponding multiplicity-free decomposition with an explicit description of the irreducible pieces.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DeSchepperBrackxSommen, author = {De Schepper, Nele and Brackx, Fred and Sommen, Frank}, title = {THE FOURIER-BESSEL TRANSFORM}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2838}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28387}, pages = {18}, abstract = {In this paper we devise a new multi-dimensional integral transform within the Clifford analysis setting, the so-called Fourier-Bessel transform. It appears that in the two-dimensional case, it coincides with the Clifford-Fourier and cylindrical Fourier transforms introduced earlier. We show that this new integral transform satisfies operational formulae which are similar to those of the classical tensorial Fourier transform. Moreover the L2-basis elements consisting of generalized Clifford-Hermite functions appear to be eigenfunctions of the Fourier-Bessel transform.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{DjordjevicPetkovicZivkovic, author = {Djordjevic, Djordje and Petkovic, Dusan and Zivkovic, Darko}, title = {THE APPLICATION OF INTERVAL CALCULUS TO ESTIMATION OF PLATE DEFLECTION BY SOLVING POISSON'S PARTIAL DIFFERENTIAL EQUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2839}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28397}, pages = {12}, abstract = {This paper describes the application of interval calculus to the calculation of plate deflection, taking into account the inevitable and acceptable tolerance of the input data (input parameters). A simply supported reinforced concrete plate was taken as an example. The plate was loaded by uniformly distributed loads. Several parameters that influence the plate deflection are given as certain closed intervals. Accordingly, the results are obtained as intervals, so it was possible to follow the direct influence of a change of one or more input parameters on the output (in our example, deflection) values by using one model and one computing procedure. The described procedure could be applied to any FEM calculation in order to keep calculation tolerances, ISO-tolerances, and production tolerances within close limits (admissible limits).
Wolfram Mathematica has been used as a tool for interval calculation.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{EblingScheuermann, author = {Ebling, Julia and Scheuermann, G.}, title = {TEMPLATE MATCHING ON VECTOR FIELDS USING CLIFFORD ALGEBRA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2946}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29464}, pages = {25}, abstract = {Due to the amount of flow simulation and measurement data, automatic detection, classification and visualization of features is necessary for an inspection. Therefore, many automated feature detection methods have been developed in recent years. However, only one feature class is visualized afterwards in most cases, and many algorithms have problems in the presence of noise or superposition effects. In contrast, image processing and computer vision have robust methods for feature extraction and computation of derivatives of scalar fields. Furthermore, interpolation and other filters can be analyzed in detail. An application of these methods to vector fields would provide a solid theoretical basis for feature extraction. The authors suggest Clifford algebra as a mathematical framework for this task. Clifford algebra provides a unified notation for scalars and vectors as well as a multiplication of all basis elements. The Clifford product of two vectors provides the complete geometric information of the relative positions of these vectors. Integration of this product results in Clifford correlation and convolution which can be used for template matching of vector fields. For frequency analysis of vector fields and the behavior of vector-valued filters, a Clifford Fourier transform has been derived for 2D and 3D. Convolution and other theorems have been proved, and fast algorithms for the computation of the Clifford Fourier transform exist. Therefore the computation of Clifford convolution can be accelerated by computing it in the Clifford Fourier domain. Clifford convolution and Fourier transform can be used for a thorough analysis and subsequent visualization of flow fields.}, subject = {Architektur }, language = {en} } @inproceedings{BultheelJansenMaesetal., author = {Bultheel, Adhemar and Jansen, M. and Maes, J. and Van Aerschot, W. and Vanraes, E.}, title = {SUBDIVIDE AND CONQUER RESOLUTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2909}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29091}, pages = {47}, abstract = {This contribution will be freewheeling in the domain of signal, image and surface processing and touch briefly upon some topics that have been close to the heart of people in our research group. A lot of the research of the last 20 years in this domain that has been carried out worldwide is dealing with multiresolution. Multiresolution allows one to represent a function (in the broadest sense) at different levels of detail. This was not only applied in signals and images but also when solving all kinds of complex numerical problems. Since wavelets came into play in the 1980's, this idea was applied and generalized by many researchers. Therefore we use this as the central idea throughout this text. Wavelets, subdivision and hierarchical bases are the appropriate tools to obtain these multiresolution effects.
We shall introduce some of the concepts in a rather informal way and show that the same concepts will work in one, two and three dimensions. The applications in the three cases are however quite different, and thus one wants to achieve very different goals when dealing with signals, images or surfaces. Because completeness in our treatment is impossible, we have chosen to describe two case studies after introducing some concepts in signal processing. These case studies are still the subject of current research. The first one attempts to solve a problem in image processing: how to approximate an edge in an image efficiently by subdivision. The method is based on normal offsets. The second case is the use of Powell-Sabin splines to give a smooth multiresolution representation of a surface. In this context we also illustrate the general method of construction of a spline wavelet basis using a lifting scheme.}, subject = {Architektur }, language = {en} } @inproceedings{Szolomicki, author = {Szolomicki, Jerzy Pawel}, title = {STRUCTURAL BEHAVIOUR OF MASONRY VAULTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2896}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28966}, pages = {11}, abstract = {This paper deals with the modelling and the analysis of masonry vaults. Numerical FEM analyses are performed using the LUSAS code. Two vault typologies are analysed (barrel and cross-ribbed vaults), parametrically varying geometrical proportions and constraints. The proposed model and the developed numerical procedure are implemented in a computer analysis. Numerical applications are developed to assess the model effectiveness and the efficiency of the numerical procedure. The main object of the present paper is the development of a computational procedure which allows the 3D structural behaviour of masonry vaults to be defined. For each investigated example, the homogenized limit analysis approach has been employed to predict the ultimate load and failure mechanisms. Finally, both a mesh dependence study and a sensitivity analysis are reported. The sensitivity analysis is conducted by varying the mortar tensile strength and the mortar friction angle over a wide range, with the aim of investigating the influence of the mechanical properties of the joints on the collapse load and the failure mechanisms. The proposed computer model is validated by a comparison with experimental results available in the literature.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BrossmannMueller, author = {Broßmann, Marko and M{\"u}ller, Karl-Heinz}, title = {STOCHASTISCHE ANALYSE VON STAHLBETONBALKEN IM GRENZZUSTAND DER ADAPTION UNTER BER{\"U}CKSICHTIGUNG DER STEIFIGKEITSDEGRADATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2934}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29341}, pages = {20}, abstract = {Am Beispiel eines 3-feldrigen Durchlauftr{\"a}gers wird die Versagenswahrscheinlichkeit von wechselnd belasteten Stahlbetonbalken bez{\"u}glich des Grenzzustandes der Adaption (Einspielen, shakedown) untersucht. Die Adaptionsanalyse erfolgt unter Ber{\"u}cksichtigung der beanspruchungsabh{\"a}ngigen Degradation der Biegesteifigkeit infolge Rissbildung.
Die damit verbundene mechanische Problemstellung kann auf die Adaptionsanalyse linear elastisch - ideal plastischer Balkentragwerke mit unbekannter aber begrenzter Biegesteifigkeit zur{\"u}ckgef{\"u}hrt werden. Die Versagenswahrscheinlichkeit wird unter Ber{\"u}cksichtigung stochastischer Tragwerks- und Belastungsgr{\"o}ßen berechnet. Tragwerkseigenschaften und st{\"a}ndige Lasten gelten als zeitunabh{\"a}ngige Zufallsgr{\"o}ßen. Zeitlich ver{\"a}nderliche Lasten werden als nutzungsdauerbezogene Extremwerte POISSONscher Rechteck-Pulsprozesse unter Ber{\"u}cksichtigung zeitlicher {\"U}berlagerungseffekte modelliert, so dass die Versagenswahrscheinlichkeit ebenfalls eine nutzungsdauerbezogene Gr{\"o}ße ist. Die mechanischen Problemstellungen werden numerisch mit der mathematischen Optimierung gel{\"o}st. Die Versagenswahrscheinlichkeit wird auf statistischem Weg mit der Monte-Carlo-Methode gesch{\"a}tzt.}, subject = {Architektur }, language = {de} } @inproceedings{BauerRichter, author = {Bauer, Marek and Richter, Matthias}, title = {STATISTICAL ANALYSIS OF TIME LOST BY TRAMS BEFORE DEPARTURE FROM STOPS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2922}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29226}, pages = {18}, abstract = {The ride of the tram along the line, defined by a time-table, consists of the travel time between the subsequent sections and the time spent by the tram at the stops. In the paper, statistical data collected in the city of Krakow is presented and evaluated. In Polish conditions, the time spent by trams at stops makes up the remarkable amount of 30 \% of the total time of tram line operation. Moreover, this time is characterized by large variability. The time spent by a tram at a stop consists of the alighting and boarding time and the time lost at the stop after alighting and boarding have ended, but before departure. The alighting and boarding time itself usually depends on the random number of alighting and boarding passengers and also on the number of passengers who are inside the vehicle. However, the time lost at the stop after alighting and boarding have ended is an effect of certain random events, mainly the impossibility of departing from the stop, caused by a lack of priorities for public transport vehicles. The main focus of the talk lies on the description and the modelling of these effects. This paper is related to the CIVITAS-CARAVEL project: "Clean and better transport in cities". The project has received research funding from the Community's Sixth Framework Programme. The paper reflects only the author's views and the Community is not liable for any use that may be made of the information contained therein.}, subject = {Architektur }, language = {en} } @inproceedings{Petrova, author = {Petrova, Lyllia B.}, title = {STATIC ANALYSIS ON MODELS OF CONTINUOUS ORTHOTROPIC THIN-WALLED PRISMATIC SHELL STRUCTURES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3000}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30007}, pages = {8}, abstract = {The paper presents a linear static analysis on continuous orthotropic thin-walled shell structures simply supported at the transverse ends with a random deformable contour of the cross section. The external loads can be random as well.
The class of these structures includes most bridges, scaffold bridges, some roof structures, etc. A numerical example of a continuous steel structure over five spans with an open contour of the cross-section has been solved. The examination of the structure has used the following two computation models: a prismatic structure consisting of isotropic strips, plates and ribs, considering their real interaction, and a smooth orthotropic plate equivalent to the structure in the first model. The displacements and forces of the structure characterizing its stressed and deformed condition have been determined. The results obtained from the two solutions have been analyzed. The study on the structure is made with the force method in combination with the analytical finite strip method (AFSM) in displacements. The basic system is obtained by separating the superstructure from the understructure at the places of the intermediate supports and consists of two parts. The first part is a single-span thin-walled prismatic shell structure; the second part represents the supports (columns, space frames, etc.). The connection between the superstructure and the intermediate supports is made under random supporting conditions. The forces at the supporting points in the direction of the removed connections are assumed to be the basic unknowns of the force method. The solution of the superstructure has been accomplished by the AFSM in displacements. The structure is divided in only one (transverse) direction into a finite number of plane strips connected to each other in longitudinal linear nodes. The three displacements of the points on the node lines and the rotation around those lines have been assumed to be the basic unknowns in each node. The boundary conditions of each strip of the basic system correspond to the simple support along the transverse ends and the restraint along the longitudinal ones. The particular strip of the basic system has been solved by the method of the single trigonometric series. The method is reduced to solving a discrete structure in displacements and restoring its continuity at the places of the sections made, with respect to both the displacements and forces. The two parts of the basic system have been solved in sequence under the action of single values of each of the basic unknowns and with the external load. The solution of the support part is accomplished using software for analyzing structures by the FEM. The basic unknown forces have been determined from a system of canonical equations, the conditions of deformation continuity at the places of the removed connections between the superstructure and the intermediate supports. The final displacements and forces at a random point of the continuous superstructure have been determined using the principle of superposition.
The computations have been carried out by software developed with Visual Fortran version 5.0 for PC.}, subject = {Architektur }, language = {en} } @inproceedings{Malonek, author = {Malonek, Helmuth Robert}, title = {SPECIAL FUNCTIONS VERSUS ELEMENTARY FUNCTIONS IN HYPERCOMPLEX FUNCTION THEORY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2870}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28702}, pages = {3}, abstract = {In recent years special hypercomplex Appell polynomials have been introduced by several authors and their main properties have been studied by different methods and with different objectives. Like in the classical theory of Appell polynomials, their generating function is a hypercomplex exponential function. The observation that this generalized exponential function has, for example, a close relationship with Bessel functions confirmed the practical significance of such an approach to special classes of hypercomplex differentiable functions. Its usefulness for combinatorial studies has also been investigated. Moreover, an extension of those ideas led to the construction of complete sets of hypercomplex Appell polynomial sequences. Here we show how this opens the way for a more systematic study of the relation between some classes of Special Functions and Elementary Functions in Hypercomplex Function Theory.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{SchraderKoenke, author = {Schrader, Kai and K{\"o}nke, Carsten}, title = {SPARSE APPROXIMATE COMPUTATION OF SADDLE POINT PROBLEMS ARISING FROM FETI-DP DISCRETIZATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2887}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28874}, pages = {12}, abstract = {The numerical simulation of microstructure models in 3D requires, due to the enormous number of d.o.f., significant resources of memory as well as parallel computational power. Compared to homogeneous materials, the material heterogeneity on the microscale induced by different material phases demands adequate computational methods for the discretization and solution process of the resulting highly nonlinear problem. To enable an efficient/scalable solution process of the linearized equation systems, the heterogeneous FE problem will be described by a FETI-DP (Finite Element Tearing and Interconnecting - Dual Primal) discretization. The fundamental FETI-DP equation can be solved by a number of different approaches. In our approach the FETI-DP problem will be reformulated as a Saddle Point system, by eliminating the primal and Lagrangian variables. For the reduced Saddle Point system, only defined by interior and dual variables, special Uzawa algorithms can be adapted for iteratively solving the FETI-DP saddle-point equation system (FETI-DP SPE). A conjugate gradient version of the Uzawa algorithm will be shown as well as some numerical tests regarding the FETI-DP discretization of small examples using the presented solution technique.
Furthermore, the inversion of the interior-dual Schur complement operator can be approximated using different techniques, building an adequate preconditioning matrix and thereby leading to substantial gains in computing time efficiency.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Loemker, author = {L{\"o}mker, Thorsten Michael}, title = {SOLVING REVITALIZATION-PROBLEMS BY THE USE OF A CONSTRAINT PROGRAMING LANGUAGE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2987}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29874}, pages = {13}, abstract = {This research focuses on an approach to describe principles in architectural layout planning within the domain of revitalization. With the aid of mathematical rules, which are executed by a computer, solutions to design problems are generated. Provided that "design" is in principle a combinatorial problem, i.e. a constraint-based search for an overall optimal solution of a problem, an exemplary method will be described to solve such problems in architectural layout planning. To avoid conflicts relating to theoretical subtleness, a customary approach adopted from Operations Research has been chosen in this work. In this approach, design is a synonym for planning, which could be described as a systematic and methodical course of action for the analysis and solution of current or future problems. The planning task is defined as an analysis of a problem with the aim to prepare optimal decisions by the use of mathematical methods. The decision problem of a planning task is represented by an optimization model and the application of an efficient algorithm in order to aid finding one or more solutions to the problem. The basic principle underlying the approach presented herein is the understanding of design in terms of searching for solutions that fulfill specific criteria. This search is executed by the use of a constraint programming language.}, subject = {Architektur }, language = {en} } @inproceedings{PerepelitsaTebuevaShenkao, author = {Perepelitsa, V. A. and Tebueva, F.B. and Shenkao, Timour}, title = {SOLVABILITY EXPLORATION OF SEGMENTATION PROBLEM WITH LINEAR CONVOLUTION ALGORITHMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2999}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29999}, pages = {13}, abstract = {The paper is dedicated to the exploration of the solvability of the market segmentation problem with the help of linear convolution algorithms. The mathematical formulation of this problem represents an interval task of covering a bipartite graph by stars. Vertices of the first partition correspond to types of commodities, vertices of the second to customer groups. An appropriate method is offered for reducing the interval problem to a two-criterion task that has one implemented linear convolution algorithm. The unsolvability of the multicriterion, and consequently interval, market segmentation problem with the help of a linear convolution algorithm is proved.}, subject = {Architektur }, language = {en} } @inproceedings{EiermannErnstUllmann, author = {Eiermann, Michael and Ernst, O.
and Ullmann, Elisabeth}, title = {SOLUTION STRATEGIES FOR STOCHASTIC FINITE ELEMENT DISCRETIZATIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2949}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29493}, pages = {11}, abstract = {We consider efficient numerical methods for the solution of partial differential equations with stochastic coefficients or right hand side. The discretization is performed by the stochastic finite element method (SFEM). Separation of spatial and stochastic variables in the random input data is achieved via a Karhunen-Lo{\`e}ve expansion or Wiener's polynomial chaos expansion. We discuss solution strategies for the Galerkin system that take advantage of the special structure of the system matrix. For stochastic coefficients linear in a set of independent random variables we employ Krylov subspace recycling techniques after having decoupled the large SFEM stiffness matrix.}, subject = {Architektur }, language = {en} } @inproceedings{BeranHromada, author = {Beran, V{\´a}clav and Hromada, E.}, title = {SOFTWARE FOR PROJECT RELIABILITY ESTIMATION AND RISK EVALUATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2925}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29255}, pages = {16}, abstract = {The contribution presents a model that is able to simulate construction duration and cost for a building project. This model predicts a set of expected project costs and a duration schedule depending on input parameters such as production speed, scope of work, time schedule, bonding conditions and maximum and minimum deviations from scope of work and production speed. The simulation model is able to calculate, on the basis of an input level of probability, the adequate construction cost and time duration of a project. The reciprocal view is concerned with finding the adequate level of probability for given construction costs and activity durations. Among the interpretive outputs of the application software is the compilation of a presumed dynamic progress chart. This progress chart represents the expected scenario of development of a building project with the mapping of potential time dislocations for particular activities. The calculation of a presumed dynamic progress chart is based on an algorithm which calculates mean values as a partial result of the simulated building project. Construction cost and time models are, in many ways, useful tools in project management. Clients are able to make proper decisions about the time and cost schedules of their investments. Consequently, building contractors are able to schedule predicted project cost and duration before any decision is finalized.}, subject = {Architektur }, language = {en} } @inproceedings{HammBeissertKoenig, author = {Hamm, Matthias and Beißert, Ulrike and K{\"o}nig, Markus}, title = {SIMULATION-BASED OPTIMIZATION OF CONSTRUCTION SCHEDULES BY USING PARETO SIMULATED ANNEALING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2849}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28499}, pages = {13}, abstract = {Within the scheduling of construction projects, different, partly conflicting objectives have to be considered.
The specification of an efficient construction schedule is a challenging task, which leads to an NP-hard multi-criteria optimization problem. In the past decades, so-called metaheuristics have been developed for scheduling problems to find near-optimal solutions in reasonable time. This paper presents a Simulated Annealing concept to determine near-optimal construction schedules. Simulated Annealing is a well-known metaheuristic optimization approach for solving complex combinatorial problems. To enable dealing with several optimization objectives, the Pareto optimization concept is applied. Thus, the optimization result is a set of Pareto-optimal schedules, which can be analyzed for selecting exactly one practicable and reasonable schedule. A flexible constraint-based simulation approach is used to generate possible neighboring solutions very quickly during the optimization process. The essential aspects of the developed Pareto Simulated Annealing concept are presented in detail.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Vorechovsky, author = {Vorechovsk{\´y}, Miroslav}, title = {SIMULATION OF SIMPLY CROSS CORRELATED RANDOM FIELDS BY SERIES EXPANSION METHODS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2899}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28995}, pages = {13}, abstract = {A practical framework for generating cross correlated fields with a specified marginal distribution function, an autocorrelation function and cross correlation coefficients is presented in the paper. The contribution promotes a recent journal paper [1]. The approach relies on well known series expansion methods for simulation of a Gaussian random field. The proposed method requires all cross correlated fields over the domain to share an identical autocorrelation function and the cross correlation structure between each pair of simulated fields to be simply defined by a cross correlation coefficient. Such relations result in specific properties of the eigenvectors of covariance matrices of the discretized field over the domain. These properties are used to decompose the eigenproblem which must normally be solved in computing the series expansion into two smaller eigenproblems. Such a decomposition represents a significant reduction of computational effort. Non-Gaussian components of a multivariate random field are proposed to be simulated via memoryless transformation of underlying Gaussian random fields, for which the Nataf model is employed to modify the correlation structure. In this method, the autocorrelation structure of each field is fulfilled exactly while the cross correlation is only approximated.
The associated errors can be computed before performing simulations, and it is shown that the errors occur especially in the cross correlation between distant points and that they are negligibly small in practical situations.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BauerRichterWeiss, author = {Bauer, Marek and Richter, Matthias and Weiß, Hendrik}, title = {SIMULATION MODEL OF TRAM ROUTE OPERATION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2829}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28295}, pages = {19}, abstract = {From the passengers' perspective, punctuality is one of the most important features of tram route operation. We present a stochastic simulation model with special focus on determining important factors of influence. The statistical analysis is based on large samples (sample size is nearly 2000) accumulated from comprehensive measurements on eight tram routes in Cracow. For the simulation, we are not only interested in average values but also in stochastic characteristics like the variance and other properties of the distribution. A realization of tram operation is assumed to be a sequence of running times between successive stops and times spent by the tram at the stops, divided into passenger alighting and boarding times and times waiting for the possibility of departure. The running time depends on the kind of track separation including the priorities at traffic lights, the length of the section and the number of intersections. For every type of section, a linear mixed regression model describes the average running time and its variance as functions of the length of the section and the number of intersections. The regression coefficients are estimated by the iterative re-weighted least squares method. Alighting and boarding time mainly depends on the type of vehicle, the number of passengers alighting and boarding, and the occupancy of the vehicle. For the distribution of the time waiting for the possibility of departure, suitable distributions like the Gamma distribution and the Lognormal distribution are fitted.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{FlaigApel, author = {Flaig, Thomas and Apel, Thomas}, title = {SIMULATION AND MATHEMATICAL OPTIMIZATION OF THE HYDRATION OF CONCRETE FOR AVOIDING THERMAL CRACKS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2842}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28424}, pages = {15}, abstract = {After the mixing of concrete, the hardening starts by an exothermic chemical reaction known as hydration. As the reaction rate depends on the temperature, the time in the description of the hydration is replaced by the maturity, which is defined as an integral over a certain function depending on the temperature. The temperature distribution is governed by the heat equation with a right hand side depending on the maturity and the temperature itself. We compare the performance of different time integration schemes of higher order with an automatic time step control. The simulation of the heat distribution is of importance as the development of mechanical properties is driven by the hydration. During this process it is possible that the tensile stresses exceed the tensile strength and cracks occur. The goal is to produce cheap concrete without cracks.
Simple crack criteria use only temperature differences; more involved ones are based on thermal stresses. If the criterion predicts cracks, some changes in the input data are needed. This can be interpreted as optimization. The final goal will be to adapt model-based optimization (in contrast to simulation-based optimization) to the problem of the hydration of young concrete and the avoidance of cracks. The first step is the simulation of the hydration, on which we focus in this paper.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Weitzmann, author = {Weitzmann, R{\"u}diger}, title = {SIMPLIFIED CYCLE-BASED DESIGN OF EXTREMELY LOADED STRUCTURES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3033}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30338}, pages = {14}, abstract = {The design of safety-critical structures exposed to cyclic excitations demands non-degrading or limited-degrading behavior during extreme events. Among others, the structural behavior is mainly determined by the number of plastic cycles completed during the excitation. Existing simplified methods often ignore this dependency, or assume/request sufficient cyclic capacity. The paper introduces a new performance-based design method that explicitly considers a predefined number of re-plastifications. Hereby, approaches from shakedown theory and signal processing methods are utilized. The paper introduces the theoretical background, explains the steps of the design procedure and demonstrates the applicability with the help of an example. This project was supported by the German Science Foundation (Deutsche Forschungsgemeinschaft, DFG).}, subject = {Architektur }, language = {en} } @inproceedings{HarbrechtEppler, author = {Harbrecht, Helmut and Eppler, K.}, title = {SHAPE OPTIMIZATION FOR FREE BOUNDARY PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2850}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28508}, pages = {8}, abstract = {In this paper three different formulations of a Bernoulli type free boundary problem are discussed. By analyzing the shape Hessian in the case of matching data, well-posed and ill-posed formulations are distinguished. A nonlinear Ritz-Galerkin method is applied for discretizing the shape optimization problem. In the case of well-posedness, existence and convergence of the approximate shapes are proven. In combination with a fast boundary element method, efficient first- and second-order shape optimization algorithms are obtained.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Meis, author = {Meis, Jochen}, title = {SERVICE DESIGN AND SERVICE MANAGEMENT WITH THE SERVICE BLUEPRINTING METHODOLOGY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2990}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29908}, pages = {11}, abstract = {A new application area of software technology is smart living or sustainable living. Within this area, application platforms are designed and realized with the goal of supporting value-added services. In this context, value-added services integrate microelectronics, home automation and services to enhance the attractiveness of flats, homes and buildings.
Real estate companies and service providers dealing with home services in particular are interested in an effective design and management of their services. Service engineering is an established approach for designing customer-oriented service processes. Service engineering consists of several phases, from situation analysis through service creation and service design to service management. This article describes how the service blueprint method can be used to design service processes. Smart living includes all actions that extend a flat into a smart home. One special requirement of this application domain is the use of local components (actuators, sensors) within service processes. This article shows how this extended method supports service providers in improving the quality of customer-oriented service processes and in deriving the needed interfaces of the involved actors. For the civil engineering process, it will be possible to derive the needed information from a built-in home automation system. The aim is to show how to obtain the smart local components needed to fulfill IT-supported value-added services offered later. Value-added services focused on inhabitants are grouped into consulting and information, care and supervision, leisure time activities, repairs, mobility and delivery, safety and security, and supply and disposal.}, subject = {Architektur }, language = {en} } @inproceedings{SchapkeSchererKatranuschkov, author = {Schapke, Sven-Eric and Scherer, Raimar J. and Katranuschkov, Peter}, title = {SEMANTIC SERVICE ENVIRONMENTS FOR INTEGRATING TEXT WITH MODEL-BASED INFORMATION IN AEC/FM}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3012}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30125}, pages = {10}, abstract = {In distributed project organisations and collaboration there is a need for integrating unstructured self-contained text information with structured project data. We consider this a process of text integration in which various text technologies can be used to externalise text content and consolidate it into structured information or flexibly interlink it with corresponding information bases. However, the effectiveness of text technologies and the potentials of text integration greatly vary with the type of documents, the project setup and the available background knowledge. The goal of our research is to establish text technologies within collaboration environments to allow for (a) flexibly combining appropriate text and data management technologies, (b) utilising available context information and (c) the sharing of text information in accordance with the most critical integration tasks. A particular focus is on Semantic Service Environments that leverage Web service and Semantic Web technologies and adequately support the required systems integration and parallel processing of semi-structured and structured information. The paper presents an architecture for text integration that extends Semantic Service Environments with two types of integration services. The backbone of the Information Resource Sharing and Integration Service is a shared environment ontology that consolidates information on the project context and the available model, text and general linguistic resources.
It also allows for the configuration of Semantic Text Analysis and Annotation Services to analyse the text documents as well as for capturing the discovered text information and sharing it through semantic notification and retrieval engines. A particular focus of the paper is the definition of the overall integration process configuring a complementary set of analyses and information sharing components.}, subject = {Architektur }, language = {en} } @inproceedings{Nasser, author = {Nasser, Mourad}, title = {SEISMIC RESPONSE OF R/C FRAMES CONSIDERING DYNAMIC SOIL-STRUCTURE INTERACTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2875}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28759}, pages = {17}, abstract = {In spite of the extensive research in dynamic soil-structure interaction (SSI), there still exist misconceptions concerning the role of SSI in the seismic performance of structures, especially the ones founded on soft soil. This is due to the fact that current analytical SSI models that are used to evaluate the influence of soil on the overall structural behavior are approximate models and may involve creeds and practices that are not always precise. This is especially true in the codified approaches which include substantial approximations to provide simple frameworks for the design. As the direct numerical analysis requires a high computational effort, performing an analysis considering SSI is computationally uneconomical for regular design applications. This paper outlines some milestones for evaluating SSI models. This will be achieved by investigating the different assumptions and involved factors, as well as varying the configurations of R/C moment-resisting frame structures supported by single footings which are subject to seismic excitations. It is noted that the scope of this paper is to highlight, rather than fully resolve, the above subject. A rough draft of the proposed approach is presented in this paper, whereas a thorough illustration will be carried out throughout the presentation in the course of the conference.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{GurtovyTynchuk, author = {Gurtovy, O. G. and Tynchuk, S.O.}, title = {RESEARCH OF DEFORMATION OF MULTILAYERED PLATES ON UNDEFORMABLE BASIS BY UNFLEXURAL SPECIFIED MODEL}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2961}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29613}, pages = {6}, abstract = {The stress-strain state (SSS) of multilayered plates on an undeformable foundation is investigated. The computational scheme of the transversely loaded plate is formed by attaching a mirrored copy of the plate symmetrically with respect to its contact surface with the foundation. The plate of double thickness is then loaded bilaterally and symmetrically with respect to its median surface. This allows modelling only unflexural deformation, which reduces the number of unknowns and the overall order of differentiation of the resolving system of equations. The developed refined continual model takes into account deformations of transverse shear and transverse compression in a high iterative approximation. Rigid contact between the foundation and the plate, as well as frictionless shear on the contact surface between the plate and the foundation, is considered.
Calculations confirm the efficiency of this approach, yielding solutions which are qualitatively and quantitatively close to three-dimensional solutions.}, subject = {Architektur }, language = {en} } @inproceedings{FalcaoCruzMalonek, author = {Falc{\~a}o, M. Irene and Cruz, J. F. and Malonek, Helmuth Robert}, title = {REMARKS ON THE GENERATION OF MONOGENIC FUNCTIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2939}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29390}, pages = {18}, abstract = {In this paper we consider three different methods for generating monogenic functions. The first one is related to Fueter's well known approach to the generation of monogenic quaternion-valued functions by means of holomorphic functions, the second one is based on the solution of hypercomplex differential equations and finally the third one is a direct series approach, based on the use of special homogeneous polynomials. We illustrate the theory by generating three different exponential functions and discuss some of their properties. Partially supported by the R\&D unit \emph{Matem{\'a}tica e Aplica{\c{c}}{\~o}es} (UIMA) of the University of Aveiro, through the Portuguese Foundation for Science and Technology (FCT), co-financed by the European Community fund FEDER.}, subject = {Architektur }, language = {en} } @inproceedings{BauerDudekRichter, author = {Bauer, Marek and Dudek, Mariusz and Richter, Matthias}, title = {RELIABILITY OF TRAM - NETWORK SECTION}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2828}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28281}, pages = {16}, abstract = {We investigate aspects of tram-network section reliability, which operates as a part of the reliability model of the whole city tram network. Here, one of the main points of interest is the character of the chronological development of the disturbances (namely the differences between the time of departure provided in the schedule and the real time of departure) on subsequent sections during tram line operation. These developments were observed in comprehensive measurements done in Krakow during the rebuilding of one of the main transportation nodes (Rondo Mogilskie). The building activities caused large disturbances in tram line operation, with effects extending to neighboring sections. In the second part, the stochastic character of the section running time will be analyzed in more detail. Sections with only one beginning stop as well as sections with two or three beginning stops located at different streets at an intersection will be taken into consideration. The possibility of pooling results from sections with two beginning stops into one set will be checked with suitable statistical tests for comparing the means of two samples. Section running time may depend on the gap between two successive trams and on the deviation from the schedule. This dependence will be described by a multiple regression formula.
The main measurements were done in the city center of Krakow in two stages: before and after big changes in the tramway infrastructure.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Suzuki, author = {Suzuki, Osamu}, title = {RECENT RESULTS ON ITERATION DYNAMICAL SYSTEMS OF DISCRETE LAPLACIANS ON THE PLANE LATTICE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2895}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28954}, pages = {13}, abstract = {Recent developments in the mathematical theory and the computer simulation of iteration dynamical systems of discrete Laplacians on the plane lattice are reviewed and future problems are discussed.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{SmarslyHartmann, author = {Smarsly, Kay and Hartmann, Dietrich}, title = {REAL-TIME MONITORING OF WIND CONVERTERS BASED ON SOFTWARE AGENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2891}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28916}, pages = {11}, abstract = {Due to the increasing number of wind energy converters, the accurate assessment of the lifespan of their structural parts and of the entire converter system is becoming more and more important. Lifespan-oriented design, inspections and remedial maintenance are challenging because of the converters' complex dynamic behavior. Wind energy converters are subjected to stochastic turbulent wind loading, causing a corresponding stochastic structural response and vibrations associated with an extreme number of stress cycles (up to 10^9, according to the rotation of the blades). Currently, wind energy converters are constructed for a service life of about 20 years. However, this estimation is more or less made by rule of thumb and not backed by profound scientific analyses or accurate simulations. By contrast, modern structural health monitoring systems allow an improved identification of deterioration and can thereby drastically advance the lifespan assessment of wind energy converters. In particular, monitoring systems based on artificial intelligence techniques represent a promising approach towards cost-efficient and reliable real-time monitoring. Therefore, an innovative real-time structural health monitoring concept based on software agents is introduced in this contribution. Recently, this concept has also been turned into a real-world monitoring system, developed in a DFG joint research project at the authors' institute at the Ruhr-University Bochum. In this paper, primarily the agent-based development, implementation and application of the monitoring system are addressed, focusing on the real-time monitoring tasks in appropriate detail.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{Schneider, author = {Schneider, David}, title = {QUALITY OPTIMIZATION USING LOCALLY REFINED META MODELS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2886}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28864}, pages = {17}, abstract = {Quality is one of the most important properties of a product.
Providing the optimal quality can reduce costs for rework, scrap, recalls or even legal actions while satisfying customers' demand for reliability. The aim is to achieve ``built-in'' quality within the product development process (PDP). A common approach to this end is robust design optimization (RDO). It uses stochastic values as constraints and/or objectives to obtain a robust and reliable optimal design. In classical approaches, the effort required for the stochastic analysis multiplies with the complexity of the optimization algorithm. The suggested approach shows that it is possible to reduce this effort enormously by using previously obtained data. To this end, the support point set of an underlying metamodel is filled iteratively in regions of interest during the ongoing optimization, if this is necessary. In a simple example, it will be shown that this is possible without significant loss of accuracy.}, subject = {Angewandte Informatik}, language = {en} }