@inproceedings{MostEckardtSchraderetal., author = {Most, Thomas and Eckardt, Stefan and Schrader, Kai and Deckner, T.}, title = {AN IMPROVED COHESIVE CRACK MODEL FOR COMBINED CRACK OPENING AND SLIDING UNDER CYCLIC LOADING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2993}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29933}, pages = {20}, abstract = {The modeling of crack propagation in plain and reinforced concrete structures is still an active field of research. If a macroscopic description of the cohesive cracking process of concrete is applied, generally the Fictitious Crack Model is utilized, where a force transmission over micro cracks is assumed. In most applications of this concept the cohesive model represents the relation between the normal crack opening and the normal stress, which is mostly defined as an exponential softening function, independently of the shear stresses in the tangential direction. The cohesive forces are then calculated only from the normal stresses. An improved model was developed by Carol et al. 1997 using a coupled relation between the normal and shear damage based on an elasto-plastic constitutive formulation. This model is based on a hyperbolic yield surface depending on the normal and the shear stresses and on the tensile and shear strength. This model also represents the effect of shear traction induced crack opening. Due to the elasto-plastic formulation, where the inelastic crack opening is represented by plastic strains, this model is limited to applications with monotonic loading. In order to enable the application to cases with un- and reloading, the existing model is extended in this study using a combined plastic-damage formulation, which enables the modeling of crack opening and crack closure. Furthermore, the corresponding algorithmic implementation using a return mapping approach is presented and the model is verified by means of several numerical examples. Finally, an investigation concerning the identification of the model parameters by means of neural networks is presented. In this analysis an inverse approximation of the model parameters is performed by using a given set of points of the load displacement curves as input values and the model parameters as output terms. It will be shown that the elasto-plastic model parameters can be identified well with this approach, but that this requires a huge number of simulations.}, subject = {Architektur }, language = {en} } @inproceedings{MostBucher, author = {Most, Thomas and Bucher, Christian}, title = {ADAPTIVE RESPONSE SURFACE APPROACH USING ARTIFICIAL NEURAL NETWORKS AND MOVING LEAST SQUARES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2992}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29922}, pages = {13}, abstract = {In engineering science the modeling and numerical analysis of complex systems and relations play an important role. In order to realize such an investigation, for example a stochastic analysis, in a reasonable computational time, approximation procedures have been developed. A well-known approach is the response surface method, where the relation between input and output quantities is represented for example by global polynomials or local interpolation schemes such as Moving Least Squares (MLS).
In recent years, artificial neural networks (ANN) have been applied for such purposes as well. Recently, an adaptive response surface approach for reliability analyses was proposed, which is very efficient concerning the number of expensive limit state function evaluations. Due to the applied simplex interpolation, the procedure is limited to small dimensions. In this paper this approach is extended to larger dimensions using combined ANN and MLS response surfaces for evaluating the adaptation criterion with only one set of joined limit state points. As adaptation criterion, a combination of the maximum difference in the conditional probabilities of failure and the maximum difference in the approximated radii is applied. Compared to response surfaces on directional samples or to plain directional sampling, the failure probability can be estimated with a much smaller number of limit state points.}, subject = {Architektur }, language = {en} } @inproceedings{MelnikovSemenov, author = {Melnikov, B. E. and Semenov, Artem}, title = {MULTILEVEL COMPUTATION IN CIVIL ENGINEERING BASED ON MULTIMODEL ELASTO-PLASTIC ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2991}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29911}, pages = {11}, abstract = {Requirements for reliability and durability of structures and their elements, combined with material economy, have stimulated the improvement of constitutive equations for the description of elasto-plastic deformation processes. This has led to the development of phenomenological modelling of complex phenomena of irreversible deformation, including history-dependent and rate-dependent effects. During the last several decades many works have been devoted to the development of elasto-plastic models in order to better predict the material behavior under combined variable thermo-mechanical loading. The increase of accuracy of stress analysis and safety factors for complex structures with the help of modern finite-element packages (ABAQUS, ANSYS, COSMOS, LS-DYNA, MSC.MARC, MSC.NASTRAN, PERMAS and others) can be achieved only by using complex and special variants of plasticity theories, which are adequate for the considered loading conditions and based on authentic information about the properties of materials. The areas of application of the various theories (models) are, as a rule, unknown to the users of finite-element packages, given the existing variety of loading conditions in machine-building designs. At the moment a universal theory of inelasticity is absent, and even the most accomplished theories cannot guarantee an adequate description of deformation processes for an arbitrary structure under a wide range of loading programs. Classifiers of materials, loading conditions and effects (phenomena) and a list of basic experiments are developed by the authors. Use of these classifiers to establish a hierarchy of models is a first step towards introducing multimodel analysis into computational practice. A set of classic and modern inelasticity theories is considered that are applicable to the stress analysis of structures under complex loading programs.
Among them there are plastic flow theories with linear and nonlinear isotropic and kinematic hardening, multisurface theories, endochronic theory, holonomic theory, rheologic models, the theory of elasto-plastic processes, slip theory, physical theories (single crystal and polycrystalline models) and others. The classification of materials provides a grouping by degree of homogeneity, chemical composition, level of strength and plasticity, behavior under cyclic loading, anisotropy of properties in the initial condition, anisotropy of properties during the deformation process, and structural stability. The classification of loading conditions takes into consideration proportional and non-proportional loading, temperature range, combination of cyclic and monotonic loading, uniaxial, biaxial and complex stress states, curvature of the strain path, presence of stress concentrators and level of strain gradient. A unified general form of constitutive equations is presented for all used material models based upon the concept of internal state variables. The wide range of inelastic material models mentioned above has been implemented in the finite element program PANTOCRATOR developed by the authors (see www.pantocrator.narod.ru for details). The applicability of different material models is considered both for a material element and for complex structures subjected to complex non-proportional loading.}, subject = {Architektur }, language = {en} } @inproceedings{Meis, author = {Meis, Jochen}, title = {SERVICE DESIGN AND SERVICE MANAGEMENT WITH THE SERVICE BLUEPRINTING METHODOLOGY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2990}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29908}, pages = {11}, abstract = {A new application of software technology is the application area of smart living or sustainable living. Within this area, application platforms are designed and realized with the goal of supporting value-added services. In this context, value-added services integrate microelectronics, home automation and services to enhance the attractiveness of flats, homes and buildings. Especially real estate companies or service providers dealing with home services are interested in an effective design and management of their services. Service engineering is an established approach for designing customer-oriented service processes. Service engineering consists of several phases, from situation analysis through service creation and service design to service management. This article describes how the service blueprint method can be used to design service processes. Smart living includes all measures to turn a flat into a smart home. One special requirement of this application domain is the use of local components (actuators, sensors) within service processes. This article will show how this extended method supports service providers in improving the quality of customer-oriented service processes and in deriving the needed interfaces of the involved actors. For the civil engineering process it will be possible to derive the needed information from a built-in home automation system. The aim is to show how to obtain the smart local components needed to fulfill IT-supported value-added services offered later.
Value-added services focused on inhabitants are grouped into consulting and information, care and supervision, leisure time activities, repairs, mobility and delivery, safety and security, and supply and disposal.}, subject = {Architektur }, language = {en} } @inproceedings{Markwardt, author = {Markwardt, Klaus}, title = {WAVELET ANALYSIS AND FREQUENCY BAND DECOMPOSITIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2989}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29895}, pages = {22}, abstract = {In many applications, such as parameter identification of oscillating systems in civil engineering, speech processing, image processing and others, we are interested in the frequency content of a signal locally in time. As a start, wavelet analysis provides a time-scale decomposition of signals, but this wavelet transform can be connected with an appropriate time-frequency decomposition. For instance, in Matlab pseudo-frequencies of wavelet scales are defined as the frequency centers of the corresponding bands. These frequency bands overlap more or less, depending on the choice of the biorthogonal wavelet system. Such a definition of the frequency center is possible and useful, because different frequencies predominate at different dyadic scales of a wavelet decomposition, or rather at different nodes of a wavelet packet decomposition tree. The goal of this work is to offer better algorithms for characterising frequency band behaviour and for calculating frequency centers of orthogonal and biorthogonal wavelet systems. This will be done with some product formulas in the frequency domain. Now the connecting procedures are more analytically based, better connected with wavelet theory and more assessable. These procedures do not need any time-domain approximation of the wavelet and scaling functions. The method only works in the case of biorthogonal wavelet systems, where scaling functions and wavelets are defined over discrete filters. But this is the practically essential case, because it is connected with fast algorithms (FWT, Mallat algorithm). At the end, corresponding to the wavelet transform, some closed formulas for pure oscillations are given. They can generally be used to compare the application of different wavelets in the FWT regarding their frequency behaviour.}, subject = {Architektur }, language = {en} } @inproceedings{LutherKoenke, author = {Luther, Torsten and K{\"o}nke, Carsten}, title = {INVESTIGATION OF CRACK GROWTH IN POLYCRYSTALLINE MESOSTRUCTURES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2988}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29886}, pages = {11}, abstract = {The design and application of high performance materials demands extensive knowledge of the material's damage behavior, which significantly depends on the meso- and microstructural complexity. Numerical simulations of crack growth on multiple length scales are promising tools to understand the damage phenomena in complex materials. In polycrystalline materials it has been observed that grain boundary decohesion is one important mechanism that leads to micro crack initiation.
Following this observation, the paper presents a polycrystal mesoscale model consisting of grains with orthotropic material behavior and cohesive interfaces along grain boundaries, which is able to reproduce the crack initiation and propagation along grain boundaries in polycrystalline materials. With respect to the importance of modeling the geometry of the grain structure, an advanced Voronoi algorithm is proposed to generate realistic polycrystalline material structures based on measured grain size distributions. The polycrystal model is applied to investigate the crack initiation and propagation in statically loaded representative volume elements of aluminum on the mesoscale without the necessity of an initial damage definition. Future research work is planned to include the mesoscale model in a multiscale model for the damage analysis of polycrystalline materials.}, subject = {Architektur }, language = {en} } @inproceedings{Loemker, author = {L{\"o}mker, Thorsten Michael}, title = {SOLVING REVITALIZATION-PROBLEMS BY THE USE OF A CONSTRAINT PROGRAMING LANGUAGE}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2987}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29874}, pages = {13}, abstract = {This research focuses on an approach to describe principles in architectural layout planning within the domain of revitalization. With the aid of mathematical rules, which are executed by a computer, solutions to design problems are generated. Provided that "design" is in principle a combinatorial problem, i.e. a constraint-based search for an overall optimal solution of a problem, an exemplary method will be described to solve such problems in architectural layout planning. To avoid conflicts relating to theoretical subtleties, a customary approach adopted from Operations Research has been chosen in this work. In this approach, design is a synonym for planning, which could be described as a systematic and methodical course of action for the analysis and solution of current or future problems. The planning task is defined as an analysis of a problem with the aim of preparing optimal decisions by the use of mathematical methods. The decision problem of a planning task is represented by an optimization model, and an efficient algorithm is applied in order to aid in finding one or more solutions to the problem. The basic principle underlying the approach presented herein is the understanding of design in terms of searching for solutions that fulfill specific criteria. This search is executed by the use of a constraint programming language.}, subject = {Architektur }, language = {en} } @inproceedings{LourensvanRooyen, author = {Lourens, Eliz-Mari and van Rooyen, G.C.}, title = {Automating Preliminary Column Force Calculations In Multy-Storey Buildings}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2986}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29864}, pages = {10}, abstract = {In civil engineering practice, values of column forces are often required before any detailed analysis of the structure has been performed. One of the reasons for this arises from the fast-tracked nature of the majority of construction projects: foundations are laid and base columns constructed whilst analysis and design are still in progress.
A need for quick results when feasibility studies are performed, or when the effect of design changes on supporting columns is evaluated, forms other situations in which column forces are required but where a detailed analysis to obtain these forces seems superfluous. Thus it was concluded that the development of an efficient tool for column force calculations, in which the extensive input required in a finite element analysis is to be avoided, would be highly beneficial. The automation of the process is achieved by making use of a Voronoi diagram. The Voronoi diagram is used a) for subdividing the floor into influence areas and b) as a basis for automatic load assignment. The implemented procedure is integrated into a CAD system in which the relevant geometric information of the floor, i.e. its shape and column layout, can be defined or uploaded. A brief description of the implementation is included. Some comparative results and considerations regarding the continuation of the study are given.}, subject = {Architektur }, language = {en} } @inproceedings{StoimenovaLinsDatchevaetal., author = {Stoimenova, Eugenia and Lins, Yvonne and Datcheva, Maria and Schanz, Tom}, title = {INVERSE MODELLING OF SOIL HYDRAULIC CHARACTERISTIC FUNCTIONS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2985}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29858}, pages = {12}, abstract = {In this paper we evaluate 2D models for the soil-water characteristic curve (SWCC) that incorporate the hysteretic nature of the relationship between volumetric water content θ and suction ψ. The models are based on nonlinear least squares estimation of the experimental data for sand. To estimate the dependent variable θ, the proposed models include two independent variables, suction and sensor reading position (depth d in the column test). The variable d represents not only the position where suction and water content are measured but also the initial suction distribution before each of the hydraulic loading test phases. Due to this, the proposed 2D regression models acquire the advantage that they (a) can be applied for the prediction of θ at any position along the column and (b) give the functional form of the scanning curves.}, subject = {Architektur }, language = {en} } @inproceedings{LehnerHartmann, author = {Lehner, Karlheinz and Hartmann, Dietrich}, title = {USING INTERVAL ANALYSIS FOR STRUCTURAL ENGINEERING PROBLEMS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2984}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29844}, pages = {10}, abstract = {Interval analysis extends the concept of computing with real numbers to computing with real intervals. As a consequence, some interesting properties appear, such as the delivery of guaranteed results or confirmed global values. The former property is given in the sense that unknown numerical values are known to lie in a computed interval. The latter property states that the global minimum value, for example, of a given function is also known to be contained in an interval (or a finite set of intervals). Depending upon the amount of computation effort invested in the calculation, we can often find tight bounds on these enclosing intervals. The downside of interval analysis, however, is the mathematically correct, but often very pessimistic size of the interval result.
This is particularly due to the so-called dependency effect, where a single variable is used multiple times in one calculation. When interval analysis is applied to structural analysis problems, this dependency has a great influence on the quality of the numerical results. In this paper, a brief background of interval analysis is presented, and it is shown how it can be applied to the solution of structural analysis problems. A discussion of possible improvements as well as an outlook on parallel computing is also given.}, subject = {Architektur }, language = {en} } @inproceedings{Kravchenko, author = {Kravchenko, Vladislav}, title = {NEW APPLICATIONS OF PSEUDOANALYTIC FUNCTION THEORY}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2983}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29835}, pages = {3}, abstract = {We show a close relation of the Schr{\"o}dinger equation and the conductivity equation to a Vekua equation of a special form. Under quite general conditions we propose an algorithm for the explicit construction of pseudoanalytic positive formal powers for the Vekua equation that, as a consequence, gives us a complete system of solutions for the Schr{\"o}dinger and the conductivity equations. Besides the construction of complete systems of exact solutions for the above mentioned second order equations and the Dirac equation, we discuss some other applications of pseudoanalytic function theory.}, subject = {Architektur }, language = {en} } @inproceedings{Krasnov, author = {Krasnov, Yakov}, title = {ANALYTIC FUNCTIONS IN OPERATOR VARIABLES AS SOLUTION TO PDES}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2982}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29822}, pages = {16}, abstract = {Procedures for the construction of general solutions for some classes of partial differential equations (PDEs) are proposed, and a symmetry operator approach to raising the orders of the polynomial solutions to linear PDEs is developed. We touch upon an ''operator analytic function theory'' as the solution of frequent classes of the equations of mathematical physics, when their symmetry operators form a vast enough space. The MAPLE© package programs for building the operator variables are also elaborated.}, subject = {Architektur }, language = {en} } @inproceedings{KoenigTauscher, author = {K{\"o}nig, Markus and Tauscher, Eike}, title = {BERECHNUNG VON BAUABL{\"A}UFEN MIT VERSCHIEDENEN AUSF{\"U}HRUNGSVARIANTEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2981}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29816}, pages = {11}, abstract = {Prozesse im Bauingenieurwesen sind komplex und beinhalten eine große Anzahl verschiedener Aufgaben mit vielen logischen Abh{\"a}ngigkeiten. Basierend auf diesen projektspezifischen Abh{\"a}ngigkeiten wird gew{\"o}hnlich ein Bauablaufplan manuell erstellt. In der Regel existieren mehrere Varianten und somit alternative Bauabl{\"a}ufe, um ein Projekt zu realisieren. Welche dieser Ausf{\"u}hrungsvarianten zur praktischen Anwendung kommt, wird durch den jeweiligen Projektmanager bestimmt.
Falls {\"A};nderungen oder St{\"o}rungen w{\"a}hrend des Bauablaufs auftreten, m{\"u}ssen die davon betroffenen Aufgaben und Abl{\"a}ufe per Hand modifiziert und alternative Aufgaben sowie Abl{\"a}ufe stattdessen ausgef{\"u}hrt werden. Diese Vorgehensweise ist oft sehr aufw{\"a}ndig und teuer. Aktuelle Forschungsans{\"a}tze besch{\"a}ftigen sich mit der automatischen Generierung von Bauabl{\"a}ufen. Grundlage sind dabei Aufgaben mit ihren erforderlichen Voraussetzungen und erzeugten Ergebnissen. Im Rahmen dieses Beitrags wird eine Methodik vorgestellt, um Bauabl{\"a}ufe mit Ausf{\"u}hrungsvarianten in Form von Workflow-Netzen zu jeder Zeit berechnen zu k{\"o}nnen. Die vorgestellte Methode wird anhand eines Beispiels aus dem Straßenbau schematisch dargestellt.}, subject = {Architektur }, language = {de} } @inproceedings{KoenigLang, author = {K{\"o}nig, Markus and Lang, H.}, title = {ANWENDUNG DES CASE-BASED REASONING BEI DER ERMITTLUNG VON VARIANTEN F{\"u}R DEN OBERBAU VON VERKEHRSFL{\"A}CHEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2980}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29808}, pages = {9}, abstract = {F{\"u}r die Ausf{\"u}hrung des Oberbaus von Verkehrsfl{\"a}chen existiert in Abh{\"a}ngigkeit von projektspezifischen Voraussetzungen eine Vielzahl von verschiedenen Varianten. Aufgrund von Erfahrungen der Projektplaner werden bei {\"a}hnlichen Voraussetzungen h{\"a}ufig gleichartige Ausf{\"u}hrungsvarianten gew{\"a}hlt. Um eine m{\"o}gliche L{\"o}sungsvariante f{\"u}r den Straßenoberbau zu erhalten, sollten daher nicht nur die gesetzlichen Richtlinien sondern auch bereits beendete Projekte ber{\"u}cksichtigt werden. Im Rahmen eines Wissenschaftlichen Kollegs an der Bauhaus-Universit{\"a}t Weimar wurde die Anwendung des Case-Based Reasoning f{\"u}r die Auswahl von Ausf{\"u}hrungsvarianten f{\"u}r den Straßenoberbau untersucht. In diesem Beitrag werden die grundlegenden Konzepte des Case-Based Reasoning und die Bestimmung von {\"a}hnlichen Varianten anhand einfacher Beispiele aus dem Straßenoberbau dargestellt.}, subject = {Architektur }, language = {de} } @inproceedings{Knyziak, author = {Knyziak, Piotr}, title = {ANALYSIS THE TECHNICAL STATE FOR LARGE-PANEL RESIDENTIAL BUILDINGS BEHIND ASSISTANCE OF ARTIFICIAL NEURAL NETWORKS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2979}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29792}, pages = {9}, abstract = {This paper presents two new methods for analysis of a technical state of large-panel residential buildings. The first method is based on elements extracted from the classical methods and on data about repairs and modernization collected from building documentations. The technical state of a building is calculated as a sum of several groups of elements defining the technical state. The deterioration in this method depends on: - time, which has passed since last repair of element or time which has passed since construction, - estimate of the state of element groups which can be determined on basis of yearly controls. This is a new unique method. it is easy to use, does not need expertise. The required data could be extracted easily from building documentations. For better accuracy the data from building inspections should be applied (in Poland inspections are made every year). 
The second method is based on processing the extracted data by means of artificial neural networks. The aim is to train the artificial neural network configurations on a set of data containing values of the technical state and information about building repairs in recent years (or other information and building parameters), and then to analyse new buildings with the trained neural network. The second benefit of using artificial neural networks is the reduction of the number of parameters. Instead of more than 40 parameters describing a building, about 6-12 are usually sufficient for satisfactory accuracy. This method may have lower accuracy, but it is less prone to data errors.}, subject = {Architektur }, language = {en} } @inproceedings{KnauerDammeierMeffert, author = {Knauer, Uwe and Dammeier, T. and Meffert, Beate}, title = {THE STRUCTURE OF ROAD TRAFFIC SCENES AS REVEALED BY UNSUPERVISED ANALYSIS OF THE TIME AVERAGED OPTICAL FLOW}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2978}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29780}, pages = {9}, abstract = {The Lucas-Kanade tracker has proven to be an efficient and accurate method for calculation of the optical flow. However, this algorithm can reliably track only suitable image features like corners and edges. Therefore, the optical flow can only be calculated for a few points in each image, resulting in sparse optical flow fields. Accumulation of these vectors over time is a suitable method to retrieve a dense motion vector field. However, the accumulation process limits application of the proposed method to fixed camera setups. Here, a histogram-based approach is favored to allow more than a single typical flow vector per pixel. The resulting vector field can be used to detect roads and prescribed driving directions, which constrain object movements. The motion structure can be modeled as a graph. The nodes represent entry and exit points for road users as well as crossings, while the edges represent typical paths.}, subject = {Architektur }, language = {en} } @inproceedings{Klingert, author = {Klingert, Maik}, title = {THE USAGE OF IMAGE PROCESSING METHODS FOR INTERPRETATION OF THERMOGRAPHY DATA}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2977}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29777}, pages = {13}, abstract = {For the assessment of old buildings, thermographic analysis aided by infrared cameras has been widely employed nowadays. Image processing and evaluation can be economically practicable only if the image evaluation can also be automated to the largest extent. For that reason, methods of computer vision are presented in this paper to evaluate thermal images. To detect typical thermal image elements, such as thermal bridges and lintels, in thermal images or gray value images, methods of digital image processing have been applied, for which numerical procedures are available to transform, modify and encode images. At the same time, image processing can be regarded as a multi-stage process. In order to be able to accomplish the process of image analysis from image formation through perfecting and segmentation to categorization, appropriate functions must be implemented.
For this purpose, different measuring procedures and methods for automated detection and evaluation have been tested.}, subject = {Architektur }, language = {en} } @inproceedings{KlawitterOstrowski, author = {Klawitter, Arne and Ostrowski, M.}, title = {INTEGRATED RAINFALL RUNOFF MODELLING IN SMALL URBANIZED CATCHMENTS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2976}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29767}, pages = {12}, abstract = {A concept for integrated modeling of urban and rural hydrology is introduced. The concept allows for simulations on the catchment scale as well as on the local scale. It is based on a two-layer approach which facilitates the parallel coupling of a catchment hydrology model with an urban hydrology model, considering the interactions between the two systems. The concept has been implemented in a computer model combining a grid-based distributed hydrological catchment model and a hydrological urban stormwater model based on elementary units. The combined model provides a flexible solution for time and spatial scale integration and allows separate water balances to be calculated for urban and rural hydrology. Furthermore, it is GIS-based, which allows for easy and accurate geo-referencing of urban overflow structures, which are considered as points of interaction between the two hydrologic systems. Due to the two-layer approach, programs of measures can be incorporated in each system separately. The capabilities of the combined model have been tested on a hypothetical test case and a real-world application. It could be shown that the model is capable of accurately quantifying the effects of urbanization in a catchment. The effects of urbanization can be analyzed at the catchment outlet, but can also be traced back to their origins, due to the geo-referencing of urban overflow structures. This is a major advantage over conventional hydrological catchment models for the analysis of land use changes.}, subject = {Architektur }, language = {en} } @inproceedings{Klauer, author = {Klauer, Thomas}, title = {MOBILE FACILITY MANAGEMENT ZUR INSPEKTION UND INSTANDHALTUNG VON INGENIEURBAUWERKEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2975}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29759}, pages = {10}, abstract = {In diesem Beitrag wird eine mobile Software-Komponente zur Vor-Ort-Unterst{\"u}tzung von Bauwerkspr{\"u}fungen gem{\"a}ß DIN 1076 „Ingenieurbauwerke im Zuge von Strassen und Wegen, {\"U}berwachung und Pr{\"u}fung" vorgestellt, welche sich im praktischen Einsatz bei der Hochbahn AG Hamburg befindet. Mit Hilfe dieses Werkzeugs kann die Aktivit{\"a}t am Bauwerk in den gesamten softwaregest{\"u}tzten Gesch{\"a}ftsprozess der Bauwerksinstandhaltung integriert und somit die Bearbeitungszeit einer Bauwerkspr{\"u}fung von der Vorbereitung bis zur Pr{\"u}fbericht-Erstellung reduziert werden. Die Technologie des Mobile Computing wird unter Ber{\"u}cksichtigung spezieller fachlicher Randbedingungen, wie z.B. dem Einsatzort unter freiem Himmel, erl{\"a}utert und es werden Methoden zur effizienten Datenerfassung mit Stift und Sprache vorgestellt und bewertet.
Ferner wird die Einschr{\"a}nkung der Hardware durch die geringere Gr{\"o}ße der Endger{\"a}te, die sich durch die Bedingung der Mobilit{\"a}t ergibt, untersucht.}, subject = {Architektur }, language = {de} } @inproceedings{Kisil, author = {Kisil, Vladimir}, title = {FILLMORE-SPRINGER-CNOPS CONSTRUCTION IMPLEMENTED IN GINAC}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2974}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29744}, pages = {103}, abstract = {This is an implementation of the Fillmore-Springer-Cnops construction (FSCc) based on the Clifford algebra capabilities of the GiNaC computer algebra system. FSCc linearises the linear-fractional action of the M{\"o}bius group. This turns out to be very useful in several theoretical and applied fields, including engineering. The core of this realisation of FSCc is done for an arbitrary dimension, while a subclass for two-dimensional cycles adds some 2D-specific routines, including visualisation to PostScript files through the MetaPost or Asymptote software. This library is a backbone of many results published in, which serve as illustrations of its usage. It can be ported (with various levels of required changes) to other CAS with Clifford algebra capabilities.}, subject = {Architektur }, language = {en} }