@inproceedings{Vieira, author = {Vieira, Nelson}, title = {SOME RESULTS IN FRACTIONAL CLIFFORD ANALYSIS}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2825}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28256}, pages = {6}, abstract = {What is nowadays called (classic) Clifford analysis consists in the establishment of a function theory for functions belonging to the kernel of the Dirac operator. While such functions can very well describe problems of a particle with internal SU(2)-symmetries, higher order symmetries are beyond this theory. Although many modifications (such as Yang-Mills theory) were suggested over the years, they could not address the principal problem: the need for an n-fold factorization of the d'Alembert operator. In this paper we present the basic tools of a fractional function theory in higher dimensions, for the transport operator (alpha = 1/2), by means of a fractional correspondence to the Weyl relations via fractional Riemann-Liouville derivatives. A Fischer decomposition, fractional Euler and Gamma operators, a monogenic projection, and basic fractional homogeneous powers are constructed.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{FerreiraVieira, author = {Ferreira, Milton dos Santos and Vieira, Nelson}, title = {EIGENFUNCTIONS AND FUNDAMENTAL SOLUTIONS FOR THE FRACTIONAL LAPLACIAN IN 3 DIMENSIONS}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2796}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-27968}, pages = {6}, abstract = {Recently there has been a surge of interest in PDEs involving fractional derivatives in different fields of engineering. In this extended abstract we present some of the results developed in [3]. We compute the fundamental solution for the three-parameter fractional Laplace operator Δ by transforming the eigenfunction equation into an integral equation and applying the method of separation of variables. The obtained solutions are expressed in terms of Mittag-Leffler functions.
For more details we refer the interested reader to [3], where an operational approach based on two Laplace transforms is also presented.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{BrackxDeSchepperSommen, author = {Brackx, Fred and De Schepper, Nele and Sommen, Frank}, title = {Clifford-Hermite and Two-Dimensional Clifford-Gabor Filters For Early Vision}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2930}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29303}, pages = {22}, abstract = {Image processing has been much inspired by human vision, in particular with regard to early vision. The latter refers to the earliest stage of visual processing, responsible for the measurement of local structures such as points, lines, edges and textures in order to facilitate the subsequent interpretation of these structures in higher stages (known as high-level vision) of the human visual system. This low-level visual computation is carried out by cells of the primary visual cortex. The receptive field profiles of these cells can be interpreted as the impulse responses of the cells, which are then considered as filters. According to the Gaussian derivative theory, the receptive field profiles of the human visual system can be approximated quite well by derivatives of Gaussians. Two mathematical models suggested for these receptive field profiles are on the one hand the Gabor model and on the other hand the Hermite model, which is based on the analysis filters of the Hermite transform. The Hermite filters are derivatives of Gaussians, while Gabor filters, which are defined as harmonic modulations of Gaussians, provide a good approximation to these derivatives. It is important to note that, even if the Gabor model is more widely used than the Hermite model, the latter offers some advantages, such as being an orthogonal basis and providing a better match to experimental physiological data. In our earlier research both filter models, Gabor and Hermite, have been developed in the framework of Clifford analysis. Clifford analysis offers a direct, elegant and powerful generalization to higher dimension of the theory of holomorphic functions in the complex plane. In this paper we present the construction of the Hermite and Gabor filters, both in the classical and in the Clifford analysis framework. We also generalize the concept of complex Gaussian derivative filters to the Clifford analysis setting. Moreover, we present further properties of the Clifford-Gabor filters, such as their relationship with other types of Gabor filters and their localization in the spatial and in the frequency domain, formalized by the uncertainty principle.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeKnockDeSchepper, author = {Brackx, Fred and De Knock, B. and De Schepper, Hennie}, title = {A MULTI-DIMENSIONAL HILBERT TRANSFORM IN ANISOTROPIC CLIFFORD ANALYSIS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2929}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29297}, pages = {15}, abstract = {In earlier research, generalized multidimensional Hilbert transforms have been constructed in m-dimensional Euclidean space, in the framework of Clifford analysis.
Clifford analysis, centred around the notion of monogenic functions, may be regarded as a direct and elegant generalization to higher dimension of the theory of holomorphic functions in the complex plane. The considered Hilbert transforms, usually obtained as part of the boundary value of an associated Cauchy transform in m+1 dimensions, might be characterized as isotropic, since the metric in the underlying space is the standard Euclidean one. In this paper we adopt the idea of a so-called anisotropic Clifford setting, which leads to the introduction of a metric-dependent m-dimensional Hilbert transform, showing, at least formally, the same properties as the isotropic one. Since the Hilbert transform is an important tool in signal analysis, this metric-dependent setting has the advantage of allowing the co-ordinate system to be adjusted to possible preferential directions in the signals to be analyzed. A striking result to be mentioned is that the associated anisotropic (m+1)-dimensional Cauchy transform is no longer uniquely determined, but may stem from a diversity of (m+1)-dimensional "mother" metrics.}, subject = {Architektur }, language = {en} } @inproceedings{BrackxDeSchepperDeSchepperetal., author = {Brackx, Fred and De Schepper, Hennie and De Schepper, Nele and Sommen, Frank}, title = {HERMITIAN CLIFFORD-HERMITE WAVELETS}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2931}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29313}, pages = {13}, abstract = {The one-dimensional continuous wavelet transform is a successful tool for signal and image analysis, with applications in physics and engineering. Clifford analysis offers an appropriate framework for taking wavelets to higher dimension. In the usual orthogonal case Clifford analysis focusses on monogenic functions, i.e. null solutions of the rotation-invariant, vector-valued Dirac operator ∂, defined in terms of an orthogonal basis for the quadratic space Rm underlying the construction of the Clifford algebra R0,m. An intrinsic feature of this function theory is that it encompasses all dimensions at once, as opposed to a tensorial approach with products of one-dimensional phenomena. This has allowed for a very specific construction of higher-dimensional wavelets and the development of the corresponding theory, based on generalizations of classical orthogonal polynomials on the real line, such as the radial Clifford-Hermite polynomials introduced by Sommen. In this paper, we pass to the Hermitian Clifford setting, i.e. we let the same set of generators produce the complex Clifford algebra C2n (with even dimension), which we equip with a Hermitian conjugation and a Hermitian inner product. Hermitian Clifford analysis then focusses on the null solutions of two mutually conjugate Hermitian Dirac operators which are invariant under the action of the unitary group. In this setting we construct new Clifford-Hermite polynomials, starting in a natural way from a Rodrigues formula which now involves both Dirac operators mentioned. Due to the specific features of the Hermitian setting, four different types of polynomials are obtained, two types of even degree and two types of odd degree.
These polynomials are used to introduce a new continuous wavelet transform, after a thorough investigation of all necessary properties of the involved polynomials, the mother wavelet and the associated family of wavelet kernels.}, subject = {Architektur }, language = {en} } @inproceedings{SharmakScherer, author = {Sharmak, Wael and Scherer, Raimar J.}, title = {ADAPTABLE PROJECT MANAGEMENT PLANS USING CHANGE TEMPLATES-BASED APPROACH}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2888}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28886}, pages = {14}, abstract = {The uncertainty existing in the construction industry is greater than in other industries. Consequently, most construction projects do not go entirely as planned. The project management plan therefore needs to be adapted repeatedly within the project lifecycle to suit the actual project conditions. Generally, the risks of change in the project management plan are difficult to identify in advance, especially if these risks are caused by unexpected events such as human errors or changes in client preferences. The knowledge acquired from different sources is essential to identify probable deviations as well as to find proper solutions to the change risks faced. Hence, it is necessary to have a knowledge base that contains known solutions for the common exceptional cases that may cause changes in each construction domain. The ongoing research work presented in this paper uses the process modeling technique of Event-driven Process Chains to describe different patterns of structure changes in schedule networks. This results in several so-called "change templates". Under each template, different types of change risk/response pairs can be categorized and stored in a knowledge base. This knowledge base is described as an ontology model populated with reference construction process data. The implementation of the developed approach can be seen as an iterative scheduling cycle that will be repeated within the project lifecycle as new change risks surface. This can help to check the availability of ready solutions in the knowledge base for the situation at hand. Moreover, if the solution is adopted, CPSP ("Change Project Schedule Plan"), a prototype developed for the purpose of this research work, will be used to make the needed structure changes of the schedule network automatically, based on the change template. What-If scenarios can be implemented using the CPSP prototype in the planning phase to study the effect of specific situations without endangering the success of the project objectives. Hence, better designed and more maintainable project schedules can be achieved.}, subject = {Angewandte Informatik}, language = {en} } @phdthesis{Nikulla, author = {Nikulla, Susanne}, title = {Quality assessment of kinematical models by means of global and goal-oriented error estimation techniques}, publisher = {Verlag der Bauhaus-Universit{\"a}t Weimar}, address = {Weimar}, doi = {10.25643/bauhaus-universitaet.1616}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20120419-16161}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {117}, abstract = {Methods for model quality assessment aim to find the most appropriate model with respect to accuracy and computational effort for a structural system under investigation.
Model error estimation techniques can be applied for this purpose when kinematical models are investigated. These are counted among the class of white-box models, which means that the model hierarchy, and therewith the best model, is known. This thesis gives an overview of discretisation error estimators. Deduced from these, methods for model error estimation are presented. Their general goal is to predict the inaccuracies that are introduced by using the simpler model, without knowing the solution of a more complex model. This information can be used to steer an adaptive process. Techniques for linear and non-linear problems as well as for global and goal-oriented errors are introduced. The estimation of the error in local quantities is realised by solving a dual problem, which serves as a weight for the primal error. So far, such techniques have mainly been applied in material modelling and for dimensional adaptivity. Within the scope of this thesis, available model error estimators are adapted for application to kinematical models. Their applicability is tested regarding the question of whether a geometrically non-linear calculation is necessary or not. The analysis is limited to non-linear estimators due to the structure of the underlying differential equations. These methods often involve simplifications, e.g. linearisations. It is investigated to what extent such assumptions lead to meaningful results when applied to kinematical models.}, subject = {Model quality, Model error estimation, Kinematical model, Geometric non-linearity, Finite Element method}, language = {en} } @inproceedings{Musial, author = {Musial, Michal}, title = {THE INFLUENCE OF CRACKS AND OVERESTIMATION ERRORS ON THE DEFLECTION OF THE REINFORCED CONCRETE BEAMS IN THE RIGID FINITE ELEMENT METHOD}, series = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, booktitle = {Digital Proceedings, International Conference on the Applications of Computer Science and Mathematics in Architecture and Civil Engineering : July 20 - 22 2015, Bauhaus-University Weimar}, editor = {G{\"u}rlebeck, Klaus and Lahmer, Tom}, organization = {Bauhaus-Universit{\"a}t Weimar}, issn = {1611-4086}, doi = {10.25643/bauhaus-universitaet.2814}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170314-28142}, pages = {6}, abstract = {This article presents the Rigid Finite Element Method in the calculation of the deflection of reinforced concrete beams with cracks. Initially, this method was used in the shipbuilding industry. Later, it was adapted to calculations of homogeneous bar structures. In this method, rigid mass discs serve as the element model. In the plane layout, three generalized coordinates (two translational and one rotational) correspond to each disc. These discs are connected by elastic ties. The original idea is to take a discrete crack into account in the Rigid Finite Element Method. It consists in suitably reducing the rigidity of the rotational ties located at the spots where cracks occurred. The susceptibility of such a tie results from the flexural deformability of the element and the occurrence of the crack. As part of the numerical analyses, the influence of cracks on the total deflection of the beams was determined. Furthermore, the results of the calculations were compared with the results of the experiment.
Overestimations of the calculated deflections relative to the measured deflections were found. The article quantifies the overestimation and describes its causes.}, subject = {Angewandte Informatik}, language = {en} } @inproceedings{WittwerBecker, author = {Wittwer, Christof and Becker, Rainer}, title = {MODELLBASIERTE ERTRAGSKONTROLLE F{\"U}R PV ANLAGEN IN VERNETZTEN GEB{\"A}UDEN}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.3036}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-30363}, pages = {5}, abstract = {The subject of the paper is the realisation of a model-based efficiency control system for PV generators using a simulation model. A standard two-diode model of the PV generator is the basis of the ColSim model, which is implemented in ANSI C code for flexible code export. The algorithm is based on discretised U-I characteristics, which allows the calculation of string topologies with parallel and serial PV cells and modules. Shadowing effects can be modelled down to the cell configuration using polar horizon definitions. The simulation model was ported to a real-time environment to calculate the efficiency of a PV system. Embedded system technology allows networked operation and the integration of standard I/O devices. Further work focuses on the shadowing routine, which will be adapted to obtain the environmental conditions from real operation.}, subject = {Architektur }, language = {de} } @inproceedings{Geyer, author = {Geyer, Philipp}, title = {MODELS FOR MULTIDISCIPLINARY DESIGN OPTIMIZATION: AN EXEMPLARY OFFICE BUILDING}, editor = {G{\"u}rlebeck, Klaus and K{\"o}nke, Carsten}, organization = {Bauhaus-Universit{\"a}t Weimar}, doi = {10.25643/bauhaus-universitaet.2957}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20170327-29574}, pages = {10}, abstract = {The mathematical and technical foundations of optimization have been developed to a large extent. In the design of buildings, however, optimization is rarely applied because of the insufficient adaptation of this method to the needs of building design. The use of design optimization requires the consideration of all relevant objectives in an interactive and multidisciplinary process. Disciplines such as structural, lighting, and thermal engineering, architecture, and economics impose various objectives on the design. A good solution calls for a compromise between these often contradictory objectives. This presentation outlines a method for the application of Multidisciplinary Design Optimization (MDO) as a tool for the design of buildings. An optimization model is established considering the fact that in building design the non-numerical aspects are of greater importance than in other engineering disciplines. A component-based decomposition enables the designer to manage the non-numerical aspects in an interactive design optimization process. A fa{\c{c}}ade example demonstrates how the different disciplines interact and how the components integrate the disciplines in one optimization model. In this grid-based fa{\c{c}}ade example, the design variables switch between a discrete number of materials and construction types. For lighting and thermal engineering, architecture, and economics, analysis functions calculate the performance; utility functions serve as an important means for the evaluation, since not every increase or decrease of a physical value improves the design.
For experimental purposes, a genetic algorithm applied to the exemplary model demonstrates the use of optimization in this design case. A component-based representation first serves to manage non-numerical characteristics such as aesthetics. Furthermore, it complies with the usual fabrication methods in building design and with object-oriented data handling in CAD. Therefore, components provide an important basis for an interactive MDO process in building design.}, subject = {Architektur }, language = {en} }