@article{Knecht, author = {Knecht, Katja}, title = {Augmented Urban Model: Ein Tangible User Interface zur Unterst{\"u}tzung von Stadtplanungsprozessen}, doi = {10.25643/bauhaus-universitaet.2674}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160823-26740}, abstract = {Im architektonischen und st{\"a}dtebaulichen Kontext erf{\"u}llen physische und digitale Modelle aufgrund ihrer weitgehend komplement{\"a}ren Eigenschaften und Qualit{\"a}ten unterschiedliche, nicht verkn{\"u}pfte Aufgaben und Funktionen im Entwurfs- und Planungsprozess. W{\"a}hrend physische Modelle vor allem als Darstellungs- und Kommunikationsmittel aber auch als Arbeitswerkzeug genutzt werden, unterst{\"u}tzen digitale Modelle dar{\"u}ber hinaus die Evaluation eines Entwurfs durch computergest{\"u}tzte Analyse- und Simulationstechniken. Analysiert wurden im Rahmen der in diesem Arbeitspapier vorgestellten Arbeit neben dem Einsatz des Modells als analogem und digitalem Werkzeug im Entwurf die Bedeutung des Modells f{\"u}r den Arbeitsprozess sowie Vorbilder aus dem Bereich der Tangible User Interfaces mit Bezug zu Architektur und St{\"a}dtebau. Aus diesen Betrachtungen heraus wurde ein Prototyp entwickelt, das Augmented Urban Model, das unter anderem auf den fr{\"u}hen Projekten und Forschungsans{\"a}tzen aus dem Gebiet der Tangible User Interfaces aufsetzt, wie dem metaDESK von Ullmer und Ishii und dem Urban Planning Tool Urp von Underkoffler und Ishii. Das Augmented Urban Model zielt darauf ab, die im aktuellen Entwurfs- und Planungsprozess fehlende Br{\"u}cke zwischen realen und digitalen Modellwelten zu schlagen und gleichzeitig eine neue tangible Benutzerschnittstelle zu schaffen, welche die Manipulation von und die Interaktion mit digitalen Daten im realen Raum erm{\"o}glicht.}, subject = {tangible user interface}, language = {de} } @book{OPUS4-1569, title = {Alles digital? 
E-Books in Studium und Forschung : Weimarer EDOC-Tage 2011}, editor = {Maier, Matthias and Simon-Ritz, Frank}, publisher = {Verlag der Bauhaus-Universit{\"a}t}, address = {Weimar}, isbn = {978-3-86068-454-2}, doi = {10.25643/bauhaus-universitaet.1569}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20120223-15699}, pages = {ca. 110}, abstract = {Es ist ein Bild aus alten Tagen: ein wissbegieriger Student, auf der Suche nach fundierter wissenschaftlicher Information, begibt sich an den heiligsten Ort aller B{\"u}cher - die Universit{\"a}tsbibliothek. Doch seit einiger Zeit tummeln sich Studierende nicht mehr nur in Bibliotheken, sondern auch immer h{\"a}ufiger im Internet. Sie suchen und finden dort digitale B{\"u}cher, sogenannte E-Books. Wie l{\"a}sst sich der Wandel durch den Einzug des E-Books in das etablierte Forschungssystem beschreiben, welche Konsequenzen lassen sich daraus ablesen und wird schließlich alles digital, sogar die Bibliothek? Diesen Fragen geht ein elfk{\"o}pfiges Expertenteam aus Deutschland und der Schweiz w{\"a}hrend der zweit{\"a}gigen Konferenz auf den Grund. Bei den Weimarer E-DOC-Tagen geht es nun um die Ver{\"a}nderung des institutionellen Gef{\"u}ges rund um das digitale Buch. Denn traditionell sind Verlage und Bibliotheken wichtige Bestandteile der Wissensversorgung in Studium und Lehre. Doch mit dem Aufkommen des E-Books verlagert sich die Recherche mehr und mehr ins Internet. Die Suchmaschine Google tritt als neuer Konkurrent der klassischen Bibliotheksrecherche auf. Aber auch Verlage m{\"u}ssen verst{\"a}rkt auf die neuen Herausforderungen eines digitalen Buchmarktes reagieren. In Kooperation mit der Universit{\"a}tsbibliothek und dem Master-Studiengang Medienmanagement diskutieren Studierende, Wissenschaftler, Bibliothekare und Verleger, wie das E-Book unseren Umgang mit Literatur ver{\"a}ndert. 
Der Tagungsband stellt alle Perspektiven und Ergebnisse zum Nachlesen zusammen.}, subject = {Elektronisches Buch}, language = {de} } @article{KoehlerKoenig, author = {K{\"o}hler, Hermann and K{\"o}nig, Reinhard}, title = {Aktionsr{\"a}ume in Dresden}, doi = {10.25643/bauhaus-universitaet.2672}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160822-26726}, abstract = {In vorliegender Studie werden die Aktionsr{\"a}ume von Befragten in Dresden {\"u}ber eine standardisierte Befragung (n=360) untersucht. Die den Aktionsr{\"a}umen zugrundeliegenden Aktivit{\"a}ten werden unterschieden in Einkaufen f{\"u}r den t{\"a}glichen Bedarf, Ausgehen (z.B. in Caf{\'e}, Kneipe, Gastst{\"a}tte), Erholung im Freien (z.B. spazieren gehen, Nutzung von Gr{\"u}nanlagen) und private Geselligkeit (z.B. Feiern, Besuch von Verwandten/Freunden). Der Aktionsradius wird unterschieden in Wohnviertel, Nachbarviertel und sonstiges weiteres Stadtgebiet. Um aus den vier betrachteten Aktivit{\"a}ten einen umfassenden Kennwert f{\"u}r den durchschnittlichen Aktionsradius eines Befragten zu bilden, wird ein Modell f{\"u}r den Kennwert eines Aktionsradius entwickelt. Die Studie kommt zu dem Ergebnis, dass das Alter der Befragten einen signifikanten - wenn auch geringen - Einfluss auf den Aktionsradius hat. Das Haushaltsnettoeinkommen hat einen mit Einschr{\"a}nkung signifikanten, ebenfalls geringen Einfluss auf allt{\"a}gliche Aktivit{\"a}ten der Befragten.}, subject = {Aktionsraumforschung}, language = {de} } @article{Koehler, author = {K{\"o}hler, Hermann}, title = {Ergebnisse der Befragung zu Wohnstandortpr{\"a}ferenzen von Lebensweltsegmenten in Dresden}, doi = {10.25643/bauhaus-universitaet.2670}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160822-26704}, abstract = {In vorliegender Studie werden die Wohnstandortpr{\"a}ferenzen der Sinus-Milieugruppen in Dresden {\"u}ber eine standardisierte Befragung (n=318) untersucht. 
Es wird unterschieden zwischen handlungsleitenden Wohnstandortpr{\"a}ferenzen, die durch Anhaltspunkte auf der Handlungsebene st{\"a}rker in Betracht gezogen werden sollten, und Wohnstandortpr{\"a}ferenzen, welche eher orientierenden Charakter haben. Die Wohnstandortpr{\"a}ferenzen werden untersucht anhand der Kategorien Ausstattung/Zustand der Wohnung/des n{\"a}heren Wohnumfeldes, Versorgungsstruktur, soziales Umfeld, Baustrukturtyp, Ortsgebundenheit sowie des Aspektes des Images eines Stadtviertels. Um die Befragten den Sinus-Milieugruppen zuordnen zu k{\"o}nnen, wird ein Lebensweltsegment-Modell entwickelt, welches den Anspruch hat, die Sinus-Milieugruppen in der Tendenz abzubilden. Die Studie kommt zu dem Ergebnis, dass die Angeh{\"o}rigen der verschiedenen Lebensweltsegmente in jeder Kategorie - wenn auch z.T. auf geringerem Niveau - signifikante Unterschiede in der Bewertung einzelner Wohnstandortpr{\"a}ferenzen aufweisen.}, subject = {Milieuforschung}, language = {de} } @phdthesis{Azari, author = {Azari, Banafsheh}, title = {Bidirectional Texture Functions: Acquisition, Rendering and Quality Evaluation}, doi = {10.25643/bauhaus-universitaet.3779}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20180820-37790}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {As one of its primary objectives, Computer Graphics aims at the simulation of fabrics' complex reflection behaviour. Characteristic surface reflectance of fabrics, such as highlights, anisotropy or retro-reflection arise the difficulty of synthesizing. This problem can be solved by using Bidirectional Texture Functions (BTFs), a 2D-texture under various light and view direction. But the acquisition of Bidirectional Texture Functions requires an expensive setup and the measurement process is very time-consuming. Moreover, the size of BTF data can range from hundreds of megabytes to several gigabytes, as a large number of high resolution pictures have to be used in any ideal cases. 
Furthermore, the three-dimensional textured models rendered through BTF rendering method are subject to various types of distortion during acquisition, synthesis, compression, and processing. An appropriate image quality assessment scheme is a useful tool for evaluating image processing algorithms, especially algorithms designed to leave the image visually unchanged. In this contribution, we present and conduct an investigation aimed at locating a robust threshold for downsampling BTF images without loosing perceptual quality. To this end, an experimental study on how decreasing the texture resolution influences perceived quality of the rendered images has been presented and discussed. Next, two basic improvements to the use of BTFs for rendering are presented: firstly, the study addresses the cost of BTF acquisition by introducing a flexible low-cost step motor setup for BTF acquisition allowing to generate a high quality BTF database taken at user-defined arbitrary angles. Secondly, the number of acquired textures to the perceptual quality of renderings is adapted so that the database size is not overloaded and can fit better in memory when rendered. Although visual attention is one of the essential attributes of HVS, it is neglected in most existing quality metrics. In this thesis an appropriate objective quality metric based on extracting visual attention regions from images and adequate investigation of the influence of visual attention on perceived image quality assessment, called Visual Attention Based Image Quality Metric (VABIQM), has been proposed. 
The novel metric indicates that considering visual saliency can offer significant benefits with regard to constructing objective quality metrics to predict the visible quality differences in images rendered by compressed and non-compressed BTFs and also outperforms straightforward existing image quality metrics at detecting perceivable differences.}, subject = {Wahrnehmung}, language = {en} } @article{TonnTatarin, author = {Tonn, Christian and Tatarin, Ren{\'e}}, title = {Volumen Rendering in der Architektur: {\"U}berlagerung und Kombination von 3D Voxel Volumendaten mit 3D Geb{\"a}udemodellen}, doi = {10.25643/bauhaus-universitaet.2671}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160822-26718}, abstract = {Volumerendering ist eine Darstellungstechnik, um verschiedene r{\"a}umliche Mess- und Simulationsdaten anschaulich, interaktiv grafisch darzustellen. Im folgenden Beitrag wird ein Verfahren vorgestellt, mehrere Volumendaten mit einem Architekturfl{\"a}chenmodell zu {\"u}berlagern. Diese komplexe Darstellungsberechnung findet mit hardwarebeschleunigten Shadern auf der Grafikkarte statt. Im Beitrag wird hierzu der implementierte Softwareprototyp "VolumeRendering" vorgestellt. Neben dem interaktiven Berechnungsverfahren wurde ebenso Wert auf eine nutzerfreundliche Bedienung gelegt. Das Ziel bestand darin, eine einfache Bewertung der Volumendaten durch Fachplaner zu erm{\"o}glichen. Durch die {\"U}berlagerung, z. B. verschiedener Messverfahren mit einem Fl{\"a}chenmodell, ergeben sich Synergien und neue Auswertungsm{\"o}glichkeiten. 
Abschließend wird anhand von Beispielen aus einem interdisziplin{\"a}ren Forschungsprojekt die Anwendung des Softwareprototyps illustriert.}, subject = {Multiple Volume Rendering}, language = {de} } @phdthesis{Gerold, author = {Gerold, Fabian}, title = {Konzepte zur interaktiven Entwurfsraum-Exploration im Tragwerksentwurf}, doi = {10.25643/bauhaus-universitaet.2153}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20140408-21532}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {101}, abstract = {Der Entwurfsraum f{\"u}r den Entwurf eines Tragwerks ist ein n-dimensionaler Raum, der aus allen freien Parametern des Modells aufgespannt wird. Traditionell werden nur wenige Punkte dieses Raumes durch eine numerische (computergest{\"u}tzte) Simulation evaluiert, meist auf Basis der Finite-Elemente-Methode. Mehrere Faktoren f{\"u}hren dazu, dass heute oft viele Revisionen eines Simulationsmodells durchlaufen werden: Zum einen ergeben sich oft Planungs{\"a}nderungen, zum anderen ist oft die Untersuchung von Planungsalternativen und die Suche nach einem Optimum w{\"u}nschenswert. In dieser Arbeit soll f{\"u}r ein vorhandenes Finite-Elemente-Framework die sequentielle Datei-Eingabeschnittstelle durch eine Netzwerkschnittstelle ersetzt werden, die den Erfordernissen einer interaktiven Arbeitsweise entspricht. So erlaubt die hier konzipierte Schnittstelle interaktive, inkrementelle Modell{\"a}nderungen sowie Status- und Berechnungsergebnis-Abfragen durch eine bidirektionale Schnittstelle. Die Kombination aus interaktiver numerischer Simulation und Interoperabilit{\"a}t durch die Anwendung von Konzepten zur Bauwerks-Informations-Modellierung im Tragwerksentwurf ist Ziel dieser Dissertation. 
Die Beschreibung der Konzeption und prototypischen Umsetzung ist Gegenstand der schriftlichen Arbeit.}, subject = {Interaktive numerische Simulation}, language = {de} } @misc{SimonRitzLiehr, author = {Simon-Ritz, Frank and Liehr, Harald S.}, title = {Das Urheberrecht - ein Pulverfass f{\"u}r Lehre und Forschung}, doi = {10.25643/bauhaus-universitaet.1775}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20121130-17753}, abstract = {Radiodiskussion bei bauhaus.fm am 5. November 2012. Harald S. Liehr ist Lektor und Leiter der Niederlassung Weimar des B{\"o}hlau-Verlags (Wien / K{\"o}ln / Weimar), Dr. Frank Simon-Ritz ist Direktor der Universit{\"a}tsbibliothek der Bauhaus-Universit{\"a}t Weimar. Die Fragen stellten Ren{\'e} Tauschke und Jean-Marie Schaldach.}, subject = {Urheberrecht}, language = {de} } @article{Kalisch, author = {Kalisch, Dominik}, title = {Wissen wer wo wohnt}, doi = {10.25643/bauhaus-universitaet.2669}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160822-26695}, abstract = {In cities people live together in neighbourhoods. Here they can find the infrastructure they need, starting with shops for the daily purpose to the life-cycle based infrastructures like kindergartens or nursing homes. But not all neighbourhoods are identical. The infrastructure mixture varies from neighbourhood to neighbourhood, but different people have different needs which can change e.g. based on the life cycle situation or their affiliation to a specific milieu. We can assume that a person or family tries to settle in a specific neighbourhood that satisfies their needs. So, if the residents are happy with a neighbourhood, we can further assume that this neighbourhood satisfies their needs. The socio-economic panel (SOEP) of the German Institute for Economy (DIW) is a survey that investigates the economic structure of the German population. 
Every four years one part of this survey includes questions about what infrastructures can be found in the respondent's neighbourhood and the satisfaction of the respondent with their neighbourhood. Further, it is possible to add a milieu estimation for each respondent or household. This gives us the possibility to analyse the typical neighbourhoods in German cities as well as the infrastructure profiles of the different milieus. Therefore, we take the environment variables from the dataset and recode them into a binary variable - whether an infrastructure is available or not. According to Faust (2005), these sets can also be understood as a network of actors in a neighbourhood, which share two, three or more infrastructures. Like these networks, this neighbourhood network can also be visualized as a bipartite affiliation network and therefore analysed using correspondence analysis. We will show how a neighbourhood analysis will benefit from an upstream correspondence analysis and how this could be done. 
We will also present and discuss the results of such an analysis.}, subject = {urban planning}, language = {de} } @article{HahlbrockBraunHeideletal., author = {Hahlbrock, David and Braun, Michael and Heidel, Robin and Lemmen, Patrik and Boumann, Roland and Bruckmann, Tobias and Schramm, Dieter and Helm, Volker and Willmann, Jan}, title = {Cable Robotic 3D-printing: additive manufacturing on the construction site}, series = {Construction Robotics}, volume = {2022}, journal = {Construction Robotics}, publisher = {Springer International Publishing}, address = {Cham}, doi = {10.1007/s41693-022-00082-3}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20230124-48791}, pages = {1 -- 14}, abstract = {This paper outlines an important step in characterizing a novel field of robotic construction research where a cable-driven parallel robot is used to extrude cementitious material in three-dimensional space, and thus offering a comprehensive new approach to computational design and construction, and to robotic fabrication at larger scales. Developed by the Faculty of Art and Design at Bauhaus-University Weimar (Germany), the faculty of Architecture at the University of Applied Sciences Dortmund (Germany) and the Chair of Mechatronics at the University of Duisburg-Essen (Germany), this approach offers unique advantages over existing additive manufacturing methods: the system is easily transportable and scalable, it does not require additional formwork or scaffolding, and it offers digital integration and informational oversight across the entire design and building process. This paper considers 1) key research components of cable robotic 3D-printing (such as computational design, material exploration, and robotic control), and 2) the integration of these parameters into a unified design and building process. 
The demonstration of the approach at full-scale is of particular concern.}, subject = {Robotik}, language = {en} } @article{Stadler, author = {Stadler, Max}, title = {Gr{\"u}nderzeit. Hightech und Alternativen der Wissenschaft in West-Berlin}, series = {NTM Zeitschrift f{\"u}r Geschichte der Wissenschaften, Technik und Medizin}, volume = {2022}, journal = {NTM Zeitschrift f{\"u}r Geschichte der Wissenschaften, Technik und Medizin}, number = {30 (2022)}, publisher = {Birkh{\"a}user}, address = {Basel}, doi = {10.1007/s00048-022-00352-9}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20230124-48800}, pages = {599--632}, abstract = {Zu den diversen Unternehmungen sozialbewegter „Gegenwissenschaft", die um 1980 auf der Bildfl{\"a}che der BRD erschienen, z{\"a}hlte der 1982 gegr{\"u}ndete Berliner Wissenschaftsladen e. V., kurz WILAB - eine Art „alternatives" Spin-off der Technischen Universit{\"a}t Berlin. Der vorliegende Beitrag situiert die Ausgr{\"u}ndung des „Ladens" im Kontext zeitgen{\"o}ssischer Fortschritte der (regionalen) Forschungs- und Technologiepolitik. Gezeigt wird, wie der deindustrialisierenden Inselstadt, qua „innovationspolitischer" Gegensteuerung, dabei sogar eine gewisse Vorreiterrolle zukam: {\"u}ber die Stadtgrenzen hinaus sichtbare Neuerungen wie die Gr{\"u}ndermesse BIG TECH oder das 1983 er{\"o}ffnete Berliner Innovations- und Gr{\"u}nderzentrum (BIG), der erste „Incubator" [sic] der BRD, etwa gingen auf das Konto der 1977/78 lancierten Technologie-Transferstelle der TU Berlin, TU-transfer. Anders gesagt: tendenziell bekam man es hier nun mit Verh{\"a}ltnissen zu tun, die immer weniger mit den Tr{\"a}umen einer „kritischen", nicht-fremdbestimmten (Gegen‑)Wissenschaft kompatibel waren. Latent kontr{\"a}r zur historiographischen Prominenz des wissenschaftskritischen Zeitgeists fristeten „alternativen" Zielsetzungen verpflichtete Unternehmungen wie „WILAB" ein relativ marginalisiertes Nischendasein. 
Dennoch wirft das am WILAB verfolgte, so gesehen wenig aussichtsreiche Anliegen, eine andere, n{\"a}mlich „humanere" Informationstechnologie in die Wege zu leiten, ein instruktives Licht auf die Aufbr{\"u}che „unternehmerischer" Wissenschaft in der BRD um 1980.}, subject = {Berlin }, language = {de} } @phdthesis{Dang, author = {Dang, Trang}, title = {Automated Detailing of 4D Schedules}, doi = {10.25643/bauhaus-universitaet.2310}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20141006-23103}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {120}, abstract = {The increasing success of BIM (Building Information Model) and the emergence of its implementation in 3D construction models have paved a way for improving scheduling process. The recent research on application of BIM in scheduling has focused on quantity take-off, duration estimation for individual trades, schedule visualization, and clash detection. Several experiments indicated that the lack of detailed planning causes about 30\% non-productive time and stacking of trades. However, detailed planning still has not been implemented in practice despite receiving a lot of interest from researchers. The reason is associated with the huge amount and complexity of input data. In order to create a detailed planning, it is time consuming to manually decompose activities, collect and calculate the detailed information in relevant. Moreover, the coordination of detailed activities requires much effort for dealing with their complex constraints. This dissertation aims to support the generation of detailed schedules from a rough schedule. 
It proposes a model for automated detailing of 4D schedules by integrating BIM, simulation and Pareto-based optimization.}, subject = {Simulation}, language = {en} } @misc{CarvalhoDaher, type = {Master Thesis}, author = {Carvalho Daher, Cesar Felipe}, title = {Horoskopos: a virtual planetarium for astrology}, doi = {10.25643/bauhaus-universitaet.4718}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220930-47181}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {104}, abstract = {This report details the development of Horoskopos, a virtual planetarium for astrology. This project was an attempt to develop a learning tool for studying astrological concepts as connected to observational astronomy. The premise that astrology and observational astronomy were once inseparable from each other in ancient times guided the conceptualization of this tool as an interactive planetarium. The main references were existing software and applications for visualization in astrology and astronomy. Professional astrology teachers were consulted in order to understand better the state of astrological teaching and learning, as well as existing tools and practice. Horoskopos was built using the Unity3D development interface, which is based on the C\# programming language. It also relied on the Swiss Ephemeris coding interface from Astrodienst. The development process was experimental and many of the needed skills were developed as needed. Usability tests were performed as new features were added to the interface. The final version of Horoskopos is fully usable, with many interactive visualization features and a defined visual identity. 
It was validated together with professional astrologers for its effectiveness in concept and visualization.}, subject = {Mediendesign}, language = {en} } @inproceedings{KoenigTreyerSchmitt, author = {K{\"o}nig, Reinhard and Treyer, Lukas and Schmitt, Gerhard}, title = {Graphical smalltalk with my optimization system for urban planning tasks}, series = {31st eCAADe Conference - Volume 2}, booktitle = {31st eCAADe Conference - Volume 2}, publisher = {TU Delft}, address = {Delft, Netherlands}, doi = {10.25643/bauhaus-universitaet.2517}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160121-25171}, pages = {195 -- 203}, abstract = {Based on the description of a conceptual framework for the representation of planning problems on various scales, we introduce an evolutionary design optimization system. This system is exemplified by means of the generation of street networks with locally defined properties for centrality. We show three different scenarios for planning requirements and evaluate the resulting structures with respect to the requirements of our framework. Finally the potentials and challenges of the presented approach are discussed in detail.}, subject = {St{\"a}dtebau}, language = {en} } @inproceedings{ChirkinKoenig, author = {Chirkin, Artem and K{\"o}nig, Reinhard}, title = {Concept of Interactive Machine Learning in Urban Design Problems : proceedings}, publisher = {ACM New York, NY, USA}, address = {San Jose, CA, USA}, doi = {10.25643/bauhaus-universitaet.2600}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26000}, pages = {10 -- 13}, abstract = {This work presents a concept of interactive machine learning in a human design process. An urban design problem is viewed as a multiple-criteria optimization problem. The outlined feature of an urban design problem is the dependence of a design goal on a context of the problem. We model the design goal as a randomized fitness measure that depends on the context. 
In terms of multiple-criteria decision analysis (MCDA), the defined measure corresponds to a subjective expected utility of a user. In the first stage of the proposed approach we let the algorithm explore a design space using clustering techniques. The second stage is an interactive design loop; the user makes a proposal, then the program optimizes it, gets the user's feedback and returns back the control over the application interface.}, subject = {Stadtgestaltung}, language = {en} } @inproceedings{KoenigMueller, author = {K{\"o}nig, Reinhard and M{\"u}ller, Daniela}, title = {Simulating the development of residential areas of the city of Vienna from 1888 to 2001}, series = {Compendium of Abstracts of the 8th International Conference on Urban Planning and Environment (UPE8)}, booktitle = {Compendium of Abstracts of the 8th International Conference on Urban Planning and Environment (UPE8)}, address = {Kaiserslautern, Germany}, doi = {10.25643/bauhaus-universitaet.2606}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26066}, pages = {23}, abstract = {The structure and development of cities can be seen and evaluated from different points of view. By replicating the growth or shrinkage of a city using historical maps depicting different time states, we can obtain momentary snapshots of the dynamic mechanisms of the city. An examination of how these snapshots change over the course of time and a comparison of the different static time states reveals the various interdependencies of population density, technical infrastructure and the availability of public transport facilities. Urban infrastructure and facilities are not distributed evenly across the city - rather they are subject to different patterns and speeds of spread over the course of time and follow different spatial and temporal regularities. 
The reasons and underlying processes that cause the transition from one state to another result from the same recurring but varyingly pronounced hidden forces and their complex interactions. Such forces encompass a variety of economic, social, cultural and ecological conditions whose respective weighting defines the development of a city in general. Urban development is, however, not solely a product of the different spatial distribution of economic, legal or social indicators but also of the distribution of infrastructure. But to what extent is the development of a city affected by the changing provision of infrastructure? As}, subject = {Simulation}, language = {en} } @inproceedings{TreyerKleinKoenigetal., author = {Treyer, Lukas and Klein, Bernhard and K{\"o}nig, Reinhard and Meixner, Christine}, title = {Lightweight urban computation interchange (LUCI) system}, series = {Proceedings}, booktitle = {Proceedings}, publisher = {FOSS4G}, address = {Seoul, South Korea}, doi = {10.25643/bauhaus-universitaet.2598}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-25982}, pages = {12}, abstract = {In this paper we introduce LUCI, a Lightweight Urban Calculation Interchange system, designed to bring the advantages of a calculation and content co-ordination system to small planning and design groups by the means of an open source middle-ware. The middle-ware focuses on problems typical to urban planning and therefore features a geo-data repository as well as a job runtime administration, to coordinate simulation models and its multiple views. The described system architecture is accompanied by two exemplary use cases that have been used to test and further develop our concepts and implementations.}, language = {en} } @inproceedings{HijaziHusseinKoenig, author = {Hijazi, Ihab Hamzi and Hussein, M. H. 
and K{\"o}nig, Reinhard}, title = {Enabling geo-design: Evaluating the capacity of 3D city model to support thermal design in building}, series = {9th 3DGeoInfo Conference}, booktitle = {9th 3DGeoInfo Conference}, address = {Dubai, UAE}, doi = {10.25643/bauhaus-universitaet.2508}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160118-25089}, pages = {4}, abstract = {Enabling geo-design: Evaluating the capacity of 3D city model to support thermal design in building}, subject = {Informatik}, language = {en} } @techreport{KoenigTapiasSchmitt, author = {K{\"o}nig, Reinhard and Tapias, Estefania and Schmitt, Gerhard}, title = {New Methods in Urban Analysis and Simulation: Documentation of teaching results from the spring semester 2015}, institution = {ETH Zurich}, doi = {10.25643/bauhaus-universitaet.2505}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160118-25052}, pages = {76}, abstract = {Documentation of teaching results from the spring semester 2015 at the chair of Information Architecture at ETH Zurich}, subject = {Architektur}, language = {en} } @inproceedings{Koenig, author = {K{\"o}nig, Reinhard}, title = {CPlan: An Open Source Library for Computational Analysis and Synthesis}, series = {33rd eCAADe Conference}, booktitle = {33rd eCAADe Conference}, editor = {Martens, Bob and Wurzer, Gabriel and Grasl, Tomas and Lorenz, Wolfgang and Schaffranek, Richard}, publisher = {Vienna University of Technology}, address = {Vienna}, doi = {10.25643/bauhaus-universitaet.2503}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160118-25037}, pages = {245--250}, abstract = {Some caad packages offer additional support for the optimization of spatial configurations, but the possibilities for applying optimization are usually limited either by the complexity of the data model or by the constraints of the underlying caad system. 
Since we missed a system that allows to experiment with optimization techniques for the synthesis of spatial configurations, we developed a collection of methods over the past years. This collection is now combined in the presented open source library for computational planning synthesis, called CPlan. The aim of the library is to provide an easy to use programming framework with a flat learning curve for people with basic programming knowledge. It offers an extensible structure that allows to add new customized parts for various purposes. In this paper the existing functionality of the CPlan library is described.}, subject = {Architektur}, language = {en} } @article{KnechtKoenig, author = {Knecht, Katja and K{\"o}nig, Reinhard}, title = {Automatische Grundst{\"u}cksumlegung mithilfe von Unterteilungsalgorithmen und typenbasierte Generierung von Stadtstrukturen}, doi = {10.25643/bauhaus-universitaet.2673}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160822-26730}, abstract = {Dieses Arbeitspapier beschreibt, wie ausgehend von einem vorhandenen Straßennetzwerk Bebauungsareale mithilfe von Unterteilungsalgorithmen automatisch umgelegt, d.h. in Grundst{\"u}cke unterteilt, und anschließend auf Basis verschiedener st{\"a}dtebaulicher Typen bebaut werden k{\"o}nnen. Die Unterteilung von Bebauungsarealen und die Generierung von Bebauungsstrukturen unterliegen dabei bestimmten stadtplanerischen Einschr{\"a}nkungen, Vorgaben und Parametern. 
Ziel ist es aus den dargestellten Untersuchungen heraus ein Vorschlagssystem f{\"u}r stadtplanerische Entw{\"u}rfe zu entwickeln, das anhand der Umsetzung eines ersten Softwareprototyps zur Generierung von Stadtstrukturen weiter diskutiert wird.}, subject = {Automatisierung}, language = {de} } @techreport{KoenigTapiasSchmitt2013, author = {K{\"o}nig, Reinhard and Tapias, Estefania and Schmitt, Gerhard}, title = {New Methods in Urban Analysis and Simulation: Documentation of teaching results from the autumn semester 2013}, institution = {ETH Zurich}, doi = {10.25643/bauhaus-universitaet.2516}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160121-25168}, pages = {60}, abstract = {Documentation of teaching results from the autumn semester 2013 at ETH Zurich}, subject = {St{\"a}dtebau}, language = {en} } @phdthesis{Schrader, author = {Schrader, Kai}, title = {Hybrid 3D simulation methods for the damage analysis of multiphase composites}, doi = {10.25643/bauhaus-universitaet.2059}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20131021-20595}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {174}, abstract = {Modern digital material approaches for the visualization and simulation of heterogeneous materials allow to investigate the behavior of complex multiphase materials with their physical nonlinear material response at various scales. However, these computational techniques require extensive hardware resources with respect to computing power and main memory to solve numerically large-scale discretized models in 3D. Due to a very high number of degrees of freedom, which may rapidly be increased to the two-digit million range, the limited hardware resources are to be utilized in a most efficient way to enable an execution of the numerical algorithms in minimal computation time. 
Hence, in the field of computational mechanics, various methods and algorithms can lead to an optimized runtime behavior of nonlinear simulation models, where several approaches are proposed and investigated in this thesis. Today, the numerical simulation of damage effects in heterogeneous materials is performed by the adaption of multiscale methods. A consistent modeling in the three-dimensional space with an appropriate discretization resolution on each scale (based on a hierarchical or concurrent multiscale model), however, still contains computational challenges in respect to the convergence behavior, the scale transition or the solver performance of the weak coupled problems. The computational efficiency and the distribution among available hardware resources (often based on a parallel hardware architecture) can significantly be improved. In the past years, high-performance computing (HPC) and graphics processing unit (GPU) based computation techniques were established for the investigation of scientific objectives. Their application results in the modification of existing and the development of new computational methods for the numerical implementation, which enables to take advantage of massively clustered computer hardware resources. In the field of numerical simulation in material science, e.g. within the investigation of damage effects in multiphase composites, the suitability of such models is often restricted by the number of degrees of freedom (d.o.f.s) in the three-dimensional spatial discretization. This proves to be difficult for the type of implementation method used for the nonlinear simulation procedure and, simultaneously has a great influence on memory demand and computational time. In this thesis, a hybrid discretization technique has been developed for the three-dimensional discretization of a three-phase material, which is respecting the numerical efficiency of nonlinear (damage) simulations of these materials. 
The increase of the computational efficiency is enabled by the improved scalability of the numerical algorithms. Consequently, substructuring methods for partitioning the hybrid mesh were implemented, tested and adapted to the HPC computing framework using several hundred CPU (central processing units) nodes for building the finite element assembly. A memory-efficient iterative and parallelized equation solver combined with a special preconditioning technique for solving the underlying equation system was modified and adapted to enable combined CPU and GPU based computations. Hence, it is recommended by the author to apply the substructuring method for hybrid meshes, which respects different material phases and their mechanical behavior and which enables to split the structure in elastic and inelastic parts. However, the consideration of the nonlinear material behavior, specified for the corresponding phase, is limited to the inelastic domains only, and by that causes a decreased computing time for the nonlinear procedure. Due to the high numerical effort for such simulations, an alternative approach for the nonlinear finite element analysis, based on the sequential linear analysis, was implemented in respect to scalable HPC. The incremental-iterative procedure in finite element analysis (FEA) during the nonlinear step was then replaced by a sequence of linear FE analysis when damage in critical regions occurred, known in literature as saw-tooth approach. 
As a result, qualitative (smeared) crack initiation in 3D multiphase specimens has efficiently been simulated.}, subject = {high-performance computing}, language = {en} } @phdthesis{CarvajalBermudez, author = {Carvajal Berm{\'u}dez, Juan Carlos}, title = {New methods of citizen participation based on digital technologies}, doi = {10.25643/bauhaus-universitaet.4712}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220906-47124}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {The current thesis presents research about new methods of citizen participation based on digital technologies. The focus on the research lies on decentralized methods of participation where citizens take the role of co-creators. The research project first conducted a review of the literature on citizen participation, its origins and the different paradigms that have emerged over the years. The literature review also looked at the influence of technologies on participation processes and the theoretical frameworks that have emerged to understand the introduction of technologies in the context of urban development. The literature review generated the conceptual basis for the further development of the thesis. The research begins with a survey of technology enabled participation applications that examined the roles and structures emerging due to the introduction of technology. The results showed that cities use technology mostly to control and monitor urban infrastructure and are rather reluctant to give citizens the role of co-creators. Based on these findings, three case studies were developed. Digital tools for citizen participation were conceived and introduced for each case study. The adoption and reaction of the citizens were observed using three data collection methods. 
The results of the case studies showed consistently that previous participation and engagement with informal citizen participation are a determining factor in the potential adoption of digital tools for decentralized engagement. Based on these results, the case studies proposed methods and frameworks that can be used for the conception and introduction of technologies for decentralized citizen participation.}, subject = {Partizipation}, language = {en} } @phdthesis{Vogler, author = {Vogler, Verena}, title = {A framework for artificial coral reef design: Integrating computational modelling and high precision monitoring strategies for artificial coral reefs - an Ecosystem-aware design approach in times of climate change}, isbn = {978-3-00-074495-2}, doi = {10.25643/bauhaus-universitaet.4611}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220322-46115}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {243}, abstract = {Tropical coral reefs, one of the world's oldest ecosystems which support some of the highest levels of biodiversity on the planet, are currently facing an unprecedented ecological crisis during this massive human-activity-induced period of extinction. Hence, tropical reefs symbolically stand for the destructive effects of human activities on nature [4], [5]. Artificial reefs are excellent examples of how architectural design can be combined with ecosystem regeneration [6], [7], [8]. However, to work at the interface between the artificial and the complex and temporal nature of natural systems presents a challenge, i.a. in respect to the B-rep modelling legacy of computational modelling. The presented doctorate investigates strategies on how to apply digital practice to realise what is an essential bulwark to retain reefs in impossibly challenging times. 
Beyond the main question of integrating computational modelling and high precision monitoring strategies in artificial coral reef design, this doctorate explores techniques, methods, and linking frameworks to support future research and practice in ecology led design contexts. Considering the many existing approaches for artificial coral reefs design, one finds they often fall short in precisely understanding the relationships between architectural and ecological aspects (e.g. how a surface design and material composition can foster coral larvae settlement, or structural three-dimensionality enhance biodiversity) and lack an integrated underwater (UW) monitoring process. Such a process is necessary in order to gather knowledge about the ecosystem and make it available for design, and to learn whether artificial structures contribute to reef regeneration or rather harm the coral reef ecosystem. For the research, empirical experimental methods were applied: Algorithmic coral reef design, high precision UW monitoring, computational modelling and simulation, and validated through parallel real-world physical experimentation - two Artificial Reef Prototypes (ARPs) in Gili Trawangan, Indonesia (2012-today). Multiple discrete methods and sub techniques were developed in seventeen computational experiments and applied in a way in which many are cross valid and integrated in an overall framework that is offered as a significant contribution to the field. Other main contributions include the Ecosystem-aware design approach, Key Performance Indicators (KPIs) for coral reef design, algorithmic design and fabrication of Biorock cathodes, new high precision UW monitoring strategies, long-term real-world constructed experiments, new digital analysis methods and two new front-end web-based tools for reef design and monitoring reefs. 
The methodological framework is a finding of the research that has many technical components that were tested and combined in this way for the very first time. In summary, the thesis responds to the urgency and relevance in preserving marine species in tropical reefs during this massive extinction period by offering a differentiated approach towards artificial coral reefs - demonstrating the feasibility of digitally designing such 'living architecture' according to multiple context and performance parameters. It also provides an in-depth critical discussion of computational design and architecture in the context of ecosystem regeneration and Planetary Thinking. In that respect, the thesis functions as both theoretical and practical background for computational design, ecology and marine conservation - not only to foster the design of artificial coral reefs technically but also to provide essential criteria and techniques for conceiving them. Keywords: Artificial coral reefs, computational modelling, high precision underwater monitoring, ecology in design.}, subject = {Korallenriff}, language = {en} } @article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Computers in the design phase - Ten thesis on their uselessness}, series = {Der Generalist}, journal = {Der Generalist}, doi = {10.25643/bauhaus-universitaet.2607}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26075}, abstract = {At the end of the 1960s, architects at various universities world- wide began to explore the potential of computer technology for their profession. With the decline in prices for PCs in the 1990s and the development of various computer-aided architectural design systems (CAAD), the use of such systems in architectural and planning offices grew continuously. 
Because today no architectural office manages without a costly CAAD system and because intensive software training has become an integral part of a university education, the question arises about what influence the various computer systems have had on the design process forming the core of architectural practice. The text at hand develops ten theses about why there has been no success to this day in introducing computers such that new qualitative possibilities for design result.}, subject = {CAD}, language = {en} } @techreport{KoenigTapiasSchmitt2014, author = {K{\"o}nig, Reinhard and Tapias, Estefania and Schmitt, Gerhard}, title = {New Methods in Urban Analysis and Simulation: Documentation of the teaching results from the spring semester 2014}, doi = {10.25643/bauhaus-universitaet.2515}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160121-25154}, pages = {62}, abstract = {Documentation of the teaching results from the spring semester 2014 at ETH Zurich}, subject = {St{\"a}dtebau}, language = {en} } @techreport{KoenigTapiasKoenig, author = {K{\"o}nig, Reinhard and Tapias, Estefania and K{\"o}nig, Gerhard}, title = {Digital Urban Simulation: Documentation of the teaching results from the fall semester 2014}, organization = {ETH Zurich}, doi = {10.25643/bauhaus-universitaet.2512}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160121-25125}, pages = {102}, abstract = {Documentation of the teaching results from the fall semester 2014}, subject = {St{\"a}dtebau}, language = {en} } @article{KoenigStandfestSchmitt, author = {K{\"o}nig, Reinhard and Standfest, Matthias and Schmitt, Gerhard}, title = {Evolutionary multi-criteria optimization for building layout planning: Exemplary application based on the PSSA framework}, series = {32nd eCAADe Conference - Volume 2}, journal = {32nd eCAADe Conference - Volume 2}, editor = {Thompson, Emine Mine}, doi = {10.25643/bauhaus-universitaet.2513}, url = 
{http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160121-25139}, pages = {567 -- 574}, abstract = {When working on urban planning projects there are usually multiple aspects to consider. Often these aspects are contradictory and it is not possible to choose one over the other; instead, they each need to be fulfilled as well as possible. Planners typically draw on past experience when subjectively prioritising which aspects to consider with which degree of importance for their planning concepts. This practice, although understandable, places power and authority in the hands of people who have varying degrees of expertise, which means that the best possible solution is not always found, because it is either not sought or the problem is regarded as being too complex for human capabilities. To improve this situation, the project presented here shows the potential of multi-criteria optimisation algorithms using the example of a new housing layout for an urban block. In addition it is shown, how Self-Organizing-Maps can be used to visualise multi-dimensional solution spaces in an easily analysable and comprehensible form.}, subject = {Architektur}, language = {en} } @article{KoenigKnecht, author = {K{\"o}nig, Reinhard and Knecht, Katja}, title = {Comparing two evolutionary algorithm based methods for layout generation: Dense packing versus subdivision}, series = {Artificial Intelligence for Engineering Design, Analysis and Manufacturing}, journal = {Artificial Intelligence for Engineering Design, Analysis and Manufacturing}, pages = {285 -- 299}, abstract = {We present and compare two evolutionary algorithm based methods for rectangular architectural layout generation: dense packing and subdivision algorithms. We analyze the characteristics of the two methods on the basis of three floor plan scenarios. 
Our analyses include the speed with which solutions are generated, the reliability with which optimal solutions can be found, and the number of different solutions that can be found overall. In a following step, we discuss the methods with respect to their different user interaction capabilities. In addition, we show that each method has the capability to generate more complex L-shaped layouts. Finally, we conclude that neither of the methods is superior but that each of them is suitable for use in distinct application scenarios because of its different properties.}, subject = {Architektur}, language = {en} } @inproceedings{TreyerKleinKoenigetal., author = {Treyer, Lukas and Klein, Bernhard and K{\"o}nig, Reinhard and Meixner, Christine}, title = {Lightweight urban computation interchange (LUCI) system}, series = {FOSS4G 2015 Conference}, booktitle = {FOSS4G 2015 Conference}, publisher = {FOSS4G}, address = {Seoul, South Korea}, doi = {10.25643/bauhaus-universitaet.2504}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160118-25042}, pages = {12}, abstract = {In this paper we introduce LUCI, a Lightweight Urban Calculation Interchange system, designed to bring the advantages of a calculation and content co-ordination system to small planning and design groups by the means of an open source middle-ware. The middle-ware focuses on problems typical to urban planning and therefore features a geo-data repository as well as a job runtime administration, to coordinate simulation models and its multiple views. 
The described system architecture is accompanied by two exemplary use cases that have been used to test and further develop our concepts and implementations.}, subject = {Architektur}, language = {en} } @inproceedings{KoenigBauriedel, author = {K{\"o}nig, Reinhard and Bauriedel, Christian}, title = {Computer-generated Urban Structures}, series = {Proceedings of the Generative Art Conference}, booktitle = {Proceedings of the Generative Art Conference}, address = {Milan, Italy}, doi = {10.25643/bauhaus-universitaet.2609}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160623-26090}, pages = {1 -- 10}, abstract = {How does it come to particular structure formations in the cities and which strengths play a role in this process? On which elements can the phenomena be reduced to find the respective combination rules? How do general principles have to be formulated to be able to describe the urban processes so that different structural qualities can be produced? With the aid of mathematic methods, models based on four basic levels are generated in the computer, through which the connections between the elements and the rules of their interaction can be examined. Conclusions on the function of developing processes and the further urban origin can be derived.}, language = {en} } @article{KleinKoenig, author = {Klein, Bernhard and K{\"o}nig, Reinhard}, title = {Computational Urban Planning: Using the Value Lab as Control Center}, series = {FCL Magazine, Special Issue Simulation Platform}, journal = {FCL Magazine, Special Issue Simulation Platform}, doi = {10.25643/bauhaus-universitaet.2601}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26011}, pages = {38 -- 45}, abstract = {Urban planning involves many aspects and various disciplines, demanding an asynchronous planning approach. The level of complexity rises with each aspect to be considered and makes it difficult to find universally satisfactory solutions. 
To improve this situation we propose a new approach, which complements traditional design methods with a computational urban planning method that can fulfil formalizable design requirements automatically. Based on this approach we present a design space exploration framework for complex urban planning projects. For a better understanding of the idea of design space exploration, we introduce the concept of a digital scout which guides planners through the design space and assists them in their creative explorations. The scout can support planners during manual design by informing them about potential impacts or by suggesting different solutions that fulfill predefined quality requirements. The planner can change flexibly between a manually controlled and a completely automated design process. The developed system is presented using an exemplary urban planning scenario on two levels from the street layout to the placement of building volumes. Based on Self-Organizing Maps we implemented a method which makes it possible to visualize the multi-dimensional solution space in an easily analysable and comprehensible form.}, subject = {Stadtgestaltung}, language = {en} } @article{KoenigAgentenAutomaten, author = {K{\"o}nig, Reinhard}, title = {Die Stadt der Agenten und Automaten}, series = {FORUM - Architektur \& Bauforum}, journal = {FORUM - Architektur \& Bauforum}, doi = {10.25643/bauhaus-universitaet.2608}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26083}, abstract = {PLANUNGSUNTERST{\"U}TZUNG DURCH DIE ANALYSE R{\"A}UMLICHER PROZESSE MITTELS COMPUTERSIMULATIONEN. Erst wenn man - zumindest im Prinzip - versteht, wie eine Stadt mit ihren komplexen, verwobenen Vorg{\"a}ngen im Wesentlichen funktioniert, ist eine sinnvolle Stadtplanung m{\"o}glich. Denn jede Planung bedeutet einen Eingriff in den komplexen Organismus einer Stadt. 
Findet dieser Eingriff ohne Wissen {\"u}ber die Funktionsweise des Organismus statt, k{\"o}nnen auch die Auswirkungen nicht abgesch{\"a}tzt werden. Dieser Beitrag stellt dar, wie urbane Prozesse mittels Computersimulationen unter Zuhilfenahme so genannter Multi-Agenten-Systeme und Zellul{\"a}rer Automaten verstanden werden k{\"o}nnen.}, subject = {CAD}, language = {de} } @article{TreyerKleinKoenigetalJournal, author = {Treyer, Lukas and Klein, Bernhard and K{\"o}nig, Reinhard and Meixner, Christine}, title = {Lightweight Urban Computation Interchange (LUCI): A System to Couple Heterogenous Simulations and Views}, series = {Spatial Information Research}, journal = {Spatial Information Research}, doi = {10.25643/bauhaus-universitaet.2603}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26037}, pages = {1 -- 12}, abstract = {In this paper we introduce LUCI, a Lightweight Urban Calculation Interchange system, designed to bring the advantages of calculation and content co-ordination system to small planning and design groups by the means of an open source middle-ware. The middle-ware focuses on problems typical to urban planning and therefore features a geo-data repository as well as a job runtime administration, to coordinate simulation models and its multiple views. 
The described system architecture is accompanied by two exemplary use cases that have been used to test and further develop our concepts and implementations.}, language = {en} } @inproceedings{KoenigSchmitt, author = {K{\"o}nig, Reinhard and Schmitt, Gerhard}, title = {Backcasting and a new way of command in computational design : Proceedings}, series = {CAADence in Architecture Conference}, booktitle = {CAADence in Architecture Conference}, editor = {Szoboszlai, Mih{\'a}ly}, address = {Budapest}, doi = {10.25643/bauhaus-universitaet.2599}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-25996}, pages = {15 -- 25}, abstract = {It's not uncommon that analysis and simulation methods are used mainly to evaluate finished designs and to proof their quality. Whereas the potential of such methods is to lead or control a design process from the beginning on. Therefore, we introduce a design method that move away from a "what-if" forecasting philosophy and increase the focus on backcasting approaches. We use the power of computation by combining sophisticated methods to generate design with analysis methods to close the gap between analysis and synthesis of designs. For the development of a future-oriented computational design support we need to be aware of the human designer's role. A productive combination of the excellence of human cognition with the power of modern computing technology is needed. We call this approach "cognitive design computing". The computational part aim to mimic the way a designer's brain works by combining state-of-the-art optimization and machine learning approaches with available simulation methods. The cognition part respects the complex nature of design problems by the provision of models for human-computation interaction. This means that a design problem is distributed between computer and designer. 
In the context of the conference slogan "back to command", we ask how we may imagine the command over a cognitive design computing system. We expect that designers will need to let go control of some parts of the design process to machines, but in exchange they will get a new powerful command on complex computing processes. This means that designers have to explore the potentials of their role as commanders of partially automated design processes. In this contribution we describe an approach for the development of a future cognitive design computing system with the focus on urban design issues. The aim of this system is to enable an urban planner to treat a planning problem as a backcasting problem by defining what performance a design solution should achieve and to automatically query or generate a set of best possible solutions. This kind of computational planning process offers proof that the designer meets the original explicitly defined design requirements. A key way in which digital tools can support designers is by generating design proposals. Evolutionary multi-criteria optimization methods allow us to explore a multi-dimensional design space and provide a basis for the designer to evaluate contradicting requirements: a task urban planners are faced with frequently. We also reflect why designers will give more and more control to machines. Therefore, we investigate first approaches learn how designers use computational design support systems in combination with manual design strategies to deal with urban design problems by employing machine learning methods. 
By observing how designers work, it is possible to derive more complex artificial solution strategies that can help computers make better suggestions in the future.}, subject = {CAD}, language = {en} } @article{KoenigBauriedelSettlement, author = {K{\"o}nig, Reinhard and Bauriedel, Christian}, title = {Generating settlement structures: a method for urban planning and analysis supported by cellular automata}, series = {Environment and Planning B: Planning and Design}, journal = {Environment and Planning B: Planning and Design}, doi = {10.25643/bauhaus-universitaet.2605}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160624-26054}, pages = {602 -- 624}, abstract = {Previous models for the explanation of settlement processes pay little attention to the interactions between settlement spreading and road networks. On the basis of a dielectric breakdown model in combination with cellular automata, we present a method to steer precisely the generation of settlement structures with regard to their global and local density as well as the size and number of forming clusters. The resulting structures depend on the logic of how the dependence of the settlements and the road network is implemented to the simulation model. After analysing the state of the art we begin with a discussion of the mutual dependence of roads and land development. Next, we elaborate a model that permits the precise control of permeability in the developing structure as well as the settlement density, using the fewest necessary control parameters. On the basis of different characteristic values, possible settlement structures are analysed and compared with each other. 
Finally, we reflect on the theoretical contribution of the model with regard to the context of urban dynamics.}, language = {en} } @inproceedings{KoenigVaroudis, author = {K{\"o}nig, Reinhard and Varoudis, Tasos}, title = {Spatial Optimizations: Merging depthmapX , spatial graph networks and evolutionary design in Grasshopper}, series = {Proceedings of ecaade 34: Complexity \& Simplicity}, booktitle = {Proceedings of ecaade 34: Complexity \& Simplicity}, address = {Oulu, Finland}, doi = {10.25643/bauhaus-universitaet.2604}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160622-26040}, pages = {1 -- 6}, abstract = {In the Space Syntax community, the standard tool for computing all kinds of spatial graph network measures is depthmapX (Turner, 2004; Varoudis, 2012). The process of evaluating many design variants of networks is relatively complicated, since they need to be drawn in a separated CAD system, exported and imported in depthmapX via dxf file format. This procedure disables a continuous integration into a design process. Furthermore, the standalone character of depthmapX makes it impossible to use its network centrality calculation for optimization processes. To overcome these limitations, we present in this paper the first steps of experimenting with a Grasshopper component (reference omitted until final version) that can access the functions of depthmapX and integrate them into Grasshopper/Rhino3D. 
Here the component is implemented in a way that it can be used directly for an evolutionary algorithm (EA) implemented in a Python scripting component in Grasshopper}, language = {en} } @article{BimberIwai2009, author = {Bimber, Oliver and Iwai, Daisuke}, title = {Superimposing Dynamic Range}, series = {Eurographics 2009}, journal = {Eurographics 2009}, doi = {10.25643/bauhaus-universitaet.1532}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20120130-15325}, year = {2009}, abstract = {Replacing a uniform illumination by a high-frequent illumination enhances the contrast of observed and captured images. We modulate spatially and temporally multiplexed (projected) light with reflective or transmissive matter to achieve high dynamic range visualizations of radiological images on printed paper or ePaper, and to boost the optical contrast of images viewed or imaged with light microscopes.}, subject = {CGI }, language = {en} } @phdthesis{Walsdorf, author = {Walsdorf, Joern}, title = {M-Learning: Lernen im mobilen Kontext an Hochschulen}, doi = {10.25643/bauhaus-universitaet.2136}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20140304-21361}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {A fundamental characteristic of human beings is the desire to start learning at the moment of birth. The rather formal learning process that learners have to deal with in school, on vocational training or in university, is currently subject to fundamental changes. The increasing technologization, overall existing mobile devices, the ubiquitous access to digital information, and students being early adaptors of all these technological innovations require reactions on the part of the educational system. This study examines such a reaction: The use of mobile learning in higher education. Examining the subject m-learning first requires an investigation of the educational model e-learning. 
Many universities already established e-learning as one of their educational segments, providing a wide range of methods to support this kind of teaching. This study includes an empirical acceptance analysis regarding the general learning behavior of students and their approval of e-learning methods. A survey on the approval of m-learning supplements the results. Mobile learning is characterized by both the mobility of the communication devices and the users. Both factors lead to new correlations, demonstrate the potential of today's mobile devices and the probability to increase the learning performance. The dissertation addresses these correlations and the use of mobile devices in the context of m-learning. M-learning and the usage of mobile devices not only require a reflection from a technological point of view. In addition to the technical features of such mobile devices, the usability of their applications plays an important role, especially with regard to the limited display size. For the purpose of evaluating mobile apps and browser-based applications, various analytical methods are suitable. The concluding heuristic evaluation points out the vulnerability of an established m-learning application, reveals the need for improvement, and shows an approach to rectify the shortcoming.}, subject = {Mobile Learning}, language = {de} } @phdthesis{Kulik, author = {Kulik, Alexander}, title = {User Interfaces for Cooperation}, doi = {10.25643/bauhaus-universitaet.2720}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20161202-27207}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {261}, abstract = {This thesis suggests cooperation as a design paradigm for human-computer interaction. The basic idea is that the synergistic co-operation of interfaces through concurrent user activities enables increased interaction fluency and expressiveness. 
This applies to bimanual interaction and multi-finger input, e.g., touch typing, as well as the collaboration of multiple users. Cooperative user interfaces offer more interaction flexibility and expressivity for single and multiple users. Part I of this thesis analyzes the state of the art in user interface design. It explores limitations of common approaches and reveals the crucial role of cooperative action in several established user interfaces and research prototypes. A review of related research in psychology and human-computer interaction offers insights to the cognitive, behavioral, and ergonomic foundations of cooperative user interfaces. Moreover, this thesis suggests a broad applicability of generic cooperation patterns and contributes three high-level design principles. Part II presents three experiments towards cooperative user interfaces in detail. A study on desktop-based 3D input devices, explores fundamental benefits of cooperative bimanual input and the impact of interface design on bimanual cooperative behavior. A novel interaction technique for multitouch devices is presented that follows the paradigm of cooperative user interfaces and demonstrates advantages over the status quo. Finally, this thesis introduces a fundamentally new display technology that provides up to six users with their individual perspectives of a shared 3D environment. The system creates new possibilities for the cooperative interaction of multiple users. Part III of this thesis builds on the research results described in Part II, in particular, the multi-user 3D display system. 
A series of case studies in the field of collaborative virtual reality provides exemplary evidence for the relevance and applicability of the suggested design principles.}, subject = {Human-Computer Interaction (HCI)}, language = {en} } @phdthesis{Moehring, author = {Moehring, Mathias}, title = {Realistic Interaction with Virtual Objects within Arm's Reach}, doi = {10.25643/bauhaus-universitaet.1859}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130301-18592}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {124}, abstract = {The automotive industry requires realistic virtual reality applications more than other domains to increase the efficiency of product development. Currently, the visual quality of virtual environments resembles reality, but interaction within these environments is usually far from what is known in everyday life. Several realistic research approaches exist, however they are still not all-encompassing enough to be usable in industrial processes. This thesis realizes lifelike direct multi-hand and multi-finger interaction with arbitrary objects, and proposes algorithmic and technical improvements that also approach lifelike usability. In addition, the thesis proposes methods to measure the effectiveness and usability of such interaction techniques as well as discusses different types of grasping feedback that support the user during interaction. Realistic and reliable interaction is reached through the combination of robust grasping heuristics and plausible pseudophysical object reactions. The easy-to-compute grasping rules use the objects' surface normals, and mimic human grasping behavior. The novel concept of Normal Proxies increases grasping stability and diminishes challenges induced by adverse normals. The intricate act of picking-up thin and tiny objects remains challenging for some users. These cases are further supported by the consideration of finger pinches, which are measured with a specialized finger tracking device. 
With regard to typical object constraints, realistic object motion is geometrically calculated as a plausible reaction on user input. The resulting direct finger-based interaction technique enables realistic and intuitive manipulation of arbitrary objects. The thesis proposes two methods that prove and compare effectiveness and usability. An expert review indicates that experienced users quickly familiarize themselves with the technique. A quantitative and qualitative user study shows that direct finger-based interaction is preferred over indirect interaction in the context of functional car assessments. While controller-based interaction is more robust, the direct finger-based interaction provides greater realism, and becomes nearly as reliable when the pinch-sensitive mechanism is used. At present, the haptic channel is not used in industrial virtual reality applications. That is why it can be used for grasping feedback which improves the users' understanding of the grasping situation. This thesis realizes a novel pressure-based tactile feedback at the fingertips. As an alternative, vibro-tactile feedback at the same location is realized as well as visual feedback by the coloring of grasp-involved finger segments. The feedback approaches are also compared within the user study, which reveals that grasping feedback is a requirement to judge grasp status and that tactile feedback improves interaction independent of the used display system. The considerably stronger vibrational tactile feedback can quickly become annoying during interaction. The interaction improvements and hardware enhancements make it possible to interact with virtual objects in a realistic and reliable manner. 
By addressing realism and reliability, this thesis paves the way for the virtual evaluation of human-object interaction, which is necessary for a broader application of virtual environments in the automotive industry and other domains.}, subject = {Virtuelle Realit{\"a}t}, language = {en} } @phdthesis{Lux, author = {Lux, Christopher}, title = {A Data-Virtualization System for Large Model Visualization}, doi = {10.25643/bauhaus-universitaet.1985}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130725-19855}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {211}, abstract = {Interactive scientific visualizations are widely used for the visual exploration and examination of physical data resulting from measurements or simulations. Driven by technical advancements of data acquisition and simulation technologies, especially in the geo-scientific domain, large amounts of highly detailed subsurface data are generated. The oil and gas industry is particularly pushing such developments as hydrocarbon reservoirs are increasingly difficult to discover and exploit. Suitable visualization techniques are vital for the discovery of the reservoirs as well as their development and production. However, the ever-growing scale and complexity of geo-scientific data sets result in an expanding disparity between the size of the data and the capabilities of current computer systems with regard to limited memory and computing resources. In this thesis we present a unified out-of-core data-virtualization system supporting geo-scientific data sets consisting of multiple large seismic volumes and height-field surfaces, wherein each data set may exceed the size of the graphics memory or possibly even the main memory. Current data sets fall within the range of hundreds of gigabytes up to terabytes in size. 
Through the mutual utilization of memory and bandwidth resources by multiple data sets, our data-management system is able to share and balance limited system resources among different data sets. We employ multi-resolution methods based on hierarchical octree and quadtree data structures to generate level-of-detail working sets of the data stored in main memory and graphics memory for rendering. The working set generation in our system is based on a common feedback mechanism with inherent support for translucent geometric and volumetric data sets. This feedback mechanism collects information about required levels of detail during the rendering process and is capable of directly resolving data visibility without the application of any costly occlusion culling approaches. A central goal of the proposed out-of-core data management system is an effective virtualization of large data sets. Through an abstraction of the level-of-detail working sets, our system allows developers to work with extremely large data sets independent of their complex internal data representations and physical memory layouts. Based on this out-of-core data virtualization infrastructure, we present distinct rendering approaches for specific visualization problems of large geo-scientific data sets. We demonstrate the application of our data virtualization system and show how multi-resolution data can be treated exactly the same way as regular data sets during the rendering process. An efficient volume ray casting system is presented for the rendering of multiple arbitrarily overlapping multi-resolution volume data sets. Binary space-partitioning volume decomposition of the bounding boxes of the cube-shaped volumes is used to identify the overlapping and non-overlapping volume regions in order to optimize the rendering process. We further propose a ray casting-based rendering system for the visualization of geological subsurface models consisting of multiple very detailed height fields. 
The rendering of an entire stack of height-field surfaces is accomplished in a single rendering pass using a two-level acceleration structure, which combines a minimum-maximum quadtree for empty-space skipping and sorted lists of depth intervals to restrict ray intersection searches to relevant height fields and depth ranges. Ultimately, we present a unified rendering system for the visualization of entire geological models consisting of highly detailed stacked horizon surfaces and massive volume data. We demonstrate a single-pass ray casting approach facilitating correct visual interaction between distinct translucent model components, while increasing the rendering efficiency by reducing processing overhead of potentially invisible parts of the model. The combination of image-order rendering approaches and the level-of-detail feedback mechanism used by our out-of-core data-management system inherently accounts for occlusions of different data types without the application of costly culling techniques. The unified out-of-core data-management and virtualization infrastructure considerably facilitates the implementation of complex visualization systems. We demonstrate its applicability for the visualization of large geo-scientific data sets using output-sensitive rendering techniques. As a result, the magnitude and multitude of data sets that can be interactively visualized is significantly increased compared to existing approaches.}, subject = {Computer Graphics}, language = {en} } @phdthesis{Fleischmann, author = {Fleischmann, Ewan}, title = {Analysis and Design of Blockcipher Based Cryptographic Algorithms}, doi = {10.25643/bauhaus-universitaet.1983}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130722-19835}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {272}, abstract = {This thesis focuses on the analysis and design of hash functions and authenticated encryption schemes that are blockcipher based. 
We give an introduction into these fields of research - taking in a blockcipher based point of view - with special emphasis on the topics of double length, double call blockcipher based compression functions. The first main topic (thesis parts I - III) is on analysis and design of hash functions. We start with a collision security analysis of some well known double length blockcipher based compression functions and hash functions: Abreast-DM, Tandem-DM and MDC-4. We also propose new double length compression functions that have elevated collision security guarantees. We complement the collision analysis with a preimage analysis by stating (near) optimal security results for Abreast-DM, Tandem-DM, and Hirose-DM. Also, some generalizations are discussed. These are the first preimage security results for blockcipher based double length hash functions that go beyond the birthday barrier. We then raise the abstraction level and analyze the notion of 'hash function indifferentiability from a random oracle'. So we not anymore focus on how to obtain a good compression function but, instead, on how to obtain a good hash function using (other) cryptographic primitives. In particular we give some examples when this strong notion of hash function security might give questionable advice for building a practical hash function. In the second main topic (thesis part IV), which is on authenticated encryption schemes, we present an on-line authenticated encryption scheme, McOEx, that simultaneously achieves privacy and confidentiality and is secure against nonce-misuse. 
It is the first dedicated scheme that achieves high standards of security and - at the same time - is on-line computable.}, subject = {Kryptologie}, language = {en} } @phdthesis{Gollub, author = {Gollub, Tim}, title = {Information Retrieval for the Digital Humanities}, doi = {10.25643/bauhaus-universitaet.4673}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220801-46738}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {177}, abstract = {In ten chapters, this thesis presents information retrieval technology which is tailored to the research activities that arise in the context of corpus-based digital humanities projects. The presentation is structured by a conceptual research process that is introduced in Chapter 1. The process distinguishes a set of five research activities: research question generation, corpus acquisition, research question modeling, corpus annotation, and result dissemination. Each of these research activities elicits different information retrieval tasks with special challenges, for which algorithmic approaches are presented after an introduction of the core information retrieval concepts in Chapter 2. A vital concept in many of the presented approaches is the keyquery paradigm introduced in Chapter 3, which represents an operation that returns relevant search queries in response to a given set of input documents. Keyqueries are proposed in Chapter 4 for the recommendation of related work, and in Chapter 5 for improving access to aspects hidden in the long tail of search result lists. With pseudo-descriptions, a document expansion approach is presented in Chapter 6. The approach improves the retrieval performance for corpora where only bibliographic meta-data is originally available. In Chapter 7, the keyquery paradigm is employed to generate dynamic taxonomies for corpora in an unsupervised fashion. 
Chapter 8 turns to the exploration of annotated corpora, and presents scoped facets as a conceptual extension to faceted search systems, which is particularly useful in exploratory search settings. For the purpose of highlighting the major topical differences in a sequence of sub-corpora, an algorithm called topical sequence profiling is presented in Chapter 9. The thesis concludes with two pilot studies regarding the visualization of (re)search results for the means of successful result dissemination: a metaphoric interpretation of the information nutrition label, as well as the philosophical bodies, which are 3D-printed search results.}, subject = {Information Retrieval}, language = {en} } @phdthesis{Anderka, author = {Anderka, Maik}, title = {Analyzing and Predicting Quality Flaws in User-generated Content: The Case of Wikipedia}, doi = {10.25643/bauhaus-universitaet.1977}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20130709-19778}, school = {Bauhaus-Universit{\"a}t Weimar}, abstract = {Web applications that are based on user-generated content are often criticized for containing low-quality information; a popular example is the online encyclopedia Wikipedia. The major points of criticism pertain to the accuracy, neutrality, and reliability of information. The identification of low-quality information is an important task since for a huge number of people around the world it has become a habit to first visit Wikipedia in case of an information need. Existing research on quality assessment in Wikipedia either investigates only small samples of articles, or else deals with the classification of content into high-quality or low-quality. This thesis goes further, it targets the investigation of quality flaws, thus providing specific indications of the respects in which low-quality content needs improvement. 
The original contributions of this thesis, which relate to the fields of user-generated content analysis, data mining, and machine learning, can be summarized as follows: (1) We propose the investigation of quality flaws in Wikipedia based on user-defined cleanup tags. Cleanup tags are commonly used in the Wikipedia community to tag content that has some shortcomings. Our approach is based on the hypothesis that each cleanup tag defines a particular quality flaw. (2) We provide the first comprehensive breakdown of Wikipedia's quality flaw structure. We present a flaw organization schema, and we conduct an extensive exploratory data analysis which reveals (a) the flaws that actually exist, (b) the distribution of flaws in Wikipedia, and, (c) the extent of flawed content. (3) We present the first breakdown of Wikipedia's quality flaw evolution. We consider the entire history of the English Wikipedia from 2001 to 2012, which comprises more than 508 million page revisions, summing up to 7.9 TB. Our analysis reveals (a) how the incidence and the extent of flaws have evolved, and, (b) how the handling and the perception of flaws have changed over time. (4) We are the first who operationalize an algorithmic prediction of quality flaws in Wikipedia. We cast quality flaw prediction as a one-class classification problem, develop a tailored quality flaw model, and employ a dedicated one-class machine learning approach. 
A comprehensive evaluation based on human-labeled Wikipedia articles underlines the practical applicability of our approach.}, subject = {Data Mining}, language = {en} } @phdthesis{Forler, author = {Forler, Christian}, title = {Analysis Design \& Applications of Cryptographic Building Blocks}, publisher = {Shaker Verlag}, doi = {10.25643/bauhaus-universitaet.2376}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20150330-23764}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {213}, abstract = {This thesis deals with the basic design and rigorous analysis of cryptographic schemes and primitives, especially of authenticated encryption schemes, hash functions, and password-hashing schemes. In the last decade, security issues such as the PS3 jailbreak demonstrate that common security notions are rather restrictive, and it seems that they do not model the real world adequately. As a result, in the first part of this work, we introduce a less restrictive security model that is closer to reality. In this model it turned out that existing (on-line) authenticated encryption schemes can no longer be considered secure, i.e. they can guarantee neither data privacy nor data integrity. Therefore, we present two novel authenticated encryption schemes, namely COFFE and McOE, which are not only secure in the standard model but also reasonably secure in our generalized security model, i.e. both preserve full data integrity. In addition, McOE preserves a reasonable level of data privacy. The second part of this thesis starts with proposing the hash function Twister-Pi, a revised version of the accepted SHA-3 candidate Twister. We not only fixed all known security issues of Twister, but also increased the overall soundness of our hash-function design. Furthermore, we present some fundamental groundwork in the area of password-hashing schemes. This research was mainly inspired by the medial omnipresence of password-leakage incidents. 
We show that the password-hashing scheme scrypt is vulnerable against cache-timing attacks due to the existence of a password-dependent memory-access pattern. Finally, we introduce Catena the first password-hashing scheme that is both memory-consuming and resistant against cache-timing attacks.}, subject = {Kryptologie}, language = {en} } @phdthesis{Schneider, author = {Schneider, Sven}, title = {Sichtbarkeitsbasierte Raumerzeugung - Automatisierte Erzeugung r{\"a}umlicher Konfigurationen in Architektur und St{\"a}dtebau auf Basis sichtbarkeitsbasierter Raumrepr{\"a}sentationen}, doi = {10.25643/bauhaus-universitaet.2590}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20160613-25900}, school = {Bauhaus-Universit{\"a}t Weimar}, pages = {225}, abstract = {Das Erzeugen r{\"a}umlicher Konfigurationen ist eine zentrale Aufgabe im architektonischen bzw. st{\"a}dtebaulichen Entwurfsprozess und hat zum Ziel, eine f{\"u}r Menschen angenehme Umwelt zu schaffen. Der Geometrie der entstehenden R{\"a}ume kommt hierbei eine zentrale Rolle zu, da sie einen großen Einfluss auf das Empfinden und Verhalten der Menschen aus{\"u}bt und nur noch mit großem Aufwand ver{\"a}ndert werden kann, wenn sie einmal gebaut wurde. Die meisten Entscheidungen zur Festlegung der Geometrie von R{\"a}umen werden w{\"a}hrend eines sehr kurzen Zeitraums (Entwurfsphase) getroffen. Fehlentscheidungen die in dieser Phase getroffen werden haben langfristige Auswirkungen auf das Leben von Menschen, und damit auch Konsequenzen auf {\"o}konomische und {\"o}kologische Aspekte. Mittels computerbasierten Layoutsystemen l{\"a}sst sich der Entwurf r{\"a}umlicher Konfigurationen sinnvoll unterst{\"u}tzen, da sie es erm{\"o}glichen, in k{\"u}rzester Zeit eine große Anzahl an Varianten zu erzeugen und zu {\"u}berpr{\"u}fen. Daraus ergeben sich zwei Vorteile. Erstens kann die große Menge an Varianten dazu beitragen, bessere L{\"o}sungen zu finden. 
Zweitens kann das Formalisieren von Bewertungskriterien zu einer gr{\"o}ßeren Objektivit{\"a}t und Transparenz bei der L{\"o}sungsfindung f{\"u}hren. Um den Entwurf r{\"a}umlicher Konfigurationen optimal zu unterst{\"u}tzen, muss ein Layoutsystem in der Lage sein, ein m{\"o}glichst großes Spektrum an Grundrissvarianten zu erzeugen (Vielfalt); und zahlreiche M{\"o}glichkeiten und Detaillierungsstufen zur Problembeschreibung (Flexibilit{\"a}t), sowie Mittel anzubieten, mit denen sich die Anforderungen an die r{\"a}umliche Konfiguration ad{\"a}quat beschreiben lassen (Relevanz). Bez{\"u}glich Letzterem spielen wahrnehmungs- und nutzungsbezogene Kriterien (wie z. B. Grad an Privatheit, Gef{\"u}hl von Sicherheit, Raumwirkung, Orientierbarkeit, Potenzial zu sozialer Interaktion) eine wichtige Rolle. Die bislang entwickelten Layoutsysteme weisen hinsichtlich Vielfalt, Flexibilit{\"a}t und Relevanz wesentliche Beschr{\"a}nkungen auf, welche auf eine ungeeignete Methode zur Repr{\"a}sentation von R{\"a}umen zur{\"u}ckzuf{\"u}hren sind. Die in einem Layoutsystem verwendeten Raumrepr{\"a}sentationsmethoden bestimmen die M{\"o}glichkeiten zur Formerzeugung und Problembeschreibung wesentlich. Sichtbarkeitsbasierte Raumrepr{\"a}sentationen (Sichtfelder, Sichtachsen, Konvexe R{\"a}ume) eignen sich in besonderer Weise zur Abbildung von R{\"a}umen in Layoutsystemen, da sie einerseits ein umfangreiches Repertoire zur Verf{\"u}gung stellen, um r{\"a}umliche Konfigurationen hinsichtlich wahrnehmungs- und nutzungsbezogener Kriterien zu beschreiben. Andererseits lassen sie sich vollst{\"a}ndig aus der Geometrie der begrenzenden Oberfl{\"a}chen ableiten und sind nicht an bestimmte zur Formerzeugung verwendete geometrische Objekte gebunden. In der vorliegenden Arbeit wird ein Layoutsystem entwickelt, welches auf diesen Raumrepr{\"a}sentationen basiert. 
Es wird ein Evaluationsmechanismus (EM) entwickelt, welcher es erm{\"o}glicht, beliebige zweidimensionale r{\"a}umliche Konfigurationen hinsichtlich wahrnehmungs- und nutzungsrelevanter Kriterien zu bewerten. Hierzu wurde eine Methodik entwickelt, die es erm{\"o}glicht automatisch Raumbereiche (O-Spaces und P-Spaces) zu identifizieren, welche bestimmte Eigenschaften haben (z.B. sichtbare Fl{\"a}che, Kompaktheit des Sichtfeldes, Tageslicht) und bestimmte Relationen zueinander (wie gegenseitige Sichtbarkeit, visuelle und physische Distanz) aufweisen. Der EM wurde mit Generierungsmechanismen (GM) gekoppelt, um zu pr{\"u}fen, ob dieser sich eignet, um in großen Variantenr{\"a}umen nach geeigneten r{\"a}umlichen Konfigurationen zu suchen. Die Ergebnisse dieser Experimente zeigen, dass die entwickelte Methodik einen vielversprechenden Ansatz zur automatisierten Erzeugung von r{\"a}umlichen Konfigurationen darstellt: Erstens ist der EM vollst{\"a}ndig vom GM getrennt, wodurch es m{\"o}glich ist, verschiedene GM in einem Entwurfssystem zu verwenden und somit den Variantenraum zu vergr{\"o}ßern (Vielfalt). Zweitens erlaubt der EM die Anforderungen an eine r{\"a}umliche Konfiguration flexibel zu beschreiben (unterschiedliche Maßst{\"a}be, unterschiedlicher Detaillierungsgrad). Letztlich erlauben die verwendeten Repr{\"a}sentationsmethoden eine Problembeschreibung vorzunehmen, die stark an der Wirkung des Raumes auf den Menschen orientiert ist (Relevanz). 
Die in der Arbeit entwickelte Methodik leistet einen wichtigen Beitrag zur Verbesserung evidenzbasierter Entwurfsprozesse, da sie eine Br{\"u}cke zwischen der nutzerorientierten Bewertung von r{\"a}umlichen Konfigurationen und deren Erzeugung schl{\"a}gt.}, subject = {Architektur}, language = {de} } @article{SoebkeLueck, author = {S{\"o}bke, Heinrich and L{\"u}ck, Andrea}, title = {Framing Algorithm-Driven Development of Sets of Objectives Using Elementary Interactions}, series = {Applied System Innovation}, volume = {2022}, journal = {Applied System Innovation}, number = {Volume 5, issue 3, article 49}, publisher = {MDPI}, address = {Basel}, doi = {10.3390/asi5030049}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20220713-46624}, pages = {1 -- 20}, abstract = {Multi-criteria decision analysis (MCDA) is an established methodology to support the decision-making of multi-objective problems. For conducting an MCDA, in most cases, a set of objectives (SOO) is required, which consists of a hierarchical structure comprised of objectives, criteria, and indicators. The development of an SOO is usually based on moderated development processes requiring high organizational and cognitive effort from all stakeholders involved. This article proposes elementary interactions as a key paradigm of an algorithm-driven development process for an SOO that requires little moderation efforts. Elementary interactions are self-contained information requests that may be answered with little cognitive effort. The pairwise comparison of elements in the well-known analytical hierarchical process (AHP) is an example of an elementary interaction. Each elementary interaction in the development process presented contributes to the stepwise development of an SOO. Based on the hypothesis that an SOO may be developed exclusively using elementary interactions (EIs), a concept for a multi-user platform is proposed. 
Essential components of the platform are a Model Aggregator, an Elementary Interaction Stream Generator, a Participant Manager, and a Discussion Forum. While the latter component serves the professional exchange of the participants, the first three components are intended to be automatable by algorithms. The platform concept proposed has been evaluated partly in an explorative validation study demonstrating the general functionality of the algorithms outlined. In summary, the platform concept suggested demonstrates the potential to ease SOO development processes as the platform concept does not restrict the application domain; it is intended to work with little administration moderation efforts, and it supports the further development of an existing SOO in the event of changes in external conditions. The algorithm-driven development of SOOs proposed in this article may ease the development of MCDA applications and, thus, may have a positive effect on the spread of MCDA applications.}, subject = {Multikriteria-Entscheidung}, language = {en} } @article{Koenig, author = {K{\"o}nig, Reinhard}, title = {Interview on Information Architecture}, series = {Swiss Architecture in the Moving Image}, journal = {Swiss Architecture in the Moving Image}, doi = {10.25643/bauhaus-universitaet.2507}, url = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20180422-25078}, pages = {151 -- 154}, abstract = {Interview on Information Architecture}, subject = {Architektur}, language = {en} }