@techreport{WetzsteinBimber2006,
  author      = {Wetzstein, Gordon and Bimber, Oliver},
  title       = {A Generalized Approach to Radiometric Compensation},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2006},
  doi         = {10.25643/bauhaus-universitaet.762},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-7625},
  abstract    = {We propose a novel method that applies the light transport matrix for performing an image-based radiometric compensation which accounts for all possible types of light modulation. For practical application the matrix is decomposed into clusters of mutually influencing projector and camera pixels. The compensation is modeled as a linear system that can be solved with respect to the projector patterns. Precomputing the inverse light transport in combination with an efficient implementation on the GPU makes interactive compensation rates possible. Our generalized method unifies existing approaches that address individual problems. Based on examples, we show that it is possible to project corrected images onto complex surfaces such as an inter-reflecting statuette, glossy wallpaper, or through highly-refractive glass. Furthermore, we illustrate that a side-effect of our approach is an increase in the overall sharpness of defocused projections.},
  subject     = {Association for Computing Machinery / Special Interest Group on Graphics},
  language    = {en},
}

@techreport{BrunsBimber2007,
  author      = {Bruns, Erich and Bimber, Oliver},
  title       = {Adaptive Training of Video Sets for Image Recognition on Mobile Phones},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2007},
  doi         = {10.25643/bauhaus-universitaet.822},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8223},
  abstract    = {We present an enhancement towards adaptive video training for PhoneGuide, a digital museum guidance system for ordinary camera-equipped mobile phones. It enables museum visitors to identify exhibits by capturing photos of them. In this article, a combined solution of object recognition and pervasive tracking is extended to a client-server-system for improving data acquisition and for supporting scale-invariant object recognition.},
  subject     = {Objektverfolgung},
  language    = {en},
}

@techreport{AmanoBimberGrundhoefer2010,
  author      = {Amano, Toshiyuki and Bimber, Oliver and Grundh{\"o}fer, Anselm},
  title       = {Appearance Enhancement for Visually Impaired with Projector Camera Feedback},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2010},
  doi         = {10.25643/bauhaus-universitaet.1411},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20100106-14974},
  abstract    = {Visually impaired is a common problem for human life in the world wide. The projector-based AR technique has ability to change appearance of real object, and it can help to improve visibility for visually impaired. We propose a new framework for the appearance enhancement with the projector camera system that employed model predictive controller. This framework enables arbitrary image processing such as photo-retouch software in the real world and it helps to improve visibility for visually impaired. In this article, we show the appearance enhancement result of Peli's method and Wolffshon's method for the low vision, Jefferson's method for color vision deficiencies. Through experiment results, the potential of our method to enhance the appearance for visually impaired was confirmed as same as appearance enhancement for the digital image and television viewing.},
  subject     = {Maschinelles Sehen},
  language    = {en},
}

@techreport{GrosseBimber2008,
  author      = {Grosse, Max and Bimber, Oliver},
  title       = {Coded Aperture Projection},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2008},
  doi         = {10.25643/bauhaus-universitaet.1234},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20080227-13020},
  abstract    = {In computer vision, optical defocus is often described as convolution with a filter kernel that corresponds to an image of the aperture being used by the imaging device. The degree of defocus correlates to the scale of the kernel. Convolving an image with the inverse aperture kernel will digitally sharpen the image and consequently compensate optical defocus. This is referred to as deconvolution or inverse filtering. In frequency domain, the reciprocal of the filter kernel is its inverse, and deconvolution reduces to a division. Low magnitudes in the Fourier transform of the aperture image, however, lead to intensity values in spatial domain that exceed the displayable range. Therefore, the corresponding frequencies are not considered, which then results in visible ringing artifacts in the final projection. This is the main limitation of previous approaches, since in frequency domain the Gaussian PSF of spherical apertures does contain a large fraction of low Fourier magnitudes. Applying only small kernel scales will reduce the number of low Fourier magnitudes (and consequently the ringing artifacts) -- but will also lead only to minor focus improvements. To overcome this problem, we apply a coded aperture whose Fourier transform has less low magnitudes initially. Consequently, more frequencies are retained and more image details are reconstructed.},
  subject     = {Association for Computing Machinery / Special Interest Group on Graphics},
  language    = {en},
}

@article{GrundhoeferSeegerHaentschetal.2007,
  author        = {Grundh{\"o}fer, Anselm and Seeger, Manja and H{\"a}ntsch, Ferry and Bimber, Oliver},
  title         = {Coded Projection and Illumination for Television Studios},
  organization  = {Bimber, Fak. M, BUW},
  year          = {2007},
  doi           = {10.25643/bauhaus-universitaet.800},
  url           = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8005},
  abstract      = {We propose the application of temporally and spatially coded projection and illumination in modern television studios. In our vision, this supports ad-hoc re-illumination, automatic keying, unconstrained presentation of moderation information, camera-tracking, and scene acquisition. In this paper we show how a new adaptive imperceptible pattern projection that considers parameters of human visual perception, linked with real-time difference keying enables an in-shot optical tracking using a novel dynamic multi-resolution marker technique.},
  subject       = {Association for Computing Machinery / Special Interest Group on Graphics},
  language      = {en},
  internal-note = {required journal field missing -- verify publication venue},
}

@article{BimberGrundhoeferZollmannetal.2006,
  author        = {Bimber, Oliver and Grundh{\"o}fer, Anselm and Zollmann, Stefanie and Kolster, Daniel},
  title         = {Digital Illumination for Augmented Studios},
  year          = {2006},
  doi           = {10.25643/bauhaus-universitaet.857},
  url           = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8576},
  abstract      = {Virtual studio technology plays an important role for modern television productions. Blue-screen matting is a common technique for integrating real actors or moderators into computer generated sceneries. Augmented reality offers the possibility to mix real and virtual in a more general context. This article proposes a new technological approach for combining real studio content with computergenerated information. Digital light projection allows a controlled spatial, temporal, chrominance and luminance modulation of illumination - opening new possibilities for TV studios.},
  subject       = {Studiotechnik},
  language      = {en},
  internal-note = {required journal field missing -- verify publication venue},
}

@techreport{GrundhoeferSeegerHaentschetal.2007a,
  author      = {Grundh{\"o}fer, Anselm and Seeger, Manja and H{\"a}ntsch, Ferry and Bimber, Oliver},
  title       = {Dynamic Adaptation of Projected Imperceptible Codes},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2007},
  doi         = {10.25643/bauhaus-universitaet.816},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-8168},
  abstract    = {In this paper we present a novel adaptive imperceptible pattern projection technique that considers parameters of human visual perception. A coded image that is invisible for human observers is temporally integrated into the projected image, but can be reconstructed by a synchronized camera. The embedded code is dynamically adjusted on the fly to guarantee its non-perceivability and to adapt it to the current camera pose. Linked with real-time flash keying, for instance, this enables in-shot optical tracking using a dynamic multi-resolution marker technique. A sample prototype is realized that demonstrates the application of our method in the context of augmentations in television studios.},
  subject     = {Association for Computing Machinery / Special Interest Group on Graphics},
  language    = {en},
}

@techreport{GrundhoeferBimber2008,
  author      = {Grundh{\"o}fer, Anselm and Bimber, Oliver},
  title       = {Dynamic Bluescreens},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2008},
  doi         = {10.25643/bauhaus-universitaet.1233},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20080226-13016},
  abstract    = {Blue screens and chroma keying technology are essential for digital video composition. Professional studios apply tracking technology to record the camera path for perspective augmentations of the original video footage. Although this technology is well established, it does not offer a great deal of flexibility. For shootings at non-studio sets, physical blue screens might have to be installed, or parts have to be recorded in a studio separately. We present a simple and flexible way of projecting corrected keying colors onto arbitrary diffuse surfaces using synchronized projectors and radiometric compensation. Thereby, the reflectance of the underlying real surface is neutralized. A temporal multiplexing between projection and flash illumination allows capturing the fully lit scene, while still being able to key the foreground objects. In addition, we embed spatial codes into the projected key image to enable the tracking of the camera. Furthermore, the reconstruction of the scene geometry is implicitly supported.},
  subject     = {Association for Computing Machinery / Special Interest Group on Graphics},
  language    = {en},
}

@techreport{BrunsBrombachZeidleretal.2005,
  author      = {Bruns, Erich and Brombach, Benjamin and Zeidler, Thomas and Bimber, Oliver},
  title       = {Enabling Mobile Phones To Support Large-Scale Museum Guidance},
  institution = {Bauhaus-Universit{\"a}t Weimar},
  year        = {2005},
  doi         = {10.25643/bauhaus-universitaet.677},
  url         = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20111215-6777},
  abstract    = {We present a museum guidance system called PhoneGuide that uses widespread camera equipped mobile phones for on-device object recognition in combination with pervasive tracking. It provides additional location- and object-aware multimedia content to museum visitors, and is scalable to cover a large number of museum objects.},
  subject     = {Objektverfolgung},
  language    = {en},
}

@techreport{ExnerBrunsKurzetal.2009,
  author       = {Exner, David and Bruns, Erich and Kurz, Daniel and Grundh{\"o}fer, Anselm and Bimber, Oliver},
  title        = {Fast and Reliable {CAMShift} Tracking},
  institution  = {Bauhaus-Universit{\"a}t Weimar},
  organization = {JP Augmented Reality, Bauhaus-Universit{\"a}t Weimar},
  year         = {2009},
  doi          = {10.25643/bauhaus-universitaet.1410},
  url          = {http://nbn-resolving.de/urn:nbn:de:gbv:wim2-20091217-14962},
  abstract     = {CAMShift is a well-established and fundamental algorithm for kernel-based visual object tracking. While it performs well with objects that have a simple and constant appearance, it is not robust in more complex cases. As it solely relies on back projected probabilities it can fail in cases when the object's appearance changes (e.g. due to object or camera movement, or due to lighting changes), when similarly colored objects have to be re-detected or when they cross their trajectories. We propose extensions to CAMShift that address and resolve all of these problems. They allow the accumulation of multiple histograms to model more complex object appearance and the continuous monitoring of object identities to handle ambiguous cases of partial or full occlusion. Most steps of our method are carried out on the GPU for achieving real-time tracking of multiple targets simultaneously. We explain efficient GPU implementations of histogram generation, probability back projection, image moments computations, and histogram intersection. All of these techniques make full use of a GPU's high parallelization.},
  subject      = {Bildverarbeitung},
  language     = {en},
}