# This BibTeX File has been generated by
# the Typo3 extension 'Sixpack-4-T3 by Sixten Boeck'
#
# URL:
# Date: 12/10/2016
# Non-standard BibTeX fields are included.
# state: 0 = published, 1 = accepted, 2 = submitted, 3 = to be published // if missing, published is assumed
# extern,deleted,hidden: 0 = false, 1 = true // if missing, false is assumed
# link format: Title Url // separated by a whitespace
@article{KHD2014,
author = { Klimke, Jan and Hagedorn, Benjamin and Döllner, Jürgen },
title = { Scalable Multi-Platform Distribution of Spatial 3D Contents },
journal = { International Journal of 3-D Information Modeling },
year = { 2014 },
volume = { 3 },
number = { 3 },
pages = { 35--49 },
month = jul,
abstract = { Virtual 3D city models provide powerful user interfaces for communication of 2D and 3D geoinformation. Providing high quality visualization of massive 3D geoinformation in a scalable, fast, and cost efficient manner is still a challenging task. Especially for mobile and web-based system environments, software and hardware configurations of target systems differ significantly. This makes it hard to provide fast, visually appealing renderings of 3D data throughout a variety of platforms and devices. Current mobile or web-based solutions for 3D visualization usually require raw 3D scene data such as triangle meshes together with textures delivered from server to client, what makes them strongly limited in terms of size and complexity of the models they can handle. This paper introduces a new approach for provisioning of massive, virtual 3D city models on different platforms namely web browsers, smartphones or tablets, by means of an interactive map assembled from artificial oblique image tiles. The key concept is to synthesize such images of a virtual 3D city model by a 3D rendering service in a preprocessing step. This service encapsulates model handling and 3D rendering techniques for high quality visualization of massive 3D models. By generating image tiles using this service, the 3D rendering process is shifted from the client side, which provides major advantages: a The complexity of the 3D city model data is decoupled from data transfer complexity b the implementation of client applications is simplified significantly as 3D rendering is encapsulated on server side c 3D city models can be easily deployed for and used by a large number of concurrent users, leading to a high degree of scalability of the overall approach. All core 3D rendering techniques are performed on a dedicated 3D rendering server, and thin-client applications can be compactly implemented for various devices and platforms. },
keywords = { 3D City Models, CityGML, Slippy Map, Oblique Views, Provisioning },
url = { http://dl.acm.org/citation.cfm?id=2738648 },
editor = { U. Isikdag },
publisher = { IGI Publishing Hershey, PA, USA },
booktitle = { International Journal of 3-D Information Modeling },
organization = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/People/jklimke/klimke_2014_ij3dim.pdf },
issn = { 2156-1710 },
doi = { 10.4018/ij3dim.2014070103 },
sorting = { 16 }
}
@article{Klimke2012a,
author = { Klimke, Jan and Döllner, Jürgen },
title = { Service-oriented Visualization of Virtual 3D City Models },
journal = { Directions Magazine },
year = { 2012 },
abstract = { Virtual 3D city models can effectively communicate complex 2D and 3D geospatial data. Myriad applications, from urban planning to solar potential of roof surfaces to noise pollution, can be modeled and visualized. However, massive amounts of data need to be processed. Authors Jan Klimke and Jürgen Döllner of the Hasso-Plattner-Institut, University of Potsdam, Germany detail a new, service-oriented approach that may solve some of the challenges of visualizing 3D data. },
url = { http://www.directionsmag.com/articles/service-oriented-visualization-of-virtual-3d-city-models/226560 },
howpublished = { Online Magazine Article },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2012/KD2012a/directionsmag.pdf },
sorting = { 4352 }
}
@inproceedings{HKHD11,
author = { Hildebrandt, Dieter and Klimke, Jan and Hagedorn, Benjamin and Döllner, Jürgen },
title = { Service-Oriented Interactive 3D Visualization of Massive 3D City Models on Thin Clients },
year = { 2011 },
month = may,
abstract = { Virtual 3D city models serve as integration platforms for complex geospatial and georeferenced information and as medium for effective communication of spatial information. In this paper, we present a system architecture for service-oriented, interactive 3D visualization of massive 3D city models on thin clients such as mobile phones and tablets. It is based on high performance, server-side 3D rendering of extended cube maps, which are interactively visualized by corresponding 3D thin clients. As key property, the complexity of the cube map data transmitted between server and client does not depend on the model's complexity. In addition, the system allows the integration of thematic raster and vector geodata into the visualization process. Users have extensive control over the contents and styling of the visual representations. The approach provides a solution for safely, robustly distributing and interactively presenting massive 3D city models. A case study related to city marketing based on our prototype implementation shows the potentials of both server-side 3D rendering and fully interactive 3D thin clients on mobile phones. },
url = { http://dl.acm.org/authorize?432087 },
booktitle = { COM.Geo '11 Proceedings of the 2nd International Conference on Computing for Geospatial Research \& Applications },
project = { NFG HPI },
isbn = { 978-1-4503-0681-2 },
doi = { 10.1145/1999320.1999326 },
sorting = { 1024 }
}
@inproceedings{KHTD2014,
author = { Klimke, Jan and Hagedorn, Benjamin and Trapp, Matthias and Döllner, Jürgen },
title = { Web-based and Mobile Provisioning of Virtual 3D Reconstructions },
year = { 2014 },
pages = { 17--28 },
month = may,
abstract = { Communication of cultural heritage by means of digital information systems has been gaining more and more importance over recent years. Interactive virtual 3D applications enable users to explore 3D virtual reconstructions in real-time, to directly interact with the contained digital cultural heritage artifacts, and to obtain insights into this data. Nevertheless, these artifacts are usually very detailed and complex 3D models that are hard to handle for end-user systems. This paper presents the concept and a prototypical implementation of an image-based, web-based approach for the communication of digital cultural heritage and its provisioning for the Web and mobile devices by the example of the project Colonia3D – a high-detail, virtual reconstruction and high-detail 3D city model of Roman Cologne. Through this web-based and mobile provisioning, complex digital reconstructions can be used, e.g., on-site to match local findings and reconstructions. },
editor = { R. Franken-Wendelstorf and E. Lindinger and J. Sieck },
publisher = { Werner Hülsbusch Verlag },
chapter = { 2 },
booktitle = { Tagungsband der 12. Konferenz Kultur und Informatik: Reality and Virtuality },
isbn = { 978-3-86488-064-3 },
sorting = { 2048 }
}
@inproceedings{KHD2012,
author = { Klimke, Jan and Hagedorn, Benjamin and Döllner, Jürgen },
title = { A Service-Oriented Platform for Interactive 3D Web Mapping },
year = { 2012 },
month = nov,
abstract = { Design, implementation, and operation of interactive 3D map services are faced with a large number of challenges including (a) processing and integration of massive amounts of heterogeneous and distributed 2D and 3D geodata such as terrain models, buildings models, and thematic georeferenced data, (b) assembling, styling, and rendering 3D map contents according to application requirements and design principles, and (c) interactive provisioning of created 3D maps on mobile devices and thin clients as well as their integration as third-party components into domain-specific web and information systems. This paper discusses concept and implementation of a service-oriented platform that addresses these major requirements of 3D web mapping systems. It is based on a separation of concerns for data management, 3D rendering, application logic, and user interaction. The main idea is to divide 3D rendering process into two stages. In the first stage, at the server side, we construct an image-based, omni-directional approximation of the 3D scene by means of multi-layered virtual 3D panoramas; in the second stage, at the client side, we interactively reconstruct the 3D scene based on the panorama. We demonstrate the prototype implementation for real-time 3D rendering service and related iOS 3D client applications. In our case study, we show how to interactively visualize a complex, large-scale 3D city model based on our service-oriented platform. },
keywords = { Service-oriented 3D Mapping, 3D Map Creation, 3D Map Delivery, 3D Map Styling, 3D City Models },
booktitle = { Proceedings of the Symposium of Service-oriented Mapping },
project = { HPI },
sorting = { 8 }
}
@inproceedings{DHK2012,
author = { Döllner, Jürgen and Hagedorn, Benjamin and Klimke, Jan },
title = { Server-Based Rendering of Large 3D Scenes for Mobile Devices Using G-Buffer Cube Maps },
year = { 2012 },
pages = { 97--100 },
month = aug,
abstract = { Large virtual 3D scenes play a major role in growing number of applications, systems, and technologies to effectively communicate complex spatial information. Their web-based provision, in particular on mobile devices, represents a key challenge for system and application development. In contrast to approaches based on streaming 3D scene data to clients, our approach splits 3D rendering into two processes: A server process is responsible for realtime rendering of virtual panoramas, represented by G-buffer cube maps, for a requested camera setting. The client reconstruction process uses these cube maps to reconstruct the 3D scene and allows users to operate on and interact with that representation. The key properties of this approach include that (a) the complexity of transmitted data does not depend on the 3D scene’s complexity; (b) 3D rendering can take place within a controlled and a-priori known server environment; (c) crucial 3D model data never leaves the server environment; and (d) the clients can flexibly extend the 3D cube map viewer by adding both local 3D models and specialized 3D operations. },
booktitle = { Web3D '12 Proceedings of the 17th International Conference on 3D Web Technology },
project = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2012/DHK2012/paper.pdf },
isbn = { 978-1-4503-1432-9 },
doi = { 10.1145/2338714.2338729 },
link1 = { http://dl.acm.org/citation.cfm?id=2338729 },
sorting = { 64 }
}
@inproceedings{Klimke2012b,
  author    = {Klimke, Jan and Döllner, Jürgen},
  title     = {Datenintegration in dienstbasierte 3D-Geovisualisierungssysteme für mobile Geräte},
  booktitle = {Tagungsbände der 32. Wissenschaftlich-Technischen Jahrestagung der DGPF},
  year      = {2012},
  project   = {HPI},
  sorting   = {4864},
  state     = {1}
}
@inproceedings{KHD12,
author = { Klimke, Jan and Hagedorn, Benjamin and Döllner, Jürgen },
title = { A Service-Based Concept for Camera Control in 3D Geovirtual Environments },
year = { 2012 },
abstract = { 3D geovirtual environments (3D GeoVEs) such as virtual 3D city models serve as integration platforms for complex geospatial information and facilitate effective use and communication of that information. Recent developments towards standards and service-based, interactive 3D geovisualization systems enable the large-scale distribution of 3D GeoVEs also by thin client applications that work on mobile devices or in web browsers. To construct such systems, 3D portrayal services can be used as building blocks for service-based rendering. Service-based approaches for 3D user interaction, however, have not been formalized and specified to a similar degree. In this paper, we present a concept for service-based 3D camera control as a key element of 3D user interaction used to explore and manipulate 3D GeoVEs and their objects. It is based on the decomposition of 3D user interaction functionality into a set of services that can be flexibly combined to build automated, assisting, and application-specific 3D user interaction tools, which fit into service-oriented architectures of GIS and SDI based IT solutions. We discuss 3D camera techniques, categories of 3D camera tasks, and derive a collection of general-purpose 3D interaction services. We also explain how to efficiently compose these services and discuss their impact on the architecture of service-based visualization systems. Furthermore, we outline an example of a distributed 3D geovisualization system that shows how the concepts can be applied to applications based on virtual 3D city models. },
booktitle = { Proceedings of the 7th 3D GeoInfo Conference 2012 },
project = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2012/KHD12/300878_1_En_6_Chapter_OnlinePDF.pdf },
sorting = { 2816 }
}
@inproceedings{Klimke2011c,
author = { Klimke, Jan and Hildebrandt, Dieter and Hagedorn, Benjamin and Döllner, Jürgen },
title = { Integrating 3D Data in Service-based Visualization Systems },
year = { 2011 },
abstract = { Georeferenced data is available from a wide range of sources, e.g., Directory Services, Sensor Observation Services, Web Feature Services or even proprietary interfaces. Many of the data originating from an Internet of things will be three-dimensional representing outdoor as well as indoor geographic features and their properties. Based on this data, its integration, and its visualization totally new applications and systems could be designed and implemented supporting various application domains. Recent work in the area of service-based 3D visualization enables high-quality visualization of complex 3D geodata, e.g., 3D city models and 3D indoor building models, on thin clients as well as mobile devices such as smartphones and tablets. This work uses a service-based, image-based visualization approach that decouples the server-side resource-intensive management and rendering of complex, massive 3D geodata from client-side display functionalities: A
Web View Service provides image representations of a 3D scene; these images, which can contain different types of information per pixel, are transmitted to a client application that can reconstruct a 3D representation of this scene. – In this talk, we will describe how to combine 3D geodata originating from the Internet of Things with this service-based approach in a way that allows for the interactive exploration of and interaction with 3D worlds and objects of interest. In detail, this 3D geodata can be integrated into the visualization process a) at the rendering stage of a portrayal service, b) through an image post processing step or c) in the client application itself. Moreover, this data can be visually represented directly by modifying the appearance of existing features, e.g., for visualizing measurements, or indirectly by introducing additional objects, e.g., icons, into the 3D scene. We will discuss advantages and disadvantages of these different approaches for implementing visualization applications using live geodata sources. },
publisher = { ACM },
booktitle = { COM.Geo '11 Proceedings of the 2nd International Conference on Computing for Geospatial Research \& Applications },
project = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2012/KD2012a/Abstract_ComGeoWorkshop_klimke.pdf,fileadmin/user_upload/fachgebiete/doellner/publications/2012/KD2012a/Jan_Klimke_Integrating_3D_Data_in_Service-based.pdf },
isbn = { 978-1-4503-0681-2 },
doi = { 10.1145/1999320.1999395 },
sorting = { 64 }
}
NOTE(review): duplicate citation key KD10b -- a more complete copy of this entry
(with number and files fields) appears later in this file. This copy is disabled
by removing the leading '@'; text outside entries is ignored by BibTeX.
inproceedings{KD10b,
author = { Klimke, Jan and Döllner, Jürgen },
title = { Combining Synchronous and Asynchronous
Collaboration within 3D City Models },
year = { 2010 },
abstract = { This paper presents and approach for combining spatially
distributed synchronous and asynchronous collaboration within 3D city models. Applications use these models as additional communication medium - beside traditional ones like chat, audio, and video conferences - to facilitate communication of georeferenced and geospatial information.
Among many challenges collaboration tools should support both the communication with other collaborators and their awareness of the current collaboration context. To support knowledge construction and gathering, we have designed a collaboration tool that facilitates (a) creation of comments that have 3D references to the virtual 3D city model and (b) information about the context in which these comments are reated. The synchronous collaboration in connection with the creation of non volatile, precisely georeferenced units of information allows users a comprehensible form of cooperation in spatially distributed settings. In our approach media breaks are avoided by using a single 3D user interface
to avoid media switches and disruption in working processes. By visualizing the information associated with a geographic feature or 3D geometry of a virtual 3D city model a coherent representation of content and reference is possible. },
editor = { Fabrikant, S. and Reichenbacher, T. and van Kreveld, M. and Schlieder, C. },
publisher = { Springer },
series = { LNCS },
booktitle = { Geographic Information Science },
project = { HPI },
sorting = { 10240 },
state = { 1 }
}
NOTE(review): duplicate citation key KD10a -- a superset copy of this entry
(identical fields plus files) appears later in this file. This copy is disabled
by removing the leading '@'; text outside entries is ignored by BibTeX.
inproceedings{KD10a,
author = { Klimke, Jan and Döllner, Jürgen },
title = { Geospatial Annotations for 3D Environments and their WFS-based Implementation },
year = { 2010 },
pages = { 379-397 },
abstract = { Collaborative geovisualization provides effective means to communicate spatial information among a group of users. Annotations as one key element of collaborative geovisualization systems enable comprehension of collaboration processes and support time-shifted communication. By annotations we refer to user-generated information such as remarks, comments, findings and any other information related to the 3D environment. They have to be efficiently modeled, stored and visualized while precisely retaining their spatial reference and creation context. Existing models for an-notations generally do not fully support spatial references and, therefore, do not fully take advantage of the spatial relationships associated with annotations. This paper presents a GML-based data model for geospatial annotations that explicitly incorporates spatial references and allows different types of annotations to be stored together with their context of creation. With this approach annotations can be represented as first-class spatial features. Consequently, annotations can be seamlessly integrated into their 3D environment and the author's original intention and message can be better expressed and understood. An OGC Web Feature Service is used as standardized interface for storage and retrieval of annotations, which assures data interoperability with existing geodata infrastructures. We have identi-fied three types of annotation subjects, namely geographic features, geometry, and scene views, represented by their corresponding 2D/3D geometry. The model also defines a point-based approximation for complex geometry, such that annotations can also be used by client application with limited abilities regarding display size, bandwidth or geometry handling. Furthermore we extended our model by annotations that can contain 3D geometry besides textual information. 
In this way the expressiveness of annotations can be further enhanced for communicating spatial relationships such as distances or arrangements of geographic features. },
editor = { Painho, M. and Santos, M.Y. and Pundt, H. },
publisher = { Springer },
series = { Lecture Notes in Geoinformation and Cartography },
booktitle = { Geospatial Thinking },
project = { HPI },
isbn = { 978-3-642-12326-2 },
issn = { 1863-2246 },
doi = { 10.1007/978-3-642-12326-9_20 },
sorting = { 9984 }
}
@inproceedings{KD10a,
author = { Klimke, Jan and Döllner, Jürgen },
title = { Geospatial Annotations for 3D Environments and their WFS-based Implementation },
year = { 2010 },
pages = { 379--397 },
abstract = { Collaborative geovisualization provides effective means to communicate spatial information among a group of users. Annotations as one key element of collaborative geovisualization systems enable comprehension of collaboration processes and support time-shifted communication. By annotations we refer to user-generated information such as remarks, comments, findings and any other information related to the 3D environment. They have to be efficiently modeled, stored and visualized while precisely retaining their spatial reference and creation context. Existing models for annotations generally do not fully support spatial references and, therefore, do not fully take advantage of the spatial relationships associated with annotations. This paper presents a GML-based data model for geospatial annotations that explicitly incorporates spatial references and allows different types of annotations to be stored together with their context of creation. With this approach annotations can be represented as first-class spatial features. Consequently, annotations can be seamlessly integrated into their 3D environment and the author's original intention and message can be better expressed and understood. An OGC Web Feature Service is used as standardized interface for storage and retrieval of annotations, which assures data interoperability with existing geodata infrastructures. We have identified three types of annotation subjects, namely geographic features, geometry, and scene views, represented by their corresponding 2D/3D geometry. The model also defines a point-based approximation for complex geometry, such that annotations can also be used by client application with limited abilities regarding display size, bandwidth or geometry handling. Furthermore we extended our model by annotations that can contain 3D geometry besides textual information. 
In this way the expressiveness of annotations can be further enhanced for communicating spatial relationships such as distances or arrangements of geographic features. },
editor = { Painho, M. and Santos, M.Y. and Pundt, H. },
publisher = { Springer },
series = { Lecture Notes in Geoinformation and Cartography },
booktitle = { Geospatial Thinking },
project = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2010/KD10a/GeoCommentsAGILE2010.pdf },
isbn = { 978-3-642-12326-2 },
issn = { 1863-2246 },
doi = { 10.1007/978-3-642-12326-9_20 },
sorting = { 5376 }
}
@inproceedings{KD10b,
author = { Klimke, Jan and Döllner, Jürgen },
title = { Combining Synchronous and Asynchronous
Collaboration within 3D City Models },
year = { 2010 },
number = { 6292 },
abstract = { This paper presents an approach for combining spatially
distributed synchronous and asynchronous collaboration within 3D city models. Applications use these models as additional communication medium - beside traditional ones like chat, audio, and video conferences - to facilitate communication of georeferenced and geospatial information.
Among many challenges collaboration tools should support both the communication with other collaborators and their awareness of the current collaboration context. To support knowledge construction and gathering, we have designed a collaboration tool that facilitates (a) creation of comments that have 3D references to the virtual 3D city model and (b) information about the context in which these comments are created. The synchronous collaboration in connection with the creation of non volatile, precisely georeferenced units of information allows users a comprehensible form of cooperation in spatially distributed settings. In our approach media breaks are avoided by using a single 3D user interface
to avoid media switches and disruption in working processes. By visualizing the information associated with a geographic feature or 3D geometry of a virtual 3D city model a coherent representation of content and reference is possible. },
editor = { Fabrikant, S.I. and Reichenbacher, T. and van Kreveld, M. and Schlieder, C. },
publisher = { Springer },
series = { LNCS },
booktitle = { Sixth International Conference, GIScience 2010, Zürich, Switzerland, Sep. 14-17 2010, Proceedings },
project = { HPI },
files = { fileadmin/user_upload/fachgebiete/doellner/publications/2010/KD10b/PaperGIScience-Draft.pdf },
sorting = { 3840 }
}
@techreport{K2011,
author = { Klimke, Jan },
title = { Towards 3D Interaction Services for 3D Geovirtual Environments },
institution = { HPI },
year = { 2011 },
abstract = { Service-oriented architectures provide a flexible approach for 3D geodata access, processing, management and visualization. Services for 3D visualization allow for reliable, high quality visualization on client platforms that were otherwise not capable of storing, mapping, or rendering of 3D geodata. This extends the range of applications using 3D visualization of such data. While approaches for interactive visualization on lightweight clients exist, especially camera interaction remains a challenging task. Thin
client applications do not have the information necessary for user interaction that provides, e.g., collision detection with scene object or assisting 3D camera control.
This report provides an overview over the current status of my work in the area of interactive, service-based systems for 3D geovisualization. It outlines progress made in creating useful camera services delivering camera specifications derived from 3D geometry and semantic city model data. Further, my current research in progress is outlined. },
project = { HPI },
sorting = { 8 }
}