MetaVis: Visual Exploration in Visualization Space

Visualization is the transformation of data into readily comprehensible images, and has proven to be an indispensable part of the discovery process in many fields of contemporary science and engineering. A vast number of different visualization methods have been developed, some very general, some only applicable to specific types of data. This makes it increasingly difficult to choose between the many alternatives for a given task. This project is devoted to the study of the space of visualizations itself and aims to develop means for its interactive exploration. A key insight of our approach is that by regarding the visualization process as a complex phenomenon, it becomes amenable to parameter space analysis techniques. Within the scope of this project, methods for systematically structuring this visualization space will be devised and embedded in an interactive web-based framework. In particular, we plan to investigate techniques to automatically infer the performance of individual visualizations for particular tasks based on sparse user classifications, and to provide an environment for interactively presenting and refining these measures. The MetaVis project addresses an important challenge that affects visualization experts as well as users of visualization methods in their daily work.

Publications

2020

    [PDF] [DOI] [Bibtex]
    @article{Kristiansen-2020-VIV,
    author = {Yngve Sekse Kristiansen and Stefan Bruckner},
    title = {Visception: An Interactive Visual Framework for Nested Visualization Design},
    journal = {Computers \& Graphics},
    volume = {92},
    pages = {13--27},
    keywords = {information visualization, nested visualizations, nesting},
    doi = {10.1016/j.cag.2020.08.007},
    abstract = {Nesting is the embedding of charts into the marks of another chart. Related to principles such as Tufte’s rule of utilizing micro/macro readings, nested visualizations have been employed to increase information density, providing compact representations of multi-dimensional and multi-typed data entities. Visual authoring tools are becoming increasingly prevalent, as they make visualization technology accessible to non-expert users such as data journalists, but existing frameworks provide no or only very limited functionality related to the creation of nested visualizations. In this paper, we present an interactive visual approach for the flexible generation of nested multilayer visualizations. Based on a hierarchical representation of nesting relationships coupled with a highly customizable mechanism for specifying data mappings, we contribute a flexible framework that enables defining and editing data-driven multi-level visualizations. As a demonstration of the viability of our framework, we contribute a visual builder for exploring, customizing and switching between different designs, along with example visualizations to demonstrate the range of expression. The resulting system allows for the generation of complex nested charts with a high degree of flexibility and fluidity using a drag and drop interface.},
    year = {2020},
    pdf = "pdfs/Kristiansen-2020-VIV.pdf",
    thumbnails = "images/Kristiansen-2020-VIV.png",
    images = "images/Kristiansen-2020-VIV.jpg",
    project = "MetaVis"
    }
    [PDF] [DOI] [Bibtex]
    @article{StormFurru-2020-VGT,
    author = {Syver Storm-Furru and Stefan Bruckner},
    title = {VA-TRAC: Geospatial Trajectory Analysis for Monitoring, Identification, and Verification in Fishing Vessel Operations},
    journal = {Computer Graphics Forum},
    volume = {39},
    number = {3},
    pages = {101--114},
    keywords = {visual analytics, fisheries, monitoring},
    doi = {10.1111/cgf.13966},
    abstract = {In order to ensure sustainability, fishing operations are governed by many rules and regulations that restrict the use of certain techniques and equipment, specify the species and size of fish that can be harvested, and regulate commercial activities based on licensing schemes. As the world’s second largest exporter of fish and seafood products, Norway invests a significant amount of effort into maintaining natural ecosystem dynamics by ensuring compliance with its constantly evolving science-based regulatory body. This paper introduces VA-TRAC, a geovisual analytics application developed in collaboration with the Norwegian Directorate of Fisheries in order to address this complex task. Our approach uses automatic methods to identify possible catch operations based on fishing vessel trajectories, embedded in an interactive web-based visual interface used to explore the results, compare them with licensing information, and incorporate the analysts’ domain knowledge into the decision making process. We present a data and task analysis based on a close collaboration with domain experts, and the design and implementation of VA-TRAC to address the identified requirements.},
    year = {2020},
    pdf = "pdfs/StormFurru-2020-VGT.pdf",
    thumbnails = "images/StormFurru-2020-VGT.png",
    images = "images/StormFurru-2020-VGT.jpg",
    project = "MetaVis"
    }
    [PDF] [DOI] [VID] [YT] [Bibtex]
    @article{Trautner-2020-SunspotPlots,
    author = {Trautner, T. and Bolte, F. and Stoppel, S. and Bruckner, S.},
    title = {Sunspot Plots: Model-based Structure Enhancement for Dense Scatter Plots},
    journal = {Computer Graphics Forum},
    volume = {39},
    number = {3},
    pages = {551--563},
    keywords = {information visualization, scatterplots, kernel density estimation},
    doi = {10.1111/cgf.14001},
    abstract = {Scatter plots are a powerful and well-established technique for visualizing the relationships between two variables as a collection of discrete points. However, especially when dealing with large and dense data, scatter plots often exhibit problems such as overplotting, making the data interpretation arduous. Density plots are able to overcome these limitations in highly populated regions, but fail to provide accurate information of individual data points. This is particularly problematic in sparse regions where the density estimate may not provide a good representation of the underlying data. In this paper, we present sunspot plots, a visualization technique that communicates dense data as a continuous data distribution, while preserving the discrete nature of data samples in sparsely populated areas. We furthermore demonstrate the advantages of our approach on typical failure cases of scatter plots within synthetic and real-world data sets and validate its effectiveness in a user study.},
    year = {2020},
    pdf = "pdfs/Trautner_2020_SunspotPlots_PDF.pdf",
    thumbnails = "images/Trautner_2020_SunspotPlots_thumb.png",
    images = "images/Trautner_2020_SunspotPlots_thumb.png",
    vid = "vids/Trautner_2020_SunspotPlots_video.mp4",
    youtube = "https://youtu.be/G6l-y6YGjzQ",
    project = "MetaVis"
    }
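    The entry above describes showing dense regions as a continuous distribution while keeping discrete marks in sparse regions. As a rough illustration of that general idea (not the authors' implementation), the following Python sketch uses a standard kernel density estimate to separate dense from sparse samples; the data and the sparseness cutoff are hypothetical.
    # Illustrative sketch only: KDE-based split of a 2D point set into dense and
    # sparse regions (data and the quantile threshold are hypothetical).
    import numpy as np
    import matplotlib.pyplot as plt
    from scipy.stats import gaussian_kde

    rng = np.random.default_rng(0)
    x = np.concatenate([rng.normal(0, 1, 5000), rng.uniform(-4, 4, 50)])
    y = np.concatenate([rng.normal(0, 1, 5000), rng.uniform(-4, 4, 50)])

    kde = gaussian_kde(np.vstack([x, y]))          # density estimate over all samples
    density = kde(np.vstack([x, y]))               # per-sample density values
    sparse = density < np.quantile(density, 0.25)  # hypothetical sparseness cutoff

    # Dense regions as a continuous density image, sparse samples as discrete marks.
    gx, gy = np.mgrid[-5:5:200j, -5:5:200j]
    field = kde(np.vstack([gx.ravel(), gy.ravel()])).reshape(gx.shape)
    plt.imshow(np.rot90(field), extent=[-5, 5, -5, 5], cmap="Blues")
    plt.scatter(x[sparse], y[sparse], s=4, c="black")
    plt.show()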
    [PDF] [Bibtex]
    @INPROCEEDINGS {Bolte-2020-ONC,
    author = "Fabian Bolte and Stefan Bruckner",
    title = "Organic Narrative Charts",
    booktitle = "Proceedings of Eurographics 2020 (Short Papers)",
    year = "2020",
    pages = "93--96"
    doi = "10.2312/egs.20201026",
    month = "may",
    abstract = "Storyline visualizations display the interactions of groups and entities and their development over time. Existing approaches have successfully adopted the general layout from hand-drawn illustrations to automatically create similar depictions. Ward Shelley is the author of several diagrammatic paintings that show the timeline of art-related subjects, such as Downtown Body, a history of art scenes. His drawings include many stylistic elements that are not covered by existing storyline visualizations, like links between entities, splits and merges of streams, and tags or labels to describe the individual elements. We present a visualization method that provides a visual mapping for the complex relationships in the data, creates a layout for their display, and adopts a similar styling of elements to imitate the artistic appeal of such illustrations.We compare our results to the original drawings and provide an open-source authoring tool prototype.",
    pdf = "pdfs/Bolte-2020-ONC.pdf",
    images = "images/Bolte-2020-ONC.jpg",
    thumbnails = "images/Bolte-2020-ONC.png",
    event = "Eurographics 2020",
    keywords = "narrative charts, storylines, aesthetics",
    project = "MetaVis",
    git = "https://github.com/cadanox/orcha"
    }
    [PDF] [DOI] [Bibtex]
    @article{bolte2020splitstreams,
    author= {Bolte, Fabian and Nourani, Mahsan and Ragan, Eric and Bruckner, Stefan},
    journal= {IEEE Transactions on Visualization and Computer Graphics},
    title= {SplitStreams: A Visual Metaphor for Evolving Hierarchies},
    year= {2020},
    keywords= {Information Visualization, Trees, Data Structures and Data Types, Visualization Techniques and Methodologies},
    doi= {10.1109/TVCG.2020.2973564},
    url= {https://arxiv.org/pdf/2002.03891.pdf},
    note= {This paper is accepted and will be published soon.},
    abstract= {The visualization of hierarchically structured data over time is an ongoing challenge and several approaches exist trying to solve it. Techniques such as animated or juxtaposed tree visualizations are not capable of providing a good overview of the time series and lack expressiveness in conveying changes over time. Nested streamgraphs provide a better understanding of the data evolution, but lack the clear outline of hierarchical structures at a given timestep. Furthermore, these approaches are often limited to static hierarchies or exclude complex hierarchical changes in the data, limiting their use cases. We propose a novel visual metaphor capable of providing a static overview of all hierarchical changes over time, as well as clearly outlining the hierarchical structure at each individual time step. Our method allows for smooth transitions between tree maps and nested streamgraphs, enabling the exploration of the trade-off between dynamic behavior and hierarchical structure. As our technique handles topological changes of all types, it is suitable for a wide range of applications. We demonstrate the utility of our method on several use cases, evaluate it with a user study, and provide its full source code.},
    pdf= {pdfs/Bolte-2020-SplitStreams.pdf},
    images= {images/Bolte-2020-SplitStreams.png},
    thumbnails= {images/Bolte-2020-SplitStreams_thumb.png},
    project = "MetaVis",
    git = "https://github.com/cadanox/SplitStreams"
    }
    [PDF] [DOI] [YT] [Bibtex]
    @article{bolte2019visavis,
    author= {Bolte, Fabian and Bruckner, Stefan},
    journal= {IEEE Transactions on Visualization and Computer Graphics},
    title= {Vis-a-Vis: Visual Exploration of Visualization Source Code Evolution},
    year= {2020},
    keywords= {Visualization System and Toolkit Design, User Interfaces, Integrating Spatial and Non-Spatial Data Visualization, Software Visualization},
    doi= {10.1109/TVCG.2019.2963651},
    issn= {2160-9306},
    url= {https://arxiv.org/pdf/2001.02092.pdf},
    note= {This paper is accepted and will be published soon.},
    abstract= {Developing an algorithm for a visualization prototype often involves the direct comparison of different development stages and design decisions, and even minor modifications may dramatically affect the results. While existing development tools provide visualizations for gaining general insight into performance and structural aspects of the source code, they neglect the central importance of result images unique to graphical algorithms. In this paper, we present a novel approach that enables visualization programmers to simultaneously explore the evolution of their algorithm during the development phase together with its corresponding visual outcomes by providing an automatically updating meta visualization. Our interactive system allows for the direct comparison of all development states on both the visual and the source code level, by providing easy to use navigation and comparison tools. The on-the-fly construction of difference images, source code differences, and a visual representation of the source code structure further enhance the user's insight into the states' interconnected changes over time. Our solution is accessible via a web-based interface that provides GPU-accelerated live execution of C++ and GLSL code, as well as supporting a domain-specific programming language for scientific visualization.},
    pdf= {pdfs/Bolte-2019-Visavis.pdf},
    images= {images/Bolte-2019-Visavis.png},
    thumbnails= {images/Bolte-2019-Visavis_thumb.png},
    youtube= {https://www.youtube.com/watch?v=5XO6BU4j1KQ},
    project = "MetaVis"
    }
    [PDF] [Bibtex]
    @incollection {Bolte-2019-MVS,
    author = {Bolte, Fabian and Bruckner, Stefan},
    title = {Measures in Visualization Space},
    booktitle = {Foundations of Data Visualization},
    chapter = {3},
    publisher = {Springer},
    year = {2020},
    pdf = {pdfs/Bolte-2019-MVS.pdf},
    images = {images/Bolte-2019-MVS.png},
    thumbnails = {images/Bolte-2019-MVS.png},
    abstract = {Measurement is an integral part of modern science, providing the fundamental means for evaluation, comparison, and prediction. In the context of visualization, several different types of measures have been proposed, ranging from approaches that evaluate particular aspects of individual visualization techniques, their perceptual characteristics, and even economic factors. Furthermore, there are approaches that attempt to provide means for measuring general properties of the visualization process as a whole. Measures can be quantitative or qualitative, and one of the primary goals is to provide objective means for reasoning about visualizations and their effectiveness. As such, they play a central role in the development of scientific theories for visualization. In this chapter, we provide an overview of the current state of the art, survey and classify different types of visualization measures, characterize their strengths and drawbacks, and provide an outline of open challenges for future research.},
    note = {This is a preprint of a chapter for a planned book that was initiated by participants of the Dagstuhl Seminar 18041 ("Foundations of Data Visualization") and that is expected to be published by Springer. The final book chapter will differ from this preprint.},
    url = {https://arxiv.org/abs/1909.05295},
    project = "MetaVis"
    }
    [PDF] [DOI] [Bibtex]
    @article{Solteszova-2019-MLT,
    author = {Solteszova, V. and Smit, N. N. and Stoppel, S. and Grüner, R. and Bruckner, S.},
    title = {Memento: Localized Time-Warping for Spatio-Temporal Selection},
    journal = {Computer Graphics Forum},
    volume = {39},
    number = {1},
    pages = {231--243},
    year = {2020},
    keywords = {interaction, temporal data, visualization, spatio-temporal projection},
    images = "images/Solteszova-2019-MLT.jpg",
    thumbnails = "images/Solteszova-2019-MLT-1.jpg",
    pdf = "pdfs/Solteszova-2019-MLT.pdf",
    doi = {10.1111/cgf.13763},
    abstract = {Interaction techniques for temporal data are often focused on affecting the spatial aspects of the data, for instance through the use of transfer functions, camera navigation or clipping planes. However, the temporal aspect of the data interaction is often neglected. The temporal component is either visualized as individual time steps, an animation or a static summary over the temporal domain. When dealing with streaming data, these techniques are unable to cope with the task of re-viewing an interesting local spatio-temporal event, while continuing to observe the rest of the feed. We propose a novel technique that allows users to interactively specify areas of interest in the spatio-temporal domain. By employing a time-warp function, we are able to slow down time, freeze time or even travel back in time, around spatio-temporal events of interest. The combination of such a (pre-defined) time-warp function and brushing directly in the data to select regions of interest allows for a detailed review of temporally and spatially localized events, while maintaining an overview of the global spatio-temporal data. We demonstrate the utility of our technique with several usage scenarios.},
    project = "MetaVis,ttmedvis,VIDI"
    }
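    The entry above is built around a time-warp function that slows down, freezes, or reverses time near events of interest. Below is a minimal one-dimensional sketch of such a warp with hypothetical parameters; the paper's technique additionally localizes the warp spatially and couples it with brushing in the data.
    # Minimal 1D time-warp sketch (parameters are hypothetical): playback speed
    # drops to 1/slowdown inside a Gaussian-weighted window around t_event.
    import numpy as np

    def time_warp(t, t_event, width, slowdown):
        """Map wall-clock time t (monotonic array) to warped data time."""
        t = np.asarray(t, dtype=float)
        weight = np.exp(-0.5 * ((t - t_event) / width) ** 2)   # ~1 near the event
        speed = 1.0 - weight * (1.0 - 1.0 / slowdown)          # local playback speed
        return np.cumsum(np.gradient(t) * speed)               # integrate speed over time

    wall_clock = np.linspace(0.0, 10.0, 1001)
    data_time = time_warp(wall_clock, t_event=5.0, width=0.5, slowdown=4.0)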

2019

    [DOI] [Bibtex]
    @incollection{Smit-2019-AtlasVis,
    title={Towards Advanced Interactive Visualization for Virtual Atlases},
    author={Smit, Noeska and Bruckner, Stefan},
    booktitle={Biomedical Visualisation},
    pages={85--96},
    year={2019},
    publisher={Springer},
    doi = {10.1007/978-3-030-19385-0_6},
    url = "http://noeskasmit.com/wp-content/uploads/2019/07/Smit_AtlasVis_2019.pdf",
    images = "images/Smit-2019-AtlasVis.png",
    thumbnails = "images/Smit-2019-AtlasVis.png",
    abstract = "An atlas is generally defined as a bound collection of tables, charts or illustrations describing a phenomenon. In an anatomical atlas for example, a collection of representative illustrations and text describes anatomy for the purpose of communicating anatomical knowledge. The atlas serves as reference frame for comparing and integrating data from different sources by spatially or semantically relating collections of drawings, imaging data, and/or text. In the field of medical image processing, atlas information is often constructed from a collection of regions of interest, which are based on medical images that are annotated by domain experts. Such an atlas may be employed for example for automatic segmentation of medical imaging data. The combination of interactive visualization techniques with atlas information opens up new possibilities for content creation, curation, and navigation in virtual atlases. With interactive visualization of atlas information, students are able to inspect and explore anatomical atlases in ways that were not possible with the traditional method of presenting anatomical atlases in book format, such as viewing the illustrations from other viewpoints. With advanced interaction techniques, it becomes possible to query the data that forms the basis for the atlas, thus empowering researchers to access a wealth of information in new ways. So far, atlasbased visualization has been employed for mainly medical education, as well as biological research. In this survey, we provide an overview of current digital biomedical atlas tasks and applications and summarize relevant visualization techniques. We discuss recent approaches for providing next-generation visual interfaces to navigate atlas data that go beyond common text-based search and hierarchical lists. Finally, we reflect on open challenges and opportunities for the next steps in interactive atlas visualization. ",
    project = "ttmedvis,MetaVis,VIDI"
    }
    [PDF] [DOI] [YT] [Bibtex]
    @ARTICLE {Stoppel-2019-LFL,
    author = "Stoppel, Sergej and Bruckner, Stefan",
    title = "LinesLab: A Flexible Low-Cost Approach for the Generation of Physical Monochrome Art",
    journal = "Computer Graphics Forum",
    year = "2019",
    abstract = "The desire for the physical generation of computer art has seen a significant body of research that has resulted in sophisticated robots and painting machines, together with specialized algorithms mimicking particular artistic techniques. The resulting setups are often expensive and complex, making them unavailable for recreational and hobbyist use. In recent years, however, a new class of affordable low-cost plotters and cutting machines has reached the market. In this paper, we present a novel system for the physical generation of line and cut-out art based on digital images, targeted at such off-the-shelf devices. Our approach uses a meta-optimization process to generate results that represent the tonal content of a digital image while conforming to the physical and mechanical constraints of home-use devices. By flexibly combining basic sets of positional and shape encodings, we are able to recreate a wide range of artistic styles. Furthermore, our system optimizes the output in terms of visual perception based on the desired viewing distance, while remaining scalable with respect to the medium size.",
    pdf = "pdfs/Stoppel-2019-LFL.pdf",
    images = "images/Stoppel-2019-LFL.jpg",
    thumbnails = "images/Stoppel-2019-LFL.png",
    publisher = "The Eurographics Association and John Wiley and Sons Ltd.",
    doi = "10.1111/cgf.13609",
    youtube = "https://www.youtube.com/watch?v=WdZJmU6fOAY",
    project = "MetaVis"
    }
    [PDF] [DOI] [YT] [Bibtex]
    @ARTICLE {Bruckner-2019-DVM,
    author = "Bruckner, Stefan",
    title = "Dynamic Visibility-Driven Molecular Surfaces",
    journal = "Computer Graphics Forum",
    year = "2019",
    volume = "38",
    number = "2",
    pages = "317--329",
    abstract = "Molecular surface representations are an important tool for the visual analysis of molecular structure and function. In this paper, we present a novel method for the visualization of dynamic molecular surfaces based on the Gaussian model. In contrast to previous approaches, our technique does not rely on the construction of intermediate representations such as grids or triangulated surfaces. Instead, it operates entirely in image space, which enables us to exploit visibility information to efficiently skip unnecessary computations. With this visibility-driven approach, we can visualize dynamic high-quality surfaces for molecules consisting of millions of atoms. Our approach requires no preprocessing, allows for the interactive adjustment of all properties and parameters, and is significantly faster than previous approaches, while providing superior quality.",
    pdf = "pdfs/Bruckner-2019-DVM.pdf",
    images = "images/Bruckner-2019-DVM-1.jpg",
    thumbnails = "images/Bruckner-2019-DVM.png",
    publisher = "The Eurographics Association and John Wiley and Sons Ltd.",
    doi = "10.1111/cgf.13640",
    youtube = "https://www.youtube.com/watch?v=aZmDhTbJlAM",
    git = "https://github.com/sbruckner/dynamol.git",
    project = "MetaVis"
    }
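    The molecular surfaces in the entry above are based on the Gaussian model, in which each atom contributes a Gaussian kernel and the surface is an isosurface of the summed density. The sketch below only illustrates that underlying model with hypothetical parameters; the paper's actual contribution is an image-space, visibility-driven evaluation of it, which is not reproduced here.
    # Sketch of the Gaussian density model (sharpness and iso-value are
    # illustrative choices, not taken from the paper).
    import numpy as np

    def gaussian_density(points, centers, radii, sharpness=2.0):
        """Summed per-atom Gaussian density evaluated at the query points."""
        # points: (M, 3), centers: (N, 3), radii: (N,)
        d2 = ((points[:, None, :] - centers[None, :, :]) ** 2).sum(axis=-1)  # (M, N)
        return np.exp(-sharpness * d2 / radii[None, :] ** 2).sum(axis=-1)    # (M,)

    atoms = np.array([[0.0, 0.0, 0.0], [1.5, 0.0, 0.0]])   # two hypothetical atoms
    radii = np.array([1.2, 1.7])
    query = np.array([[0.75, 0.0, 0.0]])
    on_or_inside = gaussian_density(query, atoms, radii) >= 1.0   # iso-value 1.0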
    [PDF] [DOI] [Bibtex]
    @ARTICLE {Bruckner-2018-MSD,
    author = "Stefan Bruckner and Tobias Isenberg and Timo Ropinski and Alexander Wiebel",
    title = "A Model of Spatial Directness in Interactive Visualization",
    journal = "IEEE Transactions on Visualization and Computer Graphics",
    volume = "25",
    number = "8",
    year = "2019",
    abstract = "We discuss the concept of directness in the context of spatial interaction with visualization. In particular, we propose a modelthat allows practitioners to analyze and describe the spatial directness of interaction techniques, ultimately to be able to better understandinteraction issues that may affect usability. To reach these goals, we distinguish between different types of directness. Each type ofdirectness depends on a particular mapping between different spaces, for which we consider the data space, the visualization space, theoutput space, the user space, the manipulation space, and the interaction space. In addition to the introduction of the model itself, we alsoshow how to apply it to several real-world interaction scenarios in visualization, and thus discuss the resulting types of spatial directness,without recommending either more direct or more indirect interaction techniques. In particular, we will demonstrate descriptive andevaluative usage of the proposed model, and also briefly discuss its generative usage.",
    pdf = "pdfs/Bruckner-2018-MSD.pdf",
    images = "images/Bruckner-2018-MSD.jpg",
    thumbnails = "images/Bruckner-2018-MSD.png",
    doi = "10.1109/TVCG.2018.2848906",
    project = "MetaVis"
    }
    [PDF] [DOI] [VID] [Bibtex]
    @ARTICLE {Stoppel-2019-FVI,
    author = "Sergej Stoppel and Magnus Paulson Erga and Stefan Bruckner",
    title = "Firefly: Virtual Illumination Drones for Interactive Visualization",
    journal = "IEEE Transactions on Visualization and Computer Graphics",
    year = "2019",
    volume = "25",
    pages = "1204-1213",
    abstract = "Light specification in three dimensional scenes is a complex problem and several approaches have been presented that aim to automate this process. However, there are many scenarios where a static light setup is insufficient, as the scene content and camera position may change. Simultaneous manual control over the camera and light position imposes a high cognitive load on the user. To address this challenge, we introduce a novel approach for automatic scene illumination with Fireflies. Fireflies are intelligent virtual light drones that illuminate the scene by traveling on a closed path. The Firefly path automatically adapts to changes in the scene based on an outcome-oriented energy function. To achieve interactive performance, we employ a parallel rendering pipeline for the light path evaluations. We provide a catalog of energy functions for various application scenarios and discuss the applicability of our method on several examples.",
    pdf = "pdfs/VIS2018-Firefly.pdf",
    vid = "vids/FinalVideo.mp4",
    images = "images/Teaser.png",
    thumbnails = "images/HeadRightCroppedThumbnail.png",
    doi = "10.1109/TVCG.2018.2864656",
    project = "MetaVis"
    }

2018

    [PDF] [DOI] [YT] [Bibtex]
    @ARTICLE {Magnus-2018-VPI,
    author = "Jens G. Magnus and Stefan Bruckner",
    title = "Interactive Dynamic Volume Illumination with Refraction and Caustics",
    journal = "IEEE Transactions on Visualization and Computer Graphics",
    year = "2018",
    volume = "24",
    number = "1",
    pages = "984--993",
    month = "jan",
    abstract = "In recent years, significant progress has been made indeveloping high-quality interactive methods for realistic volumeillumination. However, refraction -- despite being an important aspectof light propagation in participating media -- has so far only receivedlittle attention. In this paper, we present a novel approach forrefractive volume illumination including caustics capable of interactiveframe rates. By interleaving light and viewing ray propagation, ourtechnique avoids memory-intensive storage of illumination informationand does not require any precomputation. It is fully dynamic and allparameters such as light position and transfer function can be modifiedinteractively without a performance penalty.",
    pdf = "pdfs/Magnus-2018-IDV.pdf",
    images = "images/Magnus-2018-IDV.jpg",
    thumbnails = "images/Magnus-2018-IDV.png",
    youtube = "https://www.youtube.com/watch?v=3tn6sSXw4NQ",
    doi = "10.1109/TVCG.2017.2744438",
    event = "IEEE SciVis 2017",
    keywords = "interactive volume rendering, illumination, refraction, shadows, caustics",
    location = "Phoenix, USA",
    project = "MetaVis"
    }
    [PDF] [DOI] [YT] [Bibtex]
    @INPROCEEDINGS {Stoppel-2018-SSW,
    author = "Sergej Stoppel and Stefan Bruckner",
    title = "Smart Surrogate Widgets for Direct Volume Manipulation",
    booktitle = "Proceedings of IEEE PacificVis 2018",
    year = "2018",
    pages = "36--45",
    month = "apr",
    abstract = "Interaction is an essential aspect in volume visualization, yet commonmanipulation tools such as bounding boxes or clipping planewidgets provide rather crude tools as they neglect the complex structureof the underlying data. In this paper, we introduce a novelvolume interaction approach based on smart widgets that are automaticallyplaced directly into the data in a visibility-driven manner.By adapting to what the user actually sees, they act as proxies thatallow for goal-oriented modifications while still providing an intuitiveset of simple operations that is easy to control. In particular, ourmethod is well-suited for direct manipulation scenarios such as touchscreens, where traditional user interface elements commonly exhibitlimited utility. To evaluate out approach we conducted a qualitativeuser study with nine participants with various backgrounds.",
    pdf = "pdfs/Stoppel-2018-SSW.pdf",
    images = "images/Stoppel-2018-SSW.jpg",
    thumbnails = "images/Stoppel-2018-SSW.png",
    youtube = "https://www.youtube.com/watch?v=wMRw-W0SrLk",
    event = "IEEE PacificVis 2018",
    keywords = "smart interfaces, volume manipulation, volume visualization",
    doi = "10.1109/PacificVis.2018.00014",
    project = "MetaVis"
    }