@inproceedings{alissandrakis2017uma,
	author		= {Aris Alissandrakis and Nico Reski},
	title		= {{Using {Mobile Augmented Reality} to Facilitate Public Engagement}},
	booktitle	= {Extended Papers of the International Symposium on Digital Humanities (DH 2016)},
	editor		= {Koraljka Golub and Marcelo Milrad},
	volume		= {2021},
	series		= {CEUR Workshop Proceedings},
	pages		= {99--109},
	address		= {Växjö, Sweden},
	publisher	= {Sun SITE Central Europe Workshop Proceedings (CEUR-WS)},
	year		= {2017},
	language	= {english},
	url			= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-69265},
	keywords	= {Augmented Reality, public engagement, crowdsourcing},
	abstract	= {This paper presents our initial efforts towards the development of a framework for facilitating public engagement through the use of mobile Augmented Reality (mAR), that fall under the overall project title "Augmented Reality for Public Engagement" (PEAR). We present the concept, implementation, and discuss the results from the deployment of a mobile phone app (PEAR 4 VXO). The mobile app was used for a user study in conjunction with a campaign carried out by Växjö municipality (Sweden) while exploring how to get citizens more engaged in urban planning actions and decisions. These particular activities took place during spring 2016.

	One of the salient features of our approach is that it combines novel ways of using mAR together with social media, online databases, and sensors, to support public engagement. In addition, the data collection process and audience engagement were tested in a follow-up limited deployment. The analysis and outcomes of our initial results validate the overall concept and indicate the potential usefulness of the app as a tool, but also highlight the need for an active campaign on the part of the stakeholders.
	
	Our future efforts will focus on addressing some of the problems and challenges that we have identified during the different phases of this user study.}
}

@inproceedings{alissandrakis2018vdt,
	author		= {Aris Alissandrakis and Nico Reski and Mikko Laitinen and Jukka Tyrkkö and Magnus Levin and Jonas Lundberg},
	title		= {{Visualizing dynamic text corpora using {Virtual Reality}}},
	booktitle	= {The 39th Annual Conference of the International Computer Archive for Modern and Medieval English (ICAME 39): Corpus Linguistics and Changing Society},
	series		= {ICAME 39 Book of Abstracts},
	pages		= {205},
	address		= {Tampere, Finland},
	publisher	= {International Computer Archive of Modern and Medieval English (ICAME)},
	date 		= {2018-05-30/2018-06-03},
	language	= {english},
	url			= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-75064},
	abstract	= {In recent years, data visualization has become a major area in Digital Humanities research, and the same holds true also in linguistics. The rapidly increasing size of corpora, the emergence of dynamic real-time streams, and the availability of complex and enriched metadata have made it increasingly important to facilitate new and innovative approaches to presenting and exploring primary data. This demonstration showcases the uses of Virtual Reality (VR) in the visualization of geospatial linguistic data using data from the Nordic Tweet Stream (NTS) project (see Laitinen et al 2017). The NTS data for this demonstration comprises a full year of geotagged tweets (12,443,696 tweets from 273,648 user accounts) posted within the Nordic region (Denmark, Finland, Iceland, Norway, and Sweden). The dataset includes over 50 metadata parameters in addition to the tweets themselves.

	We demonstrate the potential of using VR to efficiently find meaningful patterns in vast streams of data. The VR environment allows an easy overview of any of the features (textual or metadata) in a text corpus. Our focus will be on the language identification data, which provides a previously unexplored perspective into the use of English and other non-indigenous languages in the Nordic countries alongside the native languages of the region.

	Our VR prototype utilizes the HTC Vive headset for a room-scale VR scenario, and it is being developed using the Unity3D game development engine. Each node in the VR space is displayed as a stacked cuboid, the equivalent of a bar chart in a three-dimensional space, summarizing all tweets at one geographic location for a given point in time (see: https://tinyurl.com/nts-vr). Each stacked cuboid represents information of the three most frequently used languages, appropriately color coded, enabling the user to get an overview of the language distribution at each location. The VR prototype further encourages users to move between different locations and inspect points of interest in more detail (overall location-related information, a detailed list of all languages detected, the most frequently used hashtags). An underlying map outlines country borders and facilitates orientation. In addition to spatial movement through the Nordic areas, the VR system provides an interface to explore the Twitter data based on time (days, weeks, months, or time of predefined special events), which enables users to explore data over time (see: https://tinyurl.com/nts-vr-time).

	In addition to demonstrating how the VR methods aid data visualization and exploration, we will also briefly discuss the pedagogical implications of using VR to showcase linguistic diversity.}
}

@article{alissandrakis2019vrc,
	author		= {Aris Alissandrakis and Nico Reski and Mikko Laitinen and Jukka Tyrkkö and Jonas Lundberg and Magnus Levin},
	title		= {{Visualizing rich corpus data using virtual reality}},
	journal		= {Studies in Variation, Contacts and Change in English: Corpus Approaches into World Englishes and Language Contrasts},
	series 		= {VARIENG e-Series},
	editor 		= {Hanna Parviainen and Mark Kaunisto and Päivi Pahta},
	pages		= {},
	volume		= {20},
	number		= {},
	address		= {Helsinki, Finland},
	publisher	= {Research Unit for Variation, Contacts and Change in English (VARIENG)},
	year		= {2019},
	month		= {12},
	language	= {english},
	url 		= {https://varieng.helsinki.fi/series/volumes/20/alissandrakis_et_al/},
	doi 		= {},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-90516},
	issn		= {1797-4453},
	keywords	= {},
	abstract	= {We demonstrate an approach that utilizes immersive virtual reality (VR) to explore and interact with corpus linguistics data. Our case study focuses on the language identification parameter in the Nordic Tweet Stream corpus, a dynamic corpus of Twitter data where each tweet originated within the Nordic countries. We demonstrate how VR can provide previously unexplored perspectives into the use of English and other non-indigenous languages in the Nordic countries alongside the native languages of the region and showcase its geospatial variation. We utilize a head-mounted display (HMD) for a room-scale VR scenario that allows 3D interaction by using hand gestures. In addition to spatial movement through the Nordic areas, the interface enables exploration of the Twitter data based on time (days, weeks, months, or time of predefined special events), making it particularly useful for diachronic investigations.

	In addition to demonstrating how the VR methods aid data visualization and exploration, we briefly discuss the pedagogical implications of using VR to showcase linguistic diversity. Our empirical results detail students’ reactions to working in this environment. The discussion part examines the benefits, prospects and limitations of using VR in visualizing corpus data.}
}

@inproceedings{amorim2025coa,
	author		= {Jorge H. Amorim and Aitor {Aldama Campino} and Christian Asker and Carlo Navarra and Tina-Simone Neset and Nico Reski and Isabel Ribeiro and David Segersson and Katerina Vrotsou and Fuxing Wang and Emma Wingstedt and Lotten Wiréhn},
	title		= {{Co-creation of an interactive climate visualization tool towards the adaptation of Swedish cities to heat}},
	booktitle	= {Poster session at The 12th International Conference on Urban Climate (ICUC 2025)},
	pages		= {ICUC12-649},
	address		= {Rotterdam, The Netherlands},
	publisher	= {International Association for Urban Climate (IAUC)},
	date		= {2025-07-07/2025-07-11},
	language	= {english},
	url			= {https://doi.org/10.5194/icuc12-649},
	doi			= {10.5194/icuc12-649}, 
	abstract	= {The record hot summer of 2018 and IPCC projections for Northern Europe have raised awareness of the potential impacts of heat on human comfort and health even over high-latitude cities. However, only one-third of Swedish municipalities have developed routines, checklists and action plans to counter extreme heat. It is amply agreed that the creation of adaptation strategies and action plans for heatwaves in the future should be based on reliable, detailed and tailored climate data, which is lacking. Realising this challenge, this study presents the results from the on-going multi-disciplinary research project BRIGHT - Advancing knowledge and tools for the adaptation of Swedish cities to heat, with a focus on: (1) an event-based dynamical downscaling of the urban climate of selected Swedish cities for present and future climate. Results are presented at both 300 m and 1 m resolution with the goal of providing a complete description of the meteorological conditions affecting human comfort. (2) summer campaigns with a network of low-cost thermohygrometers for the period 2022-2024. These observations provide a better representation of the spatial variability of temperature and humidity across different local climate zones. We look specifically at the urban heat island of Stockholm and the cooling effect of parks. (3) a citizen sensing mobile application has been developed and put into action during the summers of 2023 and 2024 with the objective of engaging urban dwellers in providing feedback on perceived thermal comfort. These results are analysed with the help of local weather observations and model data. (4) an interactive visualization tool prototype is being co-designed with municipal stakeholders with the goal of assisting in their climate adaptation work. This tool enables the identification of hot spots, which may represent an additional risk during a heat wave, and of cooler areas, usually associated with urban green infrastructure.}
}

@mastersthesis{reski2013eni,
    author 		= {Nico Reski},
    title 		= {{Exploring new interaction mechanisms to support information sharing and collaboration using large multi-touch displays}},
    type		= {Bachelor's thesis},
    school 		= {Hochschule für Technik und Wirtschaft Berlin},
    pages		= {93},
    address 	= {Berlin, Germany},
    date 		= {2013-08-20},
    language	= {english},
	url			= {https://sisis.rz.htw-berlin.de/search?bvnr=BV041369569},
	keywords	= {interactive tabletop, multi-touch, collaboration, information sharing, natural user interface, tangible user interface, co-location, digital storytelling, Microsoft Surface, SUR40},
	abstract	= {The rapid evolution of computer technologies entails that those technologies become available to an ever broader audience. But with new technologies, new challenges arise. Especially in the education sector, Information and Communications Technology (ICT) is used to enhance the learning process and to support the learner. Multi-touch enabled devices have become more and more attractive to users, but they force users to turn away from each other and focus on their own multi-touch display. With the introduction of interactive tabletop hardware, multi-touch interaction is taken to the next level, since large displays allow and invite not just one but multiple users at the same time. However, this presents designers and developers with new challenges in terms of how best to use the interaction possibilities and the workspace offered by such large displays in order to support active collaboration and information sharing in an education scenario.

	This thesis evaluates the use of Natural User Interface (NUI) and Tangible User Interface (TUI) design principles in the process of interactive co-located collaboration in technology-enhanced learning activities. For this purpose, an interactive tabletop application with Microsoft Surface in the context of digital storytelling was implemented. Based on this tabletop prototype, a user interaction study with ten participants was conducted. The evaluation considered the users' subjective reaction and acceptance of those User Interface (UI) paradigms as well as their level of collaboration and communication. The results indicate that the use of a NUI on large interactive tabletops supports co-located collaboration and encourages the exchange of information. However, although the use of a TUI can add value to the prototype's operating principles, its suitability for co-located collaboration could neither be confirmed nor disproved based on the current study. Finally, the thesis proposes solutions to the identified problems as well as recommendations for future work.}
}

@inproceedings{reski2014eni,
	author		= {Nico Reski and Susanna Nordmark and Marcelo Milrad},
	title		= {{Exploring New Interaction Mechanisms to Support Information Sharing and Collaboration Using Large Multi-touch Displays in the Context of {Digital Storytelling}}},
	booktitle	= {Proceedings of the 14th International Conference on Advanced Learning Technologies (ICALT 2014)},
	pages		= {176--180},
	address		= {Athens, Greece},
	publisher	= {Institute of Electrical and Electronics Engineers (IEEE)},
	date		= {2014-07-07/2014-07-10},
	language	= {english},
	url			= {https://doi.org/10.1109/ICALT.2014.59},
	doi 		= {10.1109/ICALT.2014.59},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-34113},
	isbn		= {978-1-4799-4038-7},
	issn		= {2161-3761},
	keywords	= {interactive tabletops, mobile digital storytelling, collaborative learning, tangible user interfaces},
	abstract	= {A wide range of Information and Communications Technologies (ICT) have been used to support teaching and to enhance the learning process over the last decades. With the recent introduction of large interactive tabletops, multi-touch interaction is taken to the next level, since large displays allow and invite not just one but multiple users to interact and collaborate at the same time. The latter presents designers and developers with new challenges in terms of interaction possibilities to promote active collaboration and information sharing. This paper evaluates the use of novel Tangible User Interface (TUI) approaches for the design of an interactive tabletop application conceived to support co-located collaborative learning in the particular context of Digital Storytelling (DS). We present the results of a user interaction study, which considers the users' subjective reaction and acceptance of these User Interface (UI) paradigms, as well as their level of collaboration and communication while working together. The results of the study indicated that the users very quickly adapted to working in close collaboration using the provided multi-touch functionalities. Furthermore, users appreciated the possibility of closely discussing, conversing, and exchanging information with their peers through simultaneous interactions on the multi-touch display.}
}

@mastersthesis{reski2015cyp,
    author 		= {Nico Reski},
    title 		= {{Change your Perspective: Exploration of a {3D} Network created with {Open Data} in an Immersive {Virtual Reality} Environment using a Head-mounted Display and Vision-based Motion Controls}},
    school 		= {Linnaeus University},
    pages		= {194},
    address 	= {Växjö, Sweden},
    date 		= {2015-10-26},
    language	= {english},
	url			= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-46779},
	keywords	= {virtual reality, human-computer interaction, immersive interaction, natural user interface, information visualization, open data, head-mounted display, vision-based motion controls},
	abstract	= {Year after year, technologies are evolving at an incredibly rapid pace, becoming faster, more complex, more accurate, and more immersive. Looking back just a decade, interaction technologies in particular have made a major leap. Just two years ago, in 2013, after being researched for quite some time, the hype around virtual reality (VR) aroused renewed enthusiasm, finally reaching mainstream attention as so-called head-mounted displays (HMD), devices worn on the head to grant a visual peek into the virtual world, gained more and more acceptance with end-users. Currently, humans interact with computers in a very counter-intuitive, two-dimensional way. The ability to experience digital content in humans' most natural manner, by simply looking around and perceiving information from their surroundings, has the potential to be a major game changer in how we perceive and eventually interact with digital information. However, this confronts designers and developers with new challenges in how to apply these exciting technologies, supporting interaction mechanisms to naturally explore digital information in the virtual world, ultimately overcoming real-world boundaries. Within the virtual world, the only limit is our imagination.

	This thesis investigates how to naturally interact with and explore information based on open data within an immersive virtual reality environment using a head-mounted display and vision-based motion controls. For this purpose, an immersive VR application visualizing information as a network of European capital cities has been implemented, offering interaction through gesture input. The application places a major focus on the exploration of the generated network and the consumption of the displayed information. While the user interaction study conducted with eleven participants investigated their acceptance of the developed prototype, estimating their workload and examining their explorative behaviour, the additional dialogue with five experts in the form of explorative discussions provided further feedback on the prototype’s design and concept. The results indicate the participants’ enthusiasm and excitement towards the novelty and intuitiveness of exploring information in a less traditional way than before, while challenging them with the applied interface and interaction design in a positive manner. The design and concept were also accepted by the experts, who valued the idea and implementation. They provided constructive feedback on the visualization of the information and encouraged being even bolder by making more use of the available 3D environment. Finally, the thesis discusses these findings and proposes recommendations for future work.}
}

@inproceedings{reski2016cyp,
	author		= {Nico Reski and Aris Alissandrakis},
	title		= {{Change your Perspective: Exploration of a {3D} Network created from {Open Data} in an Immersive {Virtual Reality} Environment}},
	booktitle	= {The 9th International Conference on Advances in Computer-Human Interactions (ACHI 2016)},
	pages		= {403--410},
	address		= {Venice, Italy},
	publisher	= {International Academy, Research and Industry Association (IARIA)},
	date		= {2016-04-24/2016-04-28},
	language	= {english},
	url			= {http://www.thinkmind.org/index.php?view=article&articleid=achi_2016_19_30_20107},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-52379},
	issn		= {2308-4138},
	isbn		= {978-1-61208-468-8},
	keywords	= {human-computer interaction, virtual reality, immersive interaction, information visualization},
	abstract	= {This paper investigates an approach of how to naturally interact and explore information (based on open data) within an immersive virtual reality environment (VRE) using a head-mounted display and vision-based motion controls. We present the results of a user interaction study that investigated the acceptance of the developed prototype, estimated the workload as well as examined the participants’ behavior. Additional discussions with experts provided further feedback towards the prototype’s overall design and concept. The results indicate that the participants were enthusiastic regarding the novelty and intuitiveness of exploring information in a VRE, as well as were challenged (in a positive manner) with the applied interface and interaction design. The presented concept and design were well received by the experts, who valued the idea and implementation and encouraged to be even bolder, making more use of the available 3D environment.}
}

@inproceedings{reski2018uaa,
	author		= {Nico Reski and Aris Alissandrakis},
	title		= {{Using an {Augmented Reality} Cube-Like Interface and {3D} Gesture-Based Interaction to Navigate and Manipulate Data}},
	booktitle	= {Proceedings of the 11th International Symposium on Visual Information Communication and Interaction (VINCI '18)},
	pages		= {92--96},
	address		= {Växjö, Sweden},
	publisher	= {Association for Computing Machinery (ACM)},
	date		= {2018-08-13/2018-08-15},
	language	= {english},
	url			= {https://doi.org/10.1145/3231622.3231625},
	doi 		= {10.1145/3231622.3231625},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-77132},
	isbn		= {978-1-4503-6501-7},
	keywords	= {3D gesture-based interaction, 3D user interface, augmented reality, human-computer interaction, interaction design},
	abstract	= {In this paper we describe our work-in-progress to create an interface that enables users to browse and select data within an Augmented Reality environment, using a virtual cube object that can be interacted with through 3D gestural input. We present the prototype design (including the graphical elements), describe the interaction possibilities of touching the cube with the hand/finger, and put the prototype into the context of our Augmented Reality for Public Engagement (PEAR) framework. An interactive prototype was implemented and runs on a typical off-the-shelf smart-phone device.}
}

@inproceedings{reski2019ceo,
	author		= {Nico Reski and Aris Alissandrakis and Jukka Tyrkkö},
	title		= {{Collaborative exploration of rich corpus data using immersive virtual reality and non-immersive technologies}},
	booktitle	= {2nd International Conference: Approaches to Digital Discourse Analysis (ADDA 2)},
	series		= {ADDA 2 Book of Abstracts},
	pages		= {7--9},
	address		= {Turku, Finland},
	publisher	= {University of Turku},
	date		= {2019-05-23/2019-05-25},
	language	= {english},
	url			= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-83858},
	keywords	= {virtual reality, Nordic Tweet Stream, digital humanities, immersive analytics},
	abstract	= {In recent years, large textual data sets, comprising many data points and rich metadata, have become a common object of investigation and analysis. Information Visualization and Visual Analytics provide practical tools for visual data analysis, most commonly as interactive two-dimensional (2D) visualizations that are displayed through normal computer monitors. At the same time, display technologies have evolved rapidly over the past decade. In particular, emerging technologies such as virtual reality (VR), augmented reality (AR), or mixed reality (MR) have become affordable and more user-friendly (LaValle 2016). Under the banner of “Immersive Analytics”, researchers started to explore the novel application of such immersive technologies for the purpose of data analysis (Marriott et al. 2018).

	By using immersive technologies, researchers hope to increase motivation and user engagement for the overall data analysis activity as well as providing different perspectives on the data. This can be particularly helpful in the case of exploratory data analysis, when the researcher attempts to identify interesting points or anomalies in the data without prior knowledge of what exactly they are searching for. Furthermore, the data analysis process often involves the collaborative sharing of information and knowledge between multiple users for the goal of interpreting and making sense of the explored data together (Isenberg et al. 2011). However, immersive technologies such as VR are often rather single user-centric experiences, where one user is wearing a head-mounted display (HMD) device and is thus visually isolated from the real-world surroundings. Consequently, new tools and approaches for co-located, synchronous collaboration in such immersive data analysis scenarios are needed.

	In this software demonstration, we present our developed VR system that enables two users to explore data at the same time, one inside an immersive VR environment, and one outside VR using a non-immersive companion application. The context of this demonstrated data analysis activity is centered around the exploration of the language variability in tweets from the perspectives of multilingualism and sociolinguistics (see, e.g. Coats 2017 and Grieve et al. 2017). Our primary data come from the Nordic Tweet Stream (NTS) corpus (Laitinen et al. 2018, Tyrkkö 2018), and the immersive VR application visualizes in three dimensions (3D) the clustered Twitter traffic within the Nordic region as stacked cuboids according to their geospatial position, where each stack represents a color-coded language share (Alissandrakis et al. 2018). Through the utilization of 3D gestural input, the VR user can interact with the data using hand postures and gestures in order to move through the virtual 3D space, select clusters and display more detailed information, and to navigate through time (Reski and Alissandrakis 2019) ( https://vrxar.github.io/apps/odxvrxnts-360/ ). A non-immersive companion application, running in a normal web browser, presents an overview map of the Nordic region as well as other supplemental information about the data that are more suitable to be displayed using non-immersive technologies.

	We will present two complementary applications, each with a different objective within the collaborative data analysis framework. The design and implementation of certain connectivity and collaboration features within these applications facilitate the co-located, synchronous exploration and sensemaking. For instance, the VR user’s position and orientation are displayed and updated in real-time within the overview map of the non-immersive application. Conversely, the cluster selected by the non-immersive user is also highlighted for the user in VR. Initial tests with pairs of language students validated the proof-of-concept of the developed collaborative system and encourage further investigations in this direction.}
}

@inproceedings{reski2020eot,
	author		= {Nico Reski and Aris Alissandrakis and Andreas Kerren},
	title		= {{Exploration of Time-Oriented Data in Immersive {Virtual Reality} Using a {3D Radar Chart} Approach}},
	booktitle	= {Proceedings of the 11th Nordic Conference on Human-Computer Interaction: Shaping Experiences, Shaping Society (NordiCHI 2020)},
	pages		= {33:1--11},
	address		= {Tallinn, Estonia},
	publisher	= {Association for Computing Machinery (ACM)},
	date		= {2020-10-25/2020-10-29},
	language	= {english},
	url			= {https://doi.org/10.1145/3419249.3420171},
	doi 		= {10.1145/3419249.3420171},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-98671},
	isbn		= {9781450375795},
	keywords	= {immersive analytics, radar chart, time-oriented data, virtual reality, 3D gestural input},
	abstract	= {In this paper, we present an approach to interact with time-oriented data in Virtual Reality within the context of Immersive Analytics. We implemented a Virtual Reality application that enables its user to explore data in an immersive environment (head-mounted display, 3D gestural input), utilizing potential advantages of immersive technologies, for instance, depth cues for better spatial understanding, natural interaction, and user engagement. The visualization design is inspired by the overall concept of a radar chart, and using the third dimension to represent time-series related data. We conducted a user study with 15 participants, encouraging them to examine a representative dataset within an explorative analysis scenario with no time constraints. Based on the results of usability and user engagement scores, task completion analysis, observations, and interviews, we were able to empirically validate the approach in general, and gain insights in the users’ interaction and data analysis strategies.}
}

@article{reski2020ode,
	author		= {Nico Reski and Aris Alissandrakis},
	title		= {{Open data exploration in virtual reality: a comparative study of input technology}},
	journal		= {Virtual Reality},
	pages		= {1--22},
	volume		= {24},
	number		= {},
	publisher	= {Springer London},
	year		= {2020},
	month		= {3},
	language	= {english},
	url			= {https://doi.org/10.1007/s10055-019-00378-w},
	doi 		= {10.1007/s10055-019-00378-w},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-79974},
	issn		= {1434-9957},
	keywords	= {Comparative study, Gamepad, Room-scale virtual reality, Virtual reality, Vision-based motion controls, 3D gestural input},
	abstract	= {In this article, we compare three different input technologies (gamepad, vision-based motion controls, room-scale) for an interactive virtual reality (VR) environment. The overall system is able to visualize (open) data from multiple online sources in a unified interface, enabling the user to browse and explore displayed information in an immersive VR setting. We conducted a user interaction study (n=24; n=8 per input technology, between-group design) to investigate experienced workload and perceived flow of interaction. Log files and observations allowed further insights and comparison of each condition. We have identified trends that indicate user preference of a visual (virtual) representation, but no clear trends regarding the application of physical controllers (over vision-based controls), in a scenario that encouraged exploration with no time limitations.}
}

@inproceedings{reski2020otw,
	author		= {Nico Reski and Aris Alissandrakis and Jukka Tyrkkö and Andreas Kerren},
	title		= {{{``Oh, that's where you are!''} -- Towards a Hybrid Asymmetric {Collaborative Immersive Analytics} System}},
	booktitle	= {Proceedings of the 11th Nordic Conference on Human-Computer Interaction: Shaping Experiences, Shaping Society (NordiCHI 2020)},
	pages		= {5:1--12},
	address		= {Tallinn, Estonia},
	publisher	= {Association for Computing Machinery (ACM)},
	date		= {2020-10-25/2020-10-29},
	language	= {english},
	url			= {https://doi.org/10.1145/3419249.3420102},
	doi 		= {10.1145/3419249.3420102},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-98675},
	isbn		= {9781450375795},
	keywords	= {asymmetrical collaboration, awareness, common ground, deixis, immersive analytics, spatial reference, virtual reality, 3D gestural input},
	abstract	= {We present a hybrid Immersive Analytics system to support asymmetrical collaboration between a pair of users during synchronous data exploration. The system consists of an immersive Virtual Reality application, a non-immersive web application, and a real-time communication interface connecting both applications to provide features to facilitate the collaborators’ mutual understanding and their ability to make (spatial) references. We conducted a real world case study with pairs of language students, encouraging them to use the developed system to investigate a large multivariate Twitter dataset from a sociolinguistic perspective within an explorative analysis scenario. Based on the results of usability scores, log file analyses, observations, and interviews, we were able to validate the approach in general, and gain insights into the users’ collaboration with respect to awareness, deixis, and group dynamics.}
}

@article{reski2022aee,
	author		= {Nico Reski and Aris Alissandrakis and Andreas Kerren},
	title		= {{An Empirical Evaluation of Asymmetric Synchronous Collaboration Combining Immersive and Non-Immersive Interfaces Within the Context of {Immersive Analytics}}},
	journal		= {Frontiers in Virtual Reality},
	pages		= {743445:1--29},
	volume		= {2},
	number		= {},
	publisher	= {{Frontiers Media S.A.}},
	date		= {2022-01-17},
	language	= {english},
	url			= {https://doi.org/10.3389/frvir.2021.743445},
	doi 		= {10.3389/frvir.2021.743445},
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-109309},
	issn		= {2673-4192},
	keywords	= {asymmetric user roles, computer-supported cooperative work, heterogeneous display and interaction technologies, immersive analytics, empirical evaluation, spatio-temporal data exploration, synchronous remote collaboration, virtual reality},
	abstract	= {Collaboration is an essential part of data analysis, allowing multiple users to combine their expertise and to debate about the interpretation of data discoveries using their contextual knowledge. The design of collaborative interfaces within the context of Immersive Analytics remains challenging, particularly due to the various user-centered characteristics of immersive technologies. In this article, we present the use case of a system that enables multiple users to synchronously explore the same data in a collaborative scenario that combines immersive and non-immersive interfaces in an asymmetric role setup. Such a setup allows for bridging the gap when applying heterogeneous display and interaction technologies, enabling each analyst to have an independent and different view of the data, while maintaining important collaborative aspects during the joint data exploration. We developed an immersive VR environment (head-mounted display, 3D gestural input) and a non-immersive desktop terminal (monitor, keyboard and mouse) centered around spatio-temporal data exploration. Supported through a real-time communication interface, synchronous collaborative features are integrated in both interfaces, facilitating the users in their ability to establish a shared context and to make spatio-temporal references. We conducted an empirical evaluation with five participant pairs (within-subject design) to investigate aspects of usability, user engagement, and collaboration during a confirmative analysis task. Synthesis of questionnaire results in combination with additional log file analysis, audio activity analysis, and observations, revealed good usability scores, high user engagement, as well as overall close and balanced collaboration of enthusiastic pairs during the task completion independent of their interface type, validating our system approach in general. Further supported through the self-constructed Spatio-Temporal Collaboration Questionnaire, we are able to contribute with discussion and considerations of the presented scenario and the synchronous collaborative features for the design of similar applications.}
}

@phdthesis{reski2022sdi,
    author 		= {Nico Reski},
    title 		= {{Supporting Data Interaction and Hybrid Asymmetric Collaboration Using Virtual Reality Within the Context of Immersive Analytics}},
    school 		= {Linnaeus University},
    publisher 	= {Linnaeus University Press},
    series 		= {Linnaeus University Dissertations},
    number 		= {459},
    pages		= {323},
    address 	= {Växjö, Sweden},
    date 		= {2022-09-30},
    language	= {english},
	url			= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-115654},
	isbn 		= {9789189709270},
	keywords	= {3D gestural input, 3D radar charts, 3D user interfaces, empirical evaluation, head-mounted display, hybrid asymmetric collaboration, immersive analytics, spatio-temporal data interaction, user interface design, virtual reality},
	abstract	= {Immersive display and interaction technologies have rapidly evolved in recent years, offering advanced techniques compared to traditional Human-Computer Interaction. Computer-generated Virtual Environments viewed with stereoscopic depth perception and explored using 3D spatial interaction can represent more accurately how humans naturally interact in the real world. Data analysis is a promising area of application for such technologies, holding potential to promote intuitive interaction, user engagement, collaboration, and data curiosity, as well as to foster appropriate contextual visualization. Even when techniques such as Machine Learning and Data Mining assist with the analysis of data, human interpretation, contextualization, and meaning making are still needed. The design of immersive data visualization and interaction is challenging due to the complexity of the involved technologies and human factors, which calls for an interdisciplinary research effort.

	The focus of this thesis is to investigate means of exploration, interaction, and collaboration using Virtual Reality and 3D gestural input in immersive environments within the context of spatio-temporal data analysis. Based on existing literature as well as following an applied and interdisciplinary research approach, a design space for this type of Immersive Analytics is defined. The emphasis on spatio-temporal data is relevant across various real-world contexts and scenarios, such as sociolinguistics and climate analysis, given that data collected nowadays commonly feature descriptors of where and when they were captured. An immersive data analysis system has been implemented and evaluated across three virtual environment iterations. Two core themes from a user-centered perspective are interaction and collaboration. The design of useful and engaging 3D gestural interaction techniques supports the conduct of typical analytical tasks that aid the data exploration and thus the discovery of insights. Furthermore, data analysis is seldom a solitary activity, but can be conducted in collaboration with multiple analysts, who combine their knowledge to interpret and discuss the discoveries. For this purpose, the concept of Hybrid Asymmetric Collaboration is defined, aiming to facilitate an envisioned broader analytical workflow that assumes a mixture of immersive and non-immersive interfaces (hybrid) as well as distinct user roles (asymmetric). To bridge data analysis across heterogeneous interface types, the design of visual information cues is investigated to support foundational aspects of collaboration, such as awareness, common ground, reference, and deixis.

	The conducted research has been empirically evaluated using a combination of standardized and custom methods in a total of six main studies. The outcomes of these studies allow for reflections and the proposal of design guidelines for collaborative data interaction in immersive spaces.}
}

@article{reski2023upo,
	author		= {Nico Reski and Aris Alissandrakis and Andreas Kerren},
	title		= {{User Preferences of Spatio-Temporal Referencing Approaches For Immersive 3D Radar Charts}},
	journal		= {arXiv Preprint},
	pages		= {1--29},
	volume		= {},
	number		= {},
	publisher	= {{arXiv}},
	date		= {2023-03-14},
	language	= {english},
	url			= {https://doi.org/10.48550/arXiv.2303.07899},
	doi 		= {10.48550/arXiv.2303.07899},
	issn		= {},
	keywords	= {awareness, collaborative immersive analytics, computer-supported cooperative work, empirical study, virtual reality, visual information cues, 3D radar chart},
	abstract	= {The use of head-mounted display technologies for virtual reality experiences is inherently single-user-centred, allowing for the visual immersion of its user in the computer-generated environment. This isolates them from their physical surroundings, effectively preventing external visual information cues, such as the pointing and referral to an artifact by another user. However, such input is important and desired in collaborative scenarios when exploring and analyzing data in virtual environments together with a peer. In this article, we investigate different designs for making spatio-temporal references, i.e., visually highlighting virtual data artifacts, within the context of Collaborative Immersive Analytics. The ability to make references to data is foundational for collaboration, affecting aspects such as awareness, attention, and common ground. Based on three design options, we implemented a variety of approaches to make spatial and temporal references in an immersive virtual reality environment that featured abstract visualization of spatio-temporal data as 3D Radar Charts. We conducted a user study (n=12) to empirically evaluate aspects such as aesthetic appeal, legibility, and general user preference. The results indicate a unified favour for the presented location approach as a spatial reference while revealing trends towards a preference of mixed temporal reference approaches dependent on the task configuration: pointer for elementary, and outline for synoptic references. Based on immersive data visualization complexity as well as task reference configuration, we argue that it can be beneficial to explore multiple reference approaches as collaborative information cues, as opposed to following a rather uniform user interface design.}
}

@article{reski2024dat,
    author      = {Nico Reski and Aris Alissandrakis and Andreas Kerren},
    title       = {{Designing a 3D gestural interface to support user interaction with time-oriented data as immersive 3D radar charts}},
    journal     = {Virtual Reality},
    pages       = {30:1--24},
    volume      = {28},
    number      = {},
    publisher   = {{Springer Nature}},
    date        = {2024-01-23},
    language    = {english},
    url         = {https://doi.org/10.1007/s10055-023-00913-w},
    doi         = {10.1007/s10055-023-00913-w},
    diva        = {https://urn.kb.se/resolve?urn=urn:nbn:se:liu:diva-200466},
    issn        = {},
    keywords    = {empirical study, immersive analytics, user interface design, virtual reality, 3D gestural input, 3D radar chart},
    abstract    = {The design of intuitive three-dimensional user interfaces is vital for interaction in virtual reality, allowing to effectively close the loop between a human user and the virtual environment. The utilization of 3D gestural input allows for useful hand interaction with virtual content by directly grasping visible objects, or through invisible gestural commands that are associated with corresponding features in the immersive 3D space. The design of such interfaces remains complex and challenging. In this article, we present a design approach for a three-dimensional user interface using 3D gestural input with the aim to facilitate user interaction within the context of Immersive Analytics. Based on a scenario of exploring time-oriented data in immersive virtual reality using 3D Radar Charts, we implemented a rich set of features that is closely aligned with relevant 3D interaction techniques, data analysis tasks, and aspects of hand posture comfort. We conducted an empirical evaluation (n=12), featuring a series of representative tasks to evaluate the developed user interface design prototype. The results, based on questionnaires, observations, and interviews, indicate good usability and an engaging user experience. We are able to reflect on the implemented hand-based grasping and gestural command techniques, identifying aspects for improvement in regard to hand detection and precision as well as emphasizing a prototype's ability to infer user intent for better prevention of unintentional gestures.}
}

@article{reski2026uci,
    author      = {Nico Reski and Carlo Navarra and Lotten Wiréhn and Tina-Simone Neset and Aris Alissandrakis and Aitor {Aldama Campino} and Fuxing Wang and Isabel Ribeiro and Jorge H. Amorim and Andreas Kerren and Katerina Vrotsou},
    title       = {{Urban Climate InteracTable: towards an immersive contextual data analysis platform to visualize and explore urban heat}},
    journal     = {Virtual Reality},
    pages       = {7:1--24},
    volume      = {30},
    number      = {},
    publisher   = {{Springer Nature}},
    year        = {2026},
    month       = {},
    language    = {english},
    url         = {https://doi.org/10.1007/s10055-025-01264-4},
    doi         = {10.1007/s10055-025-01264-4},
    diva        = {https://urn.kb.se/resolve?urn=urn:nbn:se:liu:diva-219922},
    issn        = {},
    keywords    = {Immersive analytics, Urban analytics, Urban heat, Climate adaptation, Climate modelling, Visualization, Design study},
    abstract    = {Extreme weather events, such as heat waves, are occurring more frequently and intensively, imposing new climate-adaptation demands on municipal planning. We conducted a design study across the domains of urban planning and urban climate research, and identified challenges regarding a lack of heat-related information in current planning processes, and the high complexity of effective climate data representation. To address these challenges, and so enhance the information flow between these domains, we developed Urban Climate InteracTable, an immersive interface that supports exploratory analysis of spatio-temporal climate simulation data integrated with an urban environment representation. We describe several use cases in which this interface can be utilized to assist with planning-related decision processes and to communicate heat-related phenomena. We present the feedback obtained from our collaborating domain experts and relevant external experts, and reflect on our experiences throughout the design study. From this, we offer insights for future research.}
}

@inproceedings{yousefi2016tdg,
	author		= {Shahrouz Yousefi and Mhretab Kidane and Yeray Delgado and Julio Chana and Nico Reski},
	title		= {{{3D} Gesture-based Interaction for Immersive Experience in {Mobile VR}}},
	booktitle	= {The 23rd International Conference on Pattern Recognition (ICPR 2016)},
	pages		= {2122--2127},
	address		= {Cancun, Mexico},
	publisher	= {Institute of Electrical and Electronics Engineers (IEEE)},
	date		= {2016-12-04/2016-12-08},
	language	= {english},
	url			= {https://doi.org/10.1109/ICPR.2016.7899949},
	doi			= {10.1109/ICPR.2016.7899949}, 
	diva 		= {http://urn.kb.se/resolve?urn=urn:nbn:se:lnu:diva-61447},
	isbn		= {978-1-5090-4847-2},
	abstract	= {In this paper we introduce a novel solution for real-time 3D hand gesture analysis using the embedded 2D camera of a mobile device. The presented framework is based on forming a large database of hand gestures including the ground truth information of hand poses and details of finger joints in 3D. For a query frame captured by the mobile device’s camera in real time, the gesture analysis system finds the best match from the database. Once the best match is found, the corresponding ground truth information will be used for interaction in the designed interface. The presented framework performs extremely efficient gesture analysis (more than 30 fps) under flexible lighting conditions and with complex backgrounds and dynamic movement of the mobile device. The introduced work is implemented on Android and tested with a Gear VR headset.}
}