2020
Croci, Simone; Ozcinar, Cagri; Zerman, Emin; Knorr, Sebastian; Cabrera, Julian; Smolic, Aljosa: Visual Attention-Aware Quality Estimation Framework for Omnidirectional Video using Spherical Voronoi Diagram. Journal Article. In: Springer Quality and User Experience, 2020.
@article{Croci2020b,
title = {Visual Attention-Aware Quality Estimation Framework for Omnidirectional Video using Spherical Voronoi Diagram},
author = {Simone Croci and Cagri Ozcinar and Emin Zerman and Sebastian Knorr and Julian Cabrera and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/research/voronoi-based-objective-metrics/
https://v-sense.scss.tcd.ie:443/wp-content/uploads/2020/04/Croci2020b.pdf},
doi = {10.1007/s41233-020-00032-3},
year = {2020},
date = {2020-03-01},
journal = {Springer Quality and User Experience},
abstract = {Omnidirectional video (ODV) enables viewers to look in every direction from a fixed point and provides a much more immersive experience than traditional 2D video. Assessing video quality is important for delivering ODV to the end-user at the best possible quality. For this goal, two aspects of ODV should be considered. The first is the spherical nature of ODV and the related projection distortions when the ODV is stored in a planar format. The second is the interactive, look-around nature of ODV consumption. Related to this aspect, visual attention, which identifies the regions that attract the viewer's attention, is important for ODV quality assessment. Considering these aspects, this paper focuses on objective full-reference quality assessment for ODV. To this end, we propose a quality assessment framework based on the spherical Voronoi diagram and visual attention. In this framework, a given ODV is subdivided into multiple planar patches with low projection distortions using the spherical Voronoi diagram. Afterwards, each planar patch is analyzed separately by a quality metric for traditional 2D video, obtaining a quality score for each patch. Then, the patch scores are combined based on visual attention into a final quality score. To validate the proposed framework, we create a dataset of ODVs with scaling and compression distortions, and conduct subjective experiments to gather subjective quality scores and visual attention data for our ODV dataset. The evaluation of the proposed framework on our dataset shows that both the use of the spherical Voronoi diagram and visual attention are crucial for achieving state-of-the-art performance.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
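The pooling step described in the abstract lends itself to a short illustration. The sketch below is not the authors' code: it assumes generator points spread roughly evenly on the sphere (here via a Fibonacci lattice), that per-patch quality scores have already been computed by some 2D metric on the planar patch projections, and that visual attention is available as an equirectangular map. It then assigns every pixel direction to its nearest generator (its spherical Voronoi cell) and pools the patch scores with the attention mass of each cell.

import numpy as np
from scipy.spatial import SphericalVoronoi

def fibonacci_sphere(n):
    # Roughly evenly distributed generator points on the unit sphere.
    i = np.arange(n)
    phi = np.arccos(1.0 - 2.0 * (i + 0.5) / n)        # polar angle
    theta = np.pi * (1.0 + 5.0 ** 0.5) * i            # golden-angle azimuth
    return np.stack([np.sin(phi) * np.cos(theta),
                     np.sin(phi) * np.sin(theta),
                     np.cos(phi)], axis=1)

def attention_weighted_score(patch_scores, attention_erp, centers):
    # Pool per-patch scores with weights equal to the attention mass that
    # falls inside each spherical Voronoi cell (nearest-generator assignment).
    h, w = attention_erp.shape
    lat = (0.5 - (np.arange(h) + 0.5) / h) * np.pi              # +pi/2 .. -pi/2
    lon = ((np.arange(w) + 0.5) / w - 0.5) * 2.0 * np.pi        # -pi .. +pi
    lon, lat = np.meshgrid(lon, lat)
    dirs = np.stack([np.cos(lat) * np.cos(lon),
                     np.cos(lat) * np.sin(lon),
                     np.sin(lat)], axis=-1).reshape(-1, 3)
    cell = np.argmax(dirs @ centers.T, axis=1)                  # Voronoi cell index per pixel
    weights = np.bincount(cell, weights=attention_erp.ravel(), minlength=len(centers))
    weights = weights / (weights.sum() + 1e-12)
    return float(np.dot(weights, np.asarray(patch_scores, dtype=float)))

centers = fibonacci_sphere(15)
sv = SphericalVoronoi(centers)   # cell vertices, usable for planar patch extraction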
2019
Croci, Simone; Ozcinar, Cagri; Zerman, Emin; Cabrera, Julian; Smolic, Aljosa: Voronoi-based Objective Quality Metrics for Omnidirectional Video. Inproceedings. In: 11th International Conference on Quality of Multimedia Experience (QoMEX 2019), 2019.
@inproceedings{Croci2019,
title = {Voronoi-based Objective Quality Metrics for Omnidirectional Video},
author = {Simone Croci and Cagri Ozcinar and Emin Zerman and Julian Cabrera and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/research/voronoi-based-objective-metrics/
https://v-sense.scss.tcd.ie:443/wp-content/uploads/2019/03/QoMEX2019.pdf},
year = {2019},
date = {2019-06-06},
booktitle = {11th International Conference on Quality of Multimedia Experience (QoMEX 2019)},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
2018
Croci, Simone; Knorr, Sebastian; Smolic, Aljosa: Sharpness Mismatch Detection in Stereoscopic Content with 360-Degree Capability. Inproceedings. In: IEEE International Conference on Image Processing (ICIP 2018), 2018.
@inproceedings{croci2018a,
title = {Sharpness Mismatch Detection in Stereoscopic Content with 360-Degree Capability},
author = {Simone Croci and Sebastian Knorr and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/wp-content/uploads/2018/08/ICIP_2018.pdf},
year = {2018},
date = {2018-10-07},
booktitle = {IEEE International Conference on Image Processing (ICIP 2018)},
abstract = {This paper presents a novel sharpness mismatch detection method for stereoscopic images based on the comparison of edge width histograms of the left and right views. The new method is evaluated on the LIVE 3D Phase II and Ningbo 3D Phase I datasets and compared with two state-of-the-art methods. Experimental results show that the new method correlates highly with user scores from subjective tests and that it outperforms the current state of the art. We then extend the method to stereoscopic omnidirectional images by partitioning the images into patches using a spherical Voronoi diagram. Furthermore, we integrate visual attention data into the detection process in order to weight sharpness mismatch according to the likelihood of its appearance in the viewport of the end-user's virtual reality device. For obtaining visual attention data, we performed a subjective experiment with 17 test subjects and 96 stereoscopic omnidirectional images. The entire dataset, including the viewport trajectory data and the resulting visual attention maps, is publicly available with this paper.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
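As a rough illustration of the comparison step only, the snippet below is an assumption-laden stand-in, not the paper's edge-width estimator: it replaces edge width histograms with histograms of Laplacian magnitude as a crude per-view sharpness proxy, and compares the left and right distributions with a chi-square distance.

import numpy as np
from scipy.ndimage import laplace

def sharpness_histogram(gray, bins=32, vmax=64.0):
    # Normalised histogram of absolute Laplacian response (sharpness proxy).
    resp = np.abs(laplace(gray.astype(np.float64)))
    hist, _ = np.histogram(resp, bins=bins, range=(0.0, vmax))
    return hist / (hist.sum() + 1e-12)

def sharpness_mismatch(left_gray, right_gray):
    # Chi-square distance between the two sharpness histograms; larger values
    # suggest a stronger sharpness mismatch between the left and right views.
    hl = sharpness_histogram(left_gray)
    hr = sharpness_histogram(right_gray)
    return float(0.5 * np.sum((hl - hr) ** 2 / (hl + hr + 1e-12)))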
O’Dwyer, Néill; Johnson, Nicholas: Virtual Play: Beckettian Experiments in Virtual Reality. Journal Article. In: Contemporary Theatre Review, vol. 28.1, 2018.
@article{BeckettianExperimentsinVirtualReality,
title = {Virtual Play: Beckettian Experiments in Virtual Reality},
author = {Néill O’Dwyer and Nicholas Johnson},
url = {https://www.contemporarytheatrereview.org/2018/beckettian-experiments-in-virtual-reality/},
year = {2018},
date = {2018-02-21},
journal = {Contemporary Theatre Review},
volume = {28.1},
abstract = {The past ten years have seen extensive experimentation with Beckett and new technological media at Trinity College Dublin. Research projects have included the stage adaptation and installation of a teleplay (Ghost Trio, 2007), the HD digital video exploration of two teleplays (Abstract Machines, 2010, including new versions of …but the clouds… and Nacht und Träume), and numerous smaller projects involving audio and video within the remit of “fundamental research” at the Samuel Beckett Laboratory (2013–present). The most recent project, Virtual Play, explores Beckett’s Play (1963) within FVV (free-viewpoint video), a form of user-centred VR (virtual reality). This project, reflecting interdisciplinary and cross-faculty collaboration between the V-SENSE project (within the School of Computer Science and Statistics) and the School of Creative Arts, has made high-impact contributions in both FVV research and Beckett Studies, and has now been recognised at European level, receiving first prize at the 2017 New European Media Awards.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
2017
Croci, Simone; Knorr, Sebastian; Smolic, Aljosa: Saliency-Based Sharpness Mismatch Detection For Stereoscopic Omnidirectional Images. Inproceedings. In: 14th European Conference on Visual Media Production, London, UK, 2017.
@inproceedings{Croci2017a,
title = {Saliency-Based Sharpness Mismatch Detection For Stereoscopic Omnidirectional Images},
author = {Simone Croci and Sebastian Knorr and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/wp-content/uploads/2017/10/2017_CVMP_Saliency-Based-Sharpness-Mismatch-Detection-For-Stereoscopic-Omnidirectional-Images.pdf},
doi = {10.1145/3150165.3150168},
year = {2017},
date = {2017-12-11},
booktitle = {14th European Conference on Visual Media Production},
address = {London, UK},
abstract = {In this paper, we present a novel sharpness mismatch detection (SMD) approach for stereoscopic omnidirectional images (ODIs) for quality control within the post-production workflow, which is the main contribution. In particular, we applied a state-of-the-art SMD approach, which was originally developed for traditional HD images, and extended it to stereoscopic ODIs. A new efficient method for patch extraction from ODIs was developed based on the spherical Voronoi diagram of equidistant points evenly distributed on the sphere. The subdivision of the ODI into patches allows an accurate detection and localization of regions with sharpness mismatch. A second contribution of the paper is the integration of saliency into our SMD approach. In this context, we introduce a novel method for the estimation of saliency maps from viewport data of head-mounted displays (HMDs). Finally, we demonstrate the performance of our SMD approach with data collected from a subjective test with 17 participants.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
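The viewport-to-saliency idea can be illustrated with the following sketch, which is an assumption rather than the estimation method introduced in the paper: it takes unit view directions sampled from HMD viewport-centre trajectories, accumulates them into an equirectangular histogram, and smooths the result (a plain 2D Gaussian that ignores pole distortion, which a proper spherical implementation would handle).

import numpy as np
from scipy.ndimage import gaussian_filter

def saliency_from_directions(dirs, height=256, width=512, sigma_px=8.0):
    # dirs: (N, 3) unit view directions gathered from viewport trajectories.
    x, y, z = dirs[:, 0], dirs[:, 1], dirs[:, 2]
    lon = np.arctan2(y, x)                          # -pi .. +pi
    lat = np.arcsin(np.clip(z, -1.0, 1.0))          # -pi/2 .. +pi/2
    col = ((lon + np.pi) / (2.0 * np.pi) * width).astype(int) % width
    row = ((np.pi / 2.0 - lat) / np.pi * height).astype(int).clip(0, height - 1)
    sal = np.zeros((height, width))
    np.add.at(sal, (row, col), 1.0)                 # accumulate fixation counts
    sal = gaussian_filter(sal, sigma_px, mode=("nearest", "wrap"))
    return sal / (sal.sum() + 1e-12)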
Croci, Simone; Knorr, Sebastian; Goldmann, Lutz; Smolic, Aljosa: A Framework for Quality Control in Cinematic VR Based on Voronoi Patches and Saliency. Inproceedings. In: International Conference on 3D Immersion, Brussels, Belgium, 2017.
@inproceedings{Croci2017b,
title = {A Framework for Quality Control in Cinematic VR Based on Voronoi Patches and Saliency},
author = {Simone Croci and Sebastian Knorr and Lutz Goldmann and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/wp-content/uploads/2017/10/2017_IC3D_A-FRAMEWORK-FOR-QUALITY-CONTROL-IN-CINEMATIC-VR-BASED-ON-VORONOI-PATCHES-AND-SALIENCY.pdf},
year = {2017},
date = {2017-12-11},
booktitle = {International Conference on 3D Immersion},
address = {Brussels, Belgium},
abstract = {In this paper, we present a novel framework for quality control in cinematic VR (360-degree video) based on Voronoi patches and saliency, which can be used in post-production workflows. Our approach first extracts patches from stereoscopic omnidirectional images (ODIs) using the spherical Voronoi diagram. The subdivision of the ODI into patches allows an accurate detection and localization of regions with artifacts. Further, we introduce saliency in order to weight detected artifacts according to the visual attention of end-users. Then, we propose different artifact detection and analysis methods for sharpness mismatch detection (SMD), color mismatch detection (CMD), and disparity distribution analysis. In particular, we took two state-of-the-art approaches for SMD and CMD, which were originally developed for conventional planar images, and extended them to stereoscopic ODIs. Finally, we evaluated the performance of our framework with a dataset of 18 ODIs for which saliency maps were obtained from a subjective test with 17 participants.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
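Of the artifact detectors named in the abstract, colour mismatch is the easiest to caricature in a few lines. The snippet below is only a hedged stand-in for the CMD method the paper extends: it compares per-channel colour statistics of a corresponding left/right patch pair, and both the function name and the statistic are assumptions for illustration.

import numpy as np

def color_mismatch(patch_left, patch_right):
    # Distance between per-channel means and standard deviations of two RGB
    # patches; larger values suggest a stronger colour mismatch between views.
    l = patch_left.reshape(-1, 3).astype(np.float64)
    r = patch_right.reshape(-1, 3).astype(np.float64)
    d_mean = np.abs(l.mean(axis=0) - r.mean(axis=0))
    d_std = np.abs(l.std(axis=0) - r.std(axis=0))
    return float(d_mean.sum() + d_std.sum())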
Knorr, Sebastian; Croci, Simone; Smolic, Aljosa: A Modular Scheme for Artifact Detection in Stereoscopic Omni-Directional Images. Inproceedings. In: Irish Machine Vision and Image Processing Conference, Maynooth, Ireland, 2017.
@inproceedings{Knorr2017,
title = {A Modular Scheme for Artifact Detection in Stereoscopic Omni-Directional Images},
author = { Sebastian Knorr and Simone Croci and Aljosa Smolic},
url = {https://v-sense.scss.tcd.ie:443/wp-content/uploads/2017/07/imvip2017_knorr_final.pdf},
year = {2017},
date = {2017-08-30},
booktitle = {Irish Machine Vision and Image Processing Conference},
address = {Maynooth, Ireland},
abstract = {With the release of new head-mounted displays (HMDs) and new omni-directional capture systems, 360-degree video is one of the latest and most powerful trends in immersive media, with increasing potential for the coming decades. However, creating 360-degree content in 3D in particular is still an error-prone task with many limitations to overcome. This paper describes the critical aspects of 3D content creation for 360-degree video. In particular, conflicts of depth cues and binocular rivalry are reviewed in detail, as these cause eye fatigue, headache, and even nausea. Both the reasons for the appearance of these conflicts and how to detect some of them by objective image analysis methods are detailed in this paper. The latter is the main contribution of this paper and part of the authors' long-term research roadmap towards a comprehensive framework for artifact detection and correction in 360-degree videos. Experimental results then demonstrate the performance of the proposed approaches in terms of objective measures and visual feedback. Finally, the paper concludes with a discussion and future work.},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
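One way to make the objective image analysis mentioned in the abstract concrete is a disparity-budget check. The sketch below is an assumption for illustration, not one of the paper's detectors: given a per-pixel disparity map (in pixels) for a patch, it reports the fraction of pixels whose disparity magnitude exceeds a chosen comfort budget expressed as a fraction of the image width.

import numpy as np

def excessive_disparity_ratio(disparity_px, image_width, comfort_fraction=0.01):
    # Fraction of pixels whose |disparity| exceeds the comfort budget; values
    # near zero suggest the patch is comfortable to fuse, larger values flag
    # a potential depth-cue conflict worth reviewing.
    budget = comfort_fraction * image_width
    return float(np.mean(np.abs(np.asarray(disparity_px, dtype=float)) > budget))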