Reordering virtual reality: Recording and recreating real-time experiences. Dolinsky, M., Sherman, W., Wernert, E., & Chi, Y. C. In Proceedings of SPIE - The International Society for Optical Engineering, volume 8289, 2012. SPIE.
Reordering virtual reality: Recording and recreating real-time experiences [link]Website  abstract   bibtex   
The proliferation of technological devices and artistic strategies has brought about an urgent and justifiable need to capture site-specific time-based virtual reality experiences. Interactive art experiences are specifically dependent on the orchestration of multiple sources including hardware, software, site-specific location, visitor inputs and 3D stereo and sensory interactions. Although a photograph or video may illustrate a particular component of the work, such as an illustration of the artwork or a sample of the sound, these only represent a fraction of the overall experience. This paper seeks to discuss documentation strategies that combine multiple approaches and capture the interactions between art projection, acting, stage design, sight movement, dialogue and audio design. © 2012 SPIE-IS&T.
@inproceedings{Dolinsky2012,
  author    = {Dolinsky, M. and Sherman, W. and Wernert, E. and Chi, Y. C.},
  title     = {Reordering Virtual Reality: Recording and Recreating Real-Time Experiences},
  booktitle = {Proceedings of {SPIE} - The International Society for Optical Engineering},
  volume    = {8289},
  year      = {2012},
  publisher = {SPIE},
  doi       = {10.1117/12.912053},
  keywords  = {3D stereo; Archive; Dissemination; Interactive art; Data storage equipment; Digital libraries; Virtual reality},
  note      = {The Engineering Reality of Virtual Reality 2012, Burlingame, CA, 24--25 January 2012. Conference code 104653; cited by 3},
  abstract  = {The proliferation of technological devices and artistic strategies has brought about an urgent and justifiable need to capture site-specific time-based virtual reality experiences. Interactive art experiences are specifically dependent on the orchestration of multiple sources including hardware, software, site-specific location, visitor inputs and 3D stereo and sensory interactions. Although a photograph or video may illustrate a particular component of the work, such as an illustration of the artwork or a sample of the sound, these only represent a fraction of the overall experience. This paper seeks to discuss documentation strategies that combine multiple approaches and capture the interactions between art projection, acting, stage design, sight movement, dialogue and audio design. {\copyright} 2012 SPIE-IS\&T.},
}

Downloads: 0