
Virtual Reality und Augmented Reality in der Bildung

Authors:
!"#$%&'('()&(*)!+('",)-.)/01234)5&"6789):'89&6;)7(<)=7>?'(6'<):'89&6;)&()<'")@&9<7(>.)
A(?8B-8>8C&()1DE0123.)F.)21B20.)GFFH*)0I21BJ0II.)%66K#*EELLL.A(?B
87#6"&8.86EA&9'8<?&(E7#'"M7K9N8<E<N$7?'(6#E-8>8C&(E0123B1D.K<A)
)
Virtual Reality and Augmented Reality in Education
Martin Ebner

In this issue we have chosen a highly innovative focus topic, one that will probably only show its real impact in the years to come. At the same time, however, technological development is advancing at an incredible pace.
Since the early days of virtual-reality headsets, the last few years have brought considerable improvements in head-mounted-display technology. With the development of VR headsets and the announcement or release of products such as Oculus Rift, HTC Vive, Samsung Gear VR and others, more and more manufacturers are bringing products to the market in order to make virtual reality accessible to the general public. When Google introduced Google "Cardboard", VR actually reached the mass market, because a virtual-reality headset can now be built from a piece of cardboard combined with a mobile phone. The combination of maker education, VR and a mobile device therefore offers a great opportunity to apply concepts of digital game-based learning and to create an immersive learning experience (immersive learning). The use of VR techniques for learning, training and further education is thus an interesting research field with high potential.
Definition of VR

5&"6789):'89&6;)/5:4)#'9+#6)Q8(()89#)$N?K76'">'('"&'"6')G997#&N()'&('")"'89'()T'96)
8(>'#'%'()L'"<'(,)LR%"'(<)=7>?'(6'<):'896&;)/=:4)?'%")89#)'&(')!">R(C7(>)<'")
"'89'()T'96)<7"$%)$N?K76'">'('"&'"6')d+'"98>'"7(>'()C7)#'%'()&#6.)V&')K'"A'Q6')5&"6789)
:'89&6;)6R7#$%6)<&')?'(#$%9&$%'()F&((',)#N<8##)#&')PN()<'")"'89'()T'96)(&$%6)C7)
7(6'"#$%'&<'()#&(<.)V8#)e&'9,)'&(')#N9$%')K'"A'Q6')T'96)C7)#$%8AA'(,)&#6)f'<N$%)'&()
76NK&#$%'")T7(#$%)/F68(QNP&g,)012J4.)V'")>'#7(<')-'(#$%'(P'"#68(<)"'<7C&'"6)5:)87A)
QNKA?N(6&'"6')V&#K98;#)L&')Z$797#):&A6)7(<EN<'")C.)@.)V86'(%8(<#$%7%'.)5:)Q8(()8+'")
87$%)<7"$%)Y"Nf'Q6N"'()&()_N?+&(86&N()?&6)Y'"#N(89)[N?K76'"()/Y[#4)(8?'(#)[=5!)
/["7CBH'&"8,)F8(<&(,)h)V'c8(6&,)2iiD4)'""'&$%6)L'"<'(.)V8"X+'"%&(87#)#&(<)899')="6'()
PN()[N?K76'"#K&'9'()&()<'")b8>',)'&()5:B!"9'+(&#)C7)#$%8AA'(.)V'")U(6'"#$%&'<)PN()5:)
C7)'&('?)DVBc&9?)+'#6'%6)<8"&(,)<8##)?8()?&6)<'")'"#6'996'()T'96)&(6'"8>&'"'(,)<'()
e7#68(<)<'")T'96)P'"R(<'"()7(<)'&()c''<+8$Q)'"%896'()Q8(()/F68(QNP&g,)012J4.)V7"$%)
<&')-a>9&$%Q'&6)<'")G(6'"8Q6&N()Q8(()?8()&()<&')P&"67'99')T'96)'&(687$%'(.)b876)@7"<'8)
7(<)[N&AA'6)/011D4)#&(<)G(6'"8Q6&N()7(<)G??'"#&N()CL'&)PN()<"'&)F$%9X##'9?'"Q?89'()
PN()5:.)V8#)<"&66')-'"Q?89)&#6)<&')Y%8(68#&'.)5&"6789):'89&6;)L&"<)NA6)P'"L'(<'6,)7?)
"'89')Y"NC'##')C7)#&?79&'"'(.)V&')!&(#$%"R(Q7(>)<'")Y8"8?'6'")<'")F&?7986&N(,)7?)<8#)
=7#?8`)'&('#)"'89'()Y"NC'##'#)PN99#6R(<&>)8+C7+&9<'(,)N%(')<&')F&?7986&N()C7)
7(6'"+"'$%'(,)9&'>6)NA6)&()<'()OR(<'()<'")5:B!(6L&$Q9'"E&(('()7(<)&%"'")Y%8(68#&'.)
!&(')L'&6'"')L&$%6&>')V'A&(&6&N()&?)O&(+9&$Q)87A)5&"6789):'89&6;)#&(<)5&"6789)
!(P&"N(?'(6#)/5!4.)^5!#)P'"?&66'9()<&')G997#&N()'&('")Y"R#'(C)8()'&('?)8(<'"'()Z"6)89#)
<'")8Q67'99'()K%;#&#$%'()U?>'+7(>^)/F68(QNP&g,)012J4.)-8()#&'%6)<8#)c'9<)#'9+#6)&#6)
'&>'(69&$%)#'%")+"'&6.))
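
To make the difference between a passive 3D film and interactive VR concrete, the following minimal sketch (not part of the original article) shows an interactive scene using three.js and its WebXR support: a controller "select" changes the state of the world and the user immediately sees the result. The import path, the scene contents and the random-colour feedback are illustrative assumptions, not a prescribed implementation.

```typescript
// Minimal sketch: a controller "select" changes the world state (the cube's
// colour) and the user sees the result at once - the interaction/feedback loop
// that separates VR from a passive 3D film.
// Note: the VRButton import path depends on the three.js version in use.
import * as THREE from 'three';
import { VRButton } from 'three/examples/jsm/webxr/VRButton.js';

const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.1, 100);

const renderer = new THREE.WebGLRenderer({ antialias: true });
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.xr.enabled = true;                                   // switch the renderer into WebXR mode
document.body.appendChild(renderer.domElement);
document.body.appendChild(VRButton.createButton(renderer));   // "Enter VR" button

scene.add(new THREE.HemisphereLight(0xffffff, 0x444444, 1));

// A single piece of "world state": a cube floating in front of the user.
const cube = new THREE.Mesh(
  new THREE.BoxGeometry(0.3, 0.3, 0.3),
  new THREE.MeshStandardMaterial({ color: 0x2194ce })
);
cube.position.set(0, 1.5, -1);
scene.add(cube);

// Interaction: pressing the primary trigger changes the cube's colour,
// giving immediate visual feedback inside the headset.
const controller = renderer.xr.getController(0);
controller.addEventListener('selectstart', () => {
  cube.material.color.setHex(Math.floor(Math.random() * 0xffffff));
});
scene.add(controller);

// Render loop: the cube slowly rotates so the scene is visibly "alive".
renderer.setAnimationLoop(() => {
  cube.rotation.y += 0.01;
  renderer.render(scene, camera);
});
```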
Development

After Palmer Luckey had launched a successful Kickstarter campaign for his VR headset Oculus Rift on August 1, 2012, large companies jumped onto the VR bandwagon again. He showed the world that it is possible to develop a high-end VR headset for a price below 300 dollars.

In 2014, Google introduced Google Cardboard at its Google I/O conference. It combines a mobile phone and a cardboard construction into a head-mounted display (HMD). Given how widely available mobile phones are, immersing oneself in a virtual world becomes possible for anyone who owns nothing more than a mobile phone. One can say that this makes VR and AR fundamentally suitable for the mass market, without users having to buy dedicated VR glasses.

The applications running on such glasses are, of course, diverse. Typically the gaming sector is strongly represented, but the education sector is also showing more and more possible applications (Sternig et al., 2017). For example, one can simulate how machines work (Spitzer & Ebner, 2016) or bring in remote experts to help solve problems (Spitzer et al., 2018). In short, there are hardly any limits here, and we can look forward to exciting developments.
References

@7"<'8,)\.,)h)[N&AA'6,)Y.)/011D4.)5&"6789)"'89&6;)6'$%(N9N>;.)=$8<'?&$)F'8"$%)[N?K9'6'.)
T&9';.)
)
["7CBH'&"8,)[.,)F8(<&(,)V.)S.,)h)V'c8(6&,)W.)=.)/2iiD4.)F7""N7(<B#$"''()K"Nf'$6&N(B+8#'<)
P&"6789)"'89&6;*)6%')<'#&>()8(<)&?K9'?'(686&N()NA)6%')[=5!.)G()-.)[.)T%&66N()/!<.4,)
FG\\:=YO)miD)Y"N$''<&(>#)NA)6%')016%)8((789)$N(A'"'($')N()[N?K76'")>"8K%&$#)8(<)
&(6'"8$6&P')6'$%(&n7'#)/KK.)2DJo2I04.)H'L)pN"Q*)=[-.))
)
FK&6C'",)-.)h)!+('",)-.)/012k4.)U#')[8#'#)8(<)="$%&6'$67"')NA)8()G(AN"?86&N()#;#6'?)6N)
&(6'>"86')#?8"6)>98##'#)&()'<7$86&N(89)'(P&"N(?'(6#.)G()Y"N$''<&(>#)NA)!<-'<&8*)TN"9<)
[N(A'"'($')N()!<7$86&N(89)-'<&8)8(<)W'$%(N9N>;)012k)/KK.)J2BJ34.)=##N$&86&N()AN")6%')
=<P8($'?'(6)NA)[N?K76&(>)&()!<7$86&N()/==[!4.)
)
FK&6C'",)-.,)H8(&$,)G.,)!+('",)-.)/01234)V&#68($')b'8"(&(>)8(<)=##&#68($')U#&(>)F?8"6)
\98##'#.)!<7$.)F$&.)0123,)3,)02)
)
Stanković,+S.+(2015).+5&"6789):'89&6;)8(<)5&"6789)!(P&"N(?'(6#)&()21)b'$67"'#.)G()=.)[.)
@NP&Q)/!<.4,)F;(6%'#&#)b'$67"'#)N()G?8>',)5&<'N,)8(<)-796&?'<&8)Y"N$'##&(>)/5N9.)3,)KK.)
2o2ij4.)-N">8()h)[98;KNN9)Y7+9&#%'"#.))
)
F6'"(&>,)[.,)FK&6C'",)-.,)h)!+('",)-.)/012j4.)b'8"(&(>)&()8)5&"6789)!(P&"N(?'(6*)
G?K9'?'(686&N()8(<)!P89786&N()NA)8)5:)-86%B\8?'.)G()\.)_7"7+8$8Q,)h)O.)=96&(K7997Q)
/!<#.4,)-N+&9')W'$%(N9N>&'#)8(<)=7>?'(6'<):'89&6;)&()ZK'()!<7$86&N()/KK.)2jJB2ii4.)
O'"#%';,)Y=*)G\G)\9N+89.)<N&*21.I123Eij3B2BJ00JB0221BJ.$%11i)
... Against this background, engineering students must be trained and professionalized in the use of their anticipated virtual work environment [11]. However, even though its principles are often taught, the practical use of VR by students within the frame of university engineering education is relatively rare [12]. ...
Conference Paper
Full-text available
As many of the common mistakes made by engineering students are based on weak spatial imagination, the use of virtual reality could help to enhance those skills in undergraduate teaching. As part of a university lecture in product development that covers methods of product development and technical design, the use of virtual reality was integrated into an engineering exercise. Supplementing the usual technical tools like CAD, half of the 14 student groups were allowed to use a dedicated VR-application to review their machine designs in virtual reality. While the objective performance of those groups was not better in comparison to the “non-VR” groups, most of the student groups were able to identify design issues or mistakes through the use of VR, which they couldn’t find using only CAD. According to interviews, 69.7% of the VR-users reported that they were only able to experience and realize “the true spatial dimensions” through the use of VR.
Chapter
Full-text available
Soft skills training is considered important for employees to be successful at work. Several companies are offering immersive virtual soft skills training with head-mounted displays. The main contribution of this paper is to provide an overview of the research literature within the field of using immersive virtual soft skills learning and training of employees. The results of this preliminary scoping review show that there is a lack of research literature and empirical studies within this topic.
Book
Full-text available
The 7th International Conference of the Immersive Learning Research Network (iLRN 2021) is an innovative and interactive virtual gathering for a strengthening global network of researchers and practitioners collaborating to develop the scientific, technical, and applied potential of immersive learning. It is the premier scholarly event focusing on advances in the use of virtual reality (VR), augmented reality (AR), mixed reality (MR), and other extended reality (XR) technologies to support learners across the full span of learning—from K-12 through higher education to work-based, informal, and lifelong learning contexts. Following the success of iLRN 2020, our first fully online and in-VR conference, this year’s conference was once again based on the iLRN Virtual Campus, powered by ©Virbela, but with a range of activities taking place on various other XR simulation, gaming, and other platforms. Scholars and professionals working from informal and formal education settings as well as those representing diverse industry sectors are invited to participate in the conference, where they may share their research findings, experiences, and insights; network and establish partnerships to envision and shape the future of XR and immersive technologies for learning; and contribute to the emerging scholarly knowledge base on how these technologies can be used to create experiences that educate, engage, and excite learners.
Chapter
Full-text available
With the introduction of Google Cardboard, a combination of mobile devices, Virtual Reality (VR) and making was created. This "marriage" opened a wide range of possible, cheap Virtual Reality applications, which can be created and used by everyone. In this chapter, the potential of combining making, gaming and education is demonstrated by evaluating an implemented math-game prototype in a school with pupils aged 12-13. The aim of the virtual reality game is to solve math exercises with increasing difficulty. The pupils were motivated and excited by immersing into the virtual world of the game to solve exercises and advance in the game. The results of the evaluation were very positive and showed the high motivational potential of combining making and game-based learning and its usage in schools as an educational instrument.
Article
Full-text available
With the everyday growth of technology, new possibilities arise to support activities of everyday life. In education and training, more and more digital learning materials are emerging, but there is still room for improvement. This research study describes the implementation of a smart glasses app and infrastructure to support distance learning with WebRTC. The instructor is connected to the learner by a video streaming session and gets the live video stream from the learner’s smart glasses from the learner’s point of view. Additionally, the instructor can draw on the video to add context-aware information. The drawings are immediately sent to the learner to support him to solve a task. The prototype has been qualitatively evaluated by a test user who performed a fine-motor-skills task and a maintenance task under assistance of the remote instructor.
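
The architecture sketched in this abstract, a live video stream from the learner's smart glasses plus instructor drawings sent back as annotations, maps naturally onto the standard browser WebRTC API. The following is a rough sketch of the learner side under that assumption; the signaling transport, the channel name "annotations" and the annotation wire format are invented for illustration and are not taken from the paper.

```typescript
// Minimal sketch of the learner side: stream the camera to the instructor and
// receive drawing annotations over a WebRTC data channel. A separate signaling
// channel (e.g. a WebSocket server, not shown) is assumed for exchanging the
// offer/answer and ICE candidates.
type Annotation = { x: number; y: number; color: string };   // assumed wire format

async function startLearnerSession(signal: (msg: object) => void): Promise<RTCPeerConnection> {
  const pc = new RTCPeerConnection({ iceServers: [{ urls: 'stun:stun.l.google.com:19302' }] });

  // Send the (smart-glasses) camera stream to the instructor.
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  stream.getTracks().forEach((track) => pc.addTrack(track, stream));

  // Data channel on which the instructor's drawings arrive.
  const annotations = pc.createDataChannel('annotations');
  annotations.onmessage = (event) => {
    const a: Annotation = JSON.parse(event.data);
    drawOverlay(a);                                // render the instructor's stroke on the display
  };

  // Hand ICE candidates and the offer to the (assumed) signaling channel.
  pc.onicecandidate = (event) => {
    if (event.candidate) signal({ candidate: event.candidate });
  };
  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);
  signal({ offer });

  return pc;
}

// Placeholder: draw a single annotation point on an overlay canvas.
function drawOverlay(a: Annotation): void {
  const canvas = document.getElementById('overlay') as HTMLCanvasElement;
  const ctx = canvas.getContext('2d');
  if (!ctx) return;
  ctx.fillStyle = a.color;
  ctx.fillRect(a.x, a.y, 4, 4);
}
```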
Conference Paper
Full-text available
Wearable devices, such as smart glasses, are nowadays easily available on the market; therefore, these devices could be used to evaluate more and more use cases in the educational domain. After a short introduction to smart glasses functionality, features and user interaction techniques, several use cases are defined and described. To integrate smart glasses into the educational domain, specialized information systems and infrastructure are necessary. A basic concept of a suitable information system is defined and explained by a sample use case. The main advantage of using smart glasses in the educational domain is that users can interact with the device hands-free; therefore, (fine motor skills) tasks can be performed while receiving visual and vocal support simultaneously. Additionally, the teacher/observer can evaluate the performance remotely. Wearable devices are becoming more available and cheaper, but should only be used in suitable use cases where the learning experience could be improved.
Conference Paper
Full-text available
This paper describes the CAVE (CAVE Automatic Virtual Environment) virtual reality/scientific visualization system in detail and demonstrates that projection technology applied to virtual-reality goals achieves a system that matches the quality of workstation screens in terms of resolution, color, and flicker-free stereo. In addition, this format helps reduce the effect of common tracking and system latency errors. The off-axis perspective projection techniques we use are shown to be simple and straightforward. Our techniques for doing multi-screen stereo vision are enumerated, and design barriers, past and current, are described. Advantages and disadvantages of the projection paradigm are discussed, with an analysis of the effect of tracking noise and delay on the user. Successive refinement, a necessary tool for scientific visualization, is developed in the virtual reality context. The use of the CAVE as a one-to-many presentation device at SIGGRAPH '92 and Supercomputing '92 for computational science data is also mentioned.
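
The "off-axis perspective projection" mentioned here amounts to an asymmetric view frustum whose extents are recomputed each frame from the tracked eye position relative to the screen. As a rough illustration (not code from the paper), the sketch below builds the classic OpenGL-style asymmetric frustum matrix for a single wall lying in the z = 0 plane; the coordinate conventions and variable names are assumptions chosen for the example.

```typescript
// Rough sketch of an off-axis (asymmetric-frustum) projection for one CAVE wall
// lying in the z = 0 plane, viewed by a tracked eye at (ex, ey, ez) with ez > 0.
// Returns a row-major 4x4 matrix in the classic OpenGL glFrustum convention.
// In a full renderer this is combined with a view matrix that translates the
// world by (-ex, -ey, -ez) so the eye sits at the origin.
function offAxisProjection(
  ex: number, ey: number, ez: number,               // tracked eye position
  xL: number, xR: number, yB: number, yT: number,   // screen edges in world units
  near: number, far: number
): number[][] {
  // Project the screen edges onto the near plane as seen from the eye.
  const l = (xL - ex) * near / ez;
  const r = (xR - ex) * near / ez;
  const b = (yB - ey) * near / ez;
  const t = (yT - ey) * near / ez;

  // Standard asymmetric frustum matrix (glFrustum).
  return [
    [2 * near / (r - l), 0,                  (r + l) / (r - l),            0],
    [0,                  2 * near / (t - b), (t + b) / (t - b),            0],
    [0,                  0,                  -(far + near) / (far - near), -2 * far * near / (far - near)],
    [0,                  0,                  -1,                           0],
  ];
}
```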
Article
The book is based on the material originally developed for the course on Virtual Reality, which the author was teaching at Tampere University of Technology, as well as the course on Virtual Environments that the author had prepared for the University for Advancing Studies at Tempe, Arizona. This original purpose has influenced the structure of this book as well as the depth to which we explore the presented concepts. Therefore, our intention in this book is to give an introduction into the important issues regarding a series of related concepts of Virtual Reality, Augmented Reality, and Virtual Environments. We do not attempt to go into any of these issues in depth but rather outline general principles and discuss them in a sense broad enough to provide sufficient foundations for further study. In other words, we aim to provide a set of keywords to the reader in order to give him a good starting point from which he could go on and explore any of these issues in detail.
Article
From the Publisher: This in-depth review of current virtual reality technology and its applications provides a detailed analysis of the engineering, scientific and functional aspects of virtual reality systems and the fundamentals of VR modeling and programming. It also contains an exhaustive list of present and future VR applications in a number of diverse fields. Virtual Reality Technology is the first book to include a full chapter on force and tactile feedback and to discuss newer interface tools such as 3-D probes and cyberscopes. Supplemented with 23 color plates and more than 200 drawings and tables which illustrate the concepts described.