WebXR Implementation (VR/AR in Browser) on Website
WebXR Device API allows you to run VR and AR experiences directly in the browser without installing apps. Users open a page in Chrome on Android or Safari on iPhone and enter augmented reality. On desktop with a VR headset (Meta Quest Browser, Valve Index + SteamVR) — virtual reality.
Browser Support
AR (immersive-ar): Chrome Android 81+ (WebXR AR module), Samsung Internet 14+. iOS/Safari — via WebXR Viewer or native AR Quick Look (USDZ files).
VR (immersive-vr): Chrome Android with Cardboard headset, Meta Quest Browser, Wolvic (successor to the discontinued Firefox Reality), Valve Index via SteamVR.
Inline (3D in page without headset): all browsers with WebGL.
Support check:
// isSessionSupported resolves to a boolean; `navigator.xr?.` yields
// undefined (falsy) on browsers without the WebXR Device API at all.
const isARSupported = await navigator.xr?.isSessionSupported('immersive-ar')
const isVRSupported = await navigator.xr?.isSessionSupported('immersive-vr')
Three.js + WebXR
Three.js has built-in WebXR support:
npm install three @types/three
import * as THREE from 'three'
import { ARButton } from 'three/examples/jsm/webxr/ARButton'
import { VRButton } from 'three/examples/jsm/webxr/VRButton'
import { XRControllerModelFactory } from 'three/examples/jsm/webxr/XRControllerModelFactory'
/**
 * Renders a WebXR scene in either AR (surface hit-testing + tap-to-place)
 * or VR (two tracked controllers with models) mode.
 *
 * @param mode 'ar' → ARButton with hit-test + dom-overlay; 'vr' → VRButton.
 */
function WebXRScene({ mode }: { mode: 'ar' | 'vr' }) {
  const mountRef = useRef<HTMLDivElement>(null)
  useEffect(() => {
    const container = mountRef.current!
    const renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true })
    renderer.setPixelRatio(window.devicePixelRatio)
    renderer.setSize(container.clientWidth, container.clientHeight)
    renderer.xr.enabled = true // Enable the WebXR rendering path
    container.appendChild(renderer.domElement)

    const scene = new THREE.Scene()
    const camera = new THREE.PerspectiveCamera(70, container.clientWidth / container.clientHeight, 0.01, 100)

    // Lighting
    scene.add(new THREE.AmbientLight(0xffffff, 1))
    const dirLight = new THREE.DirectionalLight(0xffffff, 2)
    dirLight.position.set(0, 5, 3)
    scene.add(dirLight)

    // Reticle marking the detected surface; pose is written into its matrix
    // each frame, hence matrixAutoUpdate is off (see createReticle).
    const reticle = createReticle()
    scene.add(reticle)

    // Places a small cube at the reticle's current pose.
    function onSelectStart() {
      if (!reticle.visible) return
      const mesh = new THREE.Mesh(
        new THREE.BoxGeometry(0.1, 0.1, 0.1),
        new THREE.MeshStandardMaterial({ color: 0x2563eb }),
      )
      mesh.position.setFromMatrixPosition(reticle.matrix)
      mesh.quaternion.setFromRotationMatrix(reticle.matrix)
      scene.add(mesh)
    }
    // BUG FIX: the original registered 'selectend' with an undefined
    // handler (ReferenceError). Kept as an explicit no-op.
    function onSelectEnd() {}

    // AR/VR entry button
    const button = mode === 'ar'
      ? ARButton.createButton(renderer, {
          requiredFeatures: ['hit-test'],   // Surface detection
          optionalFeatures: ['dom-overlay'], // UI over AR
          domOverlay: { root: container },
        })
      : VRButton.createButton(renderer)
    document.body.appendChild(button)

    if (mode === 'vr') {
      // VR controllers with rendered models
      const controllerModelFactory = new XRControllerModelFactory()
      for (let i = 0; i < 2; i++) {
        const controller = renderer.xr.getController(i)
        controller.addEventListener('selectstart', onSelectStart)
        controller.addEventListener('selectend', onSelectEnd)
        scene.add(controller)
        const controllerGrip = renderer.xr.getControllerGrip(i)
        controllerGrip.add(controllerModelFactory.createControllerModel(controllerGrip))
        scene.add(controllerGrip)
      }
    } else {
      // BUG FIX: in handheld AR screen taps are delivered as 'select'
      // events on input source 0; the original never attached the placement
      // handler in AR mode, so tapping placed nothing.
      const controller = renderer.xr.getController(0)
      controller.addEventListener('select', onSelectStart)
      scene.add(controller)
    }

    // Hit testing for AR (placing objects on surfaces)
    let hitTestSource: XRHitTestSource | null = null
    const onSessionStart = async () => {
      if (mode !== 'ar') return
      const session = renderer.xr.getSession()!
      const viewerSpace = await session.requestReferenceSpace('viewer')
      // requestHitTestSource is absent when the 'hit-test' feature
      // was not granted — guard instead of asserting with `!`.
      hitTestSource = (await session.requestHitTestSource?.({ space: viewerSpace })) ?? null
    }
    const onSessionEnd = () => {
      hitTestSource?.cancel()
      hitTestSource = null
      reticle.visible = false
    }
    renderer.xr.addEventListener('sessionstart', onSessionStart)
    renderer.xr.addEventListener('sessionend', onSessionEnd)

    renderer.setAnimationLoop((_timestamp, frame) => {
      // BUG FIX: only hit-test once the async source request has resolved;
      // the original dereferenced `hitTestSource!` and crashed on the
      // first frames of an AR session.
      if (mode === 'ar' && frame && hitTestSource) {
        const referenceSpace = renderer.xr.getReferenceSpace()!
        const hitTestResults = frame.getHitTestResults(hitTestSource)
        const pose = hitTestResults[0]?.getPose(referenceSpace)
        if (pose) {
          reticle.visible = true
          reticle.matrix.fromArray(pose.transform.matrix)
        } else {
          reticle.visible = false
        }
      }
      renderer.render(scene, camera)
    })

    return () => {
      renderer.setAnimationLoop(null)
      // BUG FIX: detach session listeners and cancel the hit-test source so
      // the disposed renderer is not retained after unmount.
      renderer.xr.removeEventListener('sessionstart', onSessionStart)
      renderer.xr.removeEventListener('sessionend', onSessionEnd)
      hitTestSource?.cancel()
      button.remove()
      container.removeChild(renderer.domElement)
      renderer.dispose()
    }
  }, [mode])
  return (
    <div ref={mountRef} style={{ width: '100%', height: '600px', position: 'relative' }} />
  )
}
/**
 * Builds the flat ring used as the AR surface-placement indicator.
 * Hidden by default; its pose is driven externally by writing into
 * `matrix` each frame, so automatic matrix updates are disabled.
 */
function createReticle(): THREE.Mesh {
  const ring = new THREE.Mesh(
    new THREE.RingGeometry(0.05, 0.07, 32).rotateX(-Math.PI / 2),
    new THREE.MeshBasicMaterial({ color: 0xffffff, side: THREE.DoubleSide }),
  )
  ring.matrixAutoUpdate = false
  ring.visible = false
  return ring
}
A-Frame: Declarative WebXR
For simpler VR scenes without deep customization:
npm install aframe
<!-- Full-featured VR scene in HTML -->
<a-scene>
  <a-sky color="#1a1a2e"></a-sky>
  <!-- Environment: 20x20 floor plane rotated flat -->
  <a-plane position="0 0 0" rotation="-90 0 0" width="20" height="20" color="#0a0a1a"></a-plane>
  <!-- Interactive object: continuously rotating box with hover color change.
       NOTE(review): event-set__* attributes come from the separate
       aframe-event-set-component package — confirm it is loaded alongside
       A-Frame, and that a 'cursor-listener' component is registered. -->
  <a-box
    position="-1 1 -3"
    rotation="0 45 0"
    color="#2563eb"
    animation="property: rotation; to: 0 405 0; loop: true; dur: 4000; easing: linear"
    event-set__mouseenter="color: #60a5fa"
    event-set__mouseleave="color: #2563eb"
    cursor-listener
  ></a-box>
  <!-- Camera with gaze cursor for headsets without controllers -->
  <a-camera>
    <a-cursor
      animation__click="property: scale; startEvents: click; from: 0.1 0.1 0.1; to: 1 1 1; dur: 150"
    ></a-cursor>
  </a-camera>
</a-scene>
iOS: AR Quick Look
Safari iOS doesn't support WebXR AR, but supports AR Quick Look via USDZ files:
<!-- Native AR on iOS via USDZ -->
<!-- rel="ar" makes Safari open the USDZ in AR Quick Look instead of
     navigating. NOTE(review): Apple requires an <img> as a direct child
     of the anchor for the AR badge/behavior — keep it first, as here. -->
<a
  href="/models/product.usdz"
  rel="ar"
  id="ar-link"
>
  <img src="/models/product-preview.jpg" alt="View in AR" />
  <span>View in Your Space</span>
</a>
// Detect platform and show the appropriate AR entry point.
// BUG FIX: iPadOS 13+ reports a Macintosh user-agent, so the classic
// /iPad|iPhone|iPod/ test misses modern iPads; a Mac UA combined with
// multi-touch support identifies them.
const isIOS =
  /iPad|iPhone|iPod/.test(navigator.userAgent) ||
  (navigator.userAgent.includes('Mac') && navigator.maxTouchPoints > 1)
const isAndroid = /Android/.test(navigator.userAgent)
if (isIOS) {
  // Native AR Quick Look via the USDZ link
  document.getElementById('ar-link')!.style.display = 'block'
} else if (isAndroid && await navigator.xr?.isSessionSupported('immersive-ar')) {
  // WebXR AR for Android Chrome
  document.getElementById('ar-button')!.style.display = 'block'
}
3D Model Conversion
For AR Quick Look you need USDZ (iOS), for WebXR — GLTF. Convert via Blender or CLI:
# GLTF → USDZ via Apple Reality Converter (macOS) or online services
# GLB → USDZ via usd-from-gltf (npm)
npm install -g usd-from-gltf
usd_from_gltf model.glb model.usdz
What We Do
Evaluate target devices (Android WebXR, iOS USDZ, VR headsets). Implement AR product view with hit-testing — user sees product in their interior. Prepare USDZ files for iOS in parallel. Test on real devices.
Timeline: AR product view (Android + iOS) — 5–7 days. Interactive VR scene with controllers — 8–12 days.







