* Copyright 2010-2024 Three.js Authors
* SPDX-License-Identifier: MIT
*/
-const REVISION = '168';
+const REVISION = '169';
const MOUSE = { LEFT: 0, MIDDLE: 1, RIGHT: 2, ROTATE: 0, DOLLY: 1, PAN: 2 };
const TOUCH = { ROTATE: 0, PAN: 1, DOLLY_PAN: 2, DOLLY_ROTATE: 3 };
}
function toNormalizedProjectionMatrix( projectionMatrix ) {

	// Remaps the z row of a column-major projection matrix from the
	// WebGL [-1, 1] clip-space depth convention to [0, 1], in place.
	// Mutates projectionMatrix.elements directly (no copy is made).
	const m = projectionMatrix.elements;

	// The z row lives at elements 2, 6, 10, 14; each entry is blended
	// with the w row (the element directly below it, at index + 1).
	for ( let i = 2; i < 16; i += 4 ) {

		m[ i ] = 0.5 * m[ i ] + 0.5 * m[ i + 1 ];

	}

}
+
function toReversedProjectionMatrix( projectionMatrix ) {

	// Flips a [0, 1]-depth projection matrix so that near maps to 1 and
	// far maps to 0 (reversed depth). Mutates the matrix in place.
	const m = projectionMatrix.elements;

	// m[ 11 ] === -1 identifies a perspective projection (w = -z);
	// orthographic matrices carry 0 there.
	const perspective = m[ 11 ] === - 1;

	m[ 10 ] = perspective ? - m[ 10 ] - 1 : - m[ 10 ];
	m[ 14 ] = perspective ? - m[ 14 ] : - m[ 14 ] + 1;

}
+
/**
* Matrices converting P3 <-> Rec. 709 primaries, without gamut mapping
* or clipping. Based on W3C specifications for sRGB and Display P3,
const _vbp = /*@__PURE__*/ new Vector3();
const _vcp = /*@__PURE__*/ new Vector3();
+const _v40 = /*@__PURE__*/ new Vector4();
+const _v41 = /*@__PURE__*/ new Vector4();
+const _v42 = /*@__PURE__*/ new Vector4();
+
class Triangle {
constructor( a = new Vector3(), b = new Vector3(), c = new Vector3() ) {
}
+ static getInterpolatedAttribute( attr, i1, i2, i3, barycoord, target ) {
+
+ _v40.setScalar( 0 );
+ _v41.setScalar( 0 );
+ _v42.setScalar( 0 );
+
+ _v40.fromBufferAttribute( attr, i1 );
+ _v41.fromBufferAttribute( attr, i2 );
+ _v42.fromBufferAttribute( attr, i3 );
+
+ target.setScalar( 0 );
+ target.addScaledVector( _v40, barycoord.x );
+ target.addScaledVector( _v41, barycoord.y );
+ target.addScaledVector( _v42, barycoord.z );
+
+ return target;
+
+ }
+
static isFrontFacing( a, b, c, direction ) {
_v0$2.subVectors( c, b );
this.normalized = normalized;
this.usage = StaticDrawUsage;
- this._updateRange = { offset: 0, count: - 1 };
this.updateRanges = [];
this.gpuType = FloatType;
}
- get updateRange() {
-
- warnOnce( 'THREE.BufferAttribute: updateRange() is deprecated and will be removed in r169. Use addUpdateRange() instead.' ); // @deprecated, r159
- return this._updateRange;
-
- }
-
setUsage( value ) {
this.usage = value;
const _tempA = /*@__PURE__*/ new Vector3();
const _morphA = /*@__PURE__*/ new Vector3();
-const _uvA$1 = /*@__PURE__*/ new Vector2();
-const _uvB$1 = /*@__PURE__*/ new Vector2();
-const _uvC$1 = /*@__PURE__*/ new Vector2();
-
-const _normalA = /*@__PURE__*/ new Vector3();
-const _normalB = /*@__PURE__*/ new Vector3();
-const _normalC = /*@__PURE__*/ new Vector3();
-
const _intersectionPoint = /*@__PURE__*/ new Vector3();
const _intersectionPointWorld = /*@__PURE__*/ new Vector3();
if ( intersection ) {
- if ( uv ) {
+ const barycoord = new Vector3();
+ Triangle.getBarycoord( _intersectionPoint, _vA$1, _vB$1, _vC$1, barycoord );
- _uvA$1.fromBufferAttribute( uv, a );
- _uvB$1.fromBufferAttribute( uv, b );
- _uvC$1.fromBufferAttribute( uv, c );
+ if ( uv ) {
- intersection.uv = Triangle.getInterpolation( _intersectionPoint, _vA$1, _vB$1, _vC$1, _uvA$1, _uvB$1, _uvC$1, new Vector2() );
+ intersection.uv = Triangle.getInterpolatedAttribute( uv, a, b, c, barycoord, new Vector2() );
}
if ( uv1 ) {
- _uvA$1.fromBufferAttribute( uv1, a );
- _uvB$1.fromBufferAttribute( uv1, b );
- _uvC$1.fromBufferAttribute( uv1, c );
-
- intersection.uv1 = Triangle.getInterpolation( _intersectionPoint, _vA$1, _vB$1, _vC$1, _uvA$1, _uvB$1, _uvC$1, new Vector2() );
+ intersection.uv1 = Triangle.getInterpolatedAttribute( uv1, a, b, c, barycoord, new Vector2() );
}
if ( normal ) {
- _normalA.fromBufferAttribute( normal, a );
- _normalB.fromBufferAttribute( normal, b );
- _normalC.fromBufferAttribute( normal, c );
-
- intersection.normal = Triangle.getInterpolation( _intersectionPoint, _vA$1, _vB$1, _vC$1, _normalA, _normalB, _normalC, new Vector3() );
+ intersection.normal = Triangle.getInterpolatedAttribute( normal, a, b, c, barycoord, new Vector3() );
if ( intersection.normal.dot( ray.direction ) > 0 ) {
Triangle.getNormal( _vA$1, _vB$1, _vC$1, face.normal );
intersection.face = face;
+ intersection.barycoord = barycoord;
}
function updateBuffer( buffer, attribute, bufferType ) {
const array = attribute.array;
- const updateRange = attribute._updateRange; // @deprecated, r159
const updateRanges = attribute.updateRanges;
gl.bindBuffer( bufferType, buffer );
- if ( updateRange.count === - 1 && updateRanges.length === 0 ) {
+ if ( updateRanges.length === 0 ) {
// Not using update ranges
gl.bufferSubData( bufferType, 0, array );
- }
+ } else {
- if ( updateRanges.length !== 0 ) {
+ // Before applying update ranges, we merge any adjacent / overlapping
+ // ranges to reduce load on `gl.bufferSubData`. Empirically, this has led
+ // to performance improvements for applications which make heavy use of
+ // update ranges. Likely due to GPU command overhead.
+ //
+ // Note that to reduce garbage collection between frames, we merge the
+ // update ranges in-place. This is safe because this method will clear the
+ // update ranges once updated.
- for ( let i = 0, l = updateRanges.length; i < l; i ++ ) {
+ updateRanges.sort( ( a, b ) => a.start - b.start );
+ // To merge the update ranges in-place, we work from left to right in the
+ // existing updateRanges array, merging ranges. This may result in a final
+ // array which is smaller than the original. This index tracks the last
+ // index representing a merged range, any data after this index can be
+ // trimmed once the merge algorithm is completed.
+ let mergeIndex = 0;
+
+ for ( let i = 1; i < updateRanges.length; i ++ ) {
+
+ const previousRange = updateRanges[ mergeIndex ];
const range = updateRanges[ i ];
- gl.bufferSubData( bufferType, range.start * array.BYTES_PER_ELEMENT,
- array, range.start, range.count );
+ // We add one here to merge adjacent ranges. This is safe because ranges
+ // operate over positive integers.
+ if ( range.start <= previousRange.start + previousRange.count + 1 ) {
+
+ previousRange.count = Math.max(
+ previousRange.count,
+ range.start + range.count - previousRange.start
+ );
+
+ } else {
+
+ ++ mergeIndex;
+ updateRanges[ mergeIndex ] = range;
+
+ }
}
- attribute.clearUpdateRanges();
+ // Trim the array to only contain the merged ranges.
+ updateRanges.length = mergeIndex + 1;
- }
+ for ( let i = 0, l = updateRanges.length; i < l; i ++ ) {
- // @deprecated, r159
- if ( updateRange.count !== - 1 ) {
+ const range = updateRanges[ i ];
- gl.bufferSubData( bufferType, updateRange.offset * array.BYTES_PER_ELEMENT,
- array, updateRange.offset, updateRange.count );
+ gl.bufferSubData( bufferType, range.start * array.BYTES_PER_ELEMENT,
+ array, range.start, range.count );
+
+ }
- updateRange.count = - 1; // reset range
+ attribute.clearUpdateRanges();
}
const fragment$2 = "uniform vec3 color;\nuniform float opacity;\n#include <common>\n#include <packing>\n#include <fog_pars_fragment>\n#include <bsdfs>\n#include <lights_pars_begin>\n#include <logdepthbuf_pars_fragment>\n#include <shadowmap_pars_fragment>\n#include <shadowmask_pars_fragment>\nvoid main() {\n\t#include <logdepthbuf_fragment>\n\tgl_FragColor = vec4( color, opacity * ( 1.0 - getShadowMask() ) );\n\t#include <tonemapping_fragment>\n\t#include <colorspace_fragment>\n\t#include <fog_fragment>\n}";
-const vertex$1 = "uniform float rotation;\nuniform vec2 center;\n#include <common>\n#include <uv_pars_vertex>\n#include <fog_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\tvec4 mvPosition = modelViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 );\n\tvec2 scale;\n\tscale.x = length( vec3( modelMatrix[ 0 ].x, modelMatrix[ 0 ].y, modelMatrix[ 0 ].z ) );\n\tscale.y = length( vec3( modelMatrix[ 1 ].x, modelMatrix[ 1 ].y, modelMatrix[ 1 ].z ) );\n\t#ifndef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) scale *= - mvPosition.z;\n\t#endif\n\tvec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;\n\tvec2 rotatedPosition;\n\trotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;\n\trotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;\n\tmvPosition.xy += rotatedPosition;\n\tgl_Position = projectionMatrix * mvPosition;\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}";
+const vertex$1 = "uniform float rotation;\nuniform vec2 center;\n#include <common>\n#include <uv_pars_vertex>\n#include <fog_pars_vertex>\n#include <logdepthbuf_pars_vertex>\n#include <clipping_planes_pars_vertex>\nvoid main() {\n\t#include <uv_vertex>\n\tvec4 mvPosition = modelViewMatrix[ 3 ];\n\tvec2 scale = vec2( length( modelMatrix[ 0 ].xyz ), length( modelMatrix[ 1 ].xyz ) );\n\t#ifndef USE_SIZEATTENUATION\n\t\tbool isPerspective = isPerspectiveMatrix( projectionMatrix );\n\t\tif ( isPerspective ) scale *= - mvPosition.z;\n\t#endif\n\tvec2 alignedPosition = ( position.xy - ( center - vec2( 0.5 ) ) ) * scale;\n\tvec2 rotatedPosition;\n\trotatedPosition.x = cos( rotation ) * alignedPosition.x - sin( rotation ) * alignedPosition.y;\n\trotatedPosition.y = sin( rotation ) * alignedPosition.x + cos( rotation ) * alignedPosition.y;\n\tmvPosition.xy += rotatedPosition;\n\tgl_Position = projectionMatrix * mvPosition;\n\t#include <logdepthbuf_vertex>\n\t#include <clipping_planes_vertex>\n\t#include <fog_vertex>\n}";
const fragment$1 = "uniform vec3 diffuse;\nuniform float opacity;\n#include <common>\n#include <uv_pars_fragment>\n#include <map_pars_fragment>\n#include <alphamap_pars_fragment>\n#include <alphatest_pars_fragment>\n#include <alphahash_pars_fragment>\n#include <fog_pars_fragment>\n#include <logdepthbuf_pars_fragment>\n#include <clipping_planes_pars_fragment>\nvoid main() {\n\tvec4 diffuseColor = vec4( diffuse, opacity );\n\t#include <clipping_planes_fragment>\n\tvec3 outgoingLight = vec3( 0.0 );\n\t#include <logdepthbuf_fragment>\n\t#include <map_fragment>\n\t#include <alphamap_fragment>\n\t#include <alphatest_fragment>\n\t#include <alphahash_fragment>\n\toutgoingLight = diffuseColor.rgb;\n\t#include <opaque_fragment>\n\t#include <tonemapping_fragment>\n\t#include <colorspace_fragment>\n\t#include <fog_fragment>\n}";
}
const logarithmicDepthBuffer = parameters.logarithmicDepthBuffer === true;
+ const reverseDepthBuffer = parameters.reverseDepthBuffer === true && extensions.has( 'EXT_clip_control' );
+
+ if ( reverseDepthBuffer === true ) {
+
+ const ext = extensions.get( 'EXT_clip_control' );
+ ext.clipControlEXT( ext.LOWER_LEFT_EXT, ext.ZERO_TO_ONE_EXT );
+
+ }
const maxTextures = gl.getParameter( gl.MAX_TEXTURE_IMAGE_UNITS );
const maxVertexTextures = gl.getParameter( gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS );
precision: precision,
logarithmicDepthBuffer: logarithmicDepthBuffer,
+ reverseDepthBuffer: reverseDepthBuffer,
maxTextures: maxTextures,
maxVertexTextures: maxVertexTextures,
parameters.numLightProbes > 0 ? '#define USE_LIGHT_PROBES' : '',
parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '',
+ parameters.reverseDepthBuffer ? '#define USE_REVERSEDEPTHBUF' : '',
'uniform mat4 modelMatrix;',
'uniform mat4 modelViewMatrix;',
parameters.decodeVideoTexture ? '#define DECODE_VIDEO_TEXTURE' : '',
parameters.logarithmicDepthBuffer ? '#define USE_LOGDEPTHBUF' : '',
+ parameters.reverseDepthBuffer ? '#define USE_REVERSEDEPTHBUF' : '',
'uniform mat4 viewMatrix;',
'uniform vec3 cameraPosition;',
const programs = [];
const logarithmicDepthBuffer = capabilities.logarithmicDepthBuffer;
+ const reverseDepthBuffer = capabilities.reverseDepthBuffer;
const SUPPORTS_VERTEX_TEXTURES = capabilities.vertexTextures;
let precision = capabilities.precision;
sizeAttenuation: material.sizeAttenuation === true,
logarithmicDepthBuffer: logarithmicDepthBuffer,
+ reverseDepthBuffer: reverseDepthBuffer,
skinning: object.isSkinnedMesh === true,
_programLayers.enable( 2 );
if ( parameters.logarithmicDepthBuffer )
_programLayers.enable( 3 );
- if ( parameters.skinning )
+ if ( parameters.reverseDepthBuffer )
_programLayers.enable( 4 );
- if ( parameters.morphTargets )
+ if ( parameters.skinning )
_programLayers.enable( 5 );
- if ( parameters.morphNormals )
+ if ( parameters.morphTargets )
_programLayers.enable( 6 );
- if ( parameters.morphColors )
+ if ( parameters.morphNormals )
_programLayers.enable( 7 );
- if ( parameters.premultipliedAlpha )
+ if ( parameters.morphColors )
_programLayers.enable( 8 );
- if ( parameters.shadowMapEnabled )
+ if ( parameters.premultipliedAlpha )
_programLayers.enable( 9 );
- if ( parameters.doubleSided )
+ if ( parameters.shadowMapEnabled )
_programLayers.enable( 10 );
- if ( parameters.flipSided )
+ if ( parameters.doubleSided )
_programLayers.enable( 11 );
- if ( parameters.useDepthPacking )
+ if ( parameters.flipSided )
_programLayers.enable( 12 );
- if ( parameters.dithering )
+ if ( parameters.useDepthPacking )
_programLayers.enable( 13 );
- if ( parameters.transmission )
+ if ( parameters.dithering )
_programLayers.enable( 14 );
- if ( parameters.sheen )
+ if ( parameters.transmission )
_programLayers.enable( 15 );
- if ( parameters.opaque )
+ if ( parameters.sheen )
_programLayers.enable( 16 );
- if ( parameters.pointsUvs )
+ if ( parameters.opaque )
_programLayers.enable( 17 );
- if ( parameters.decodeVideoTexture )
+ if ( parameters.pointsUvs )
_programLayers.enable( 18 );
- if ( parameters.alphaToCoverage )
+ if ( parameters.decodeVideoTexture )
_programLayers.enable( 19 );
+ if ( parameters.alphaToCoverage )
+ _programLayers.enable( 20 );
array.push( _programLayers.mask );
}
// Maps each depth-comparison constant to its logical complement. Used to
// swap the depth function when rendering with a reversed depth buffer;
// the mapping is symmetric, so applying it twice restores the original.
const reversedFuncs = {
	[ NeverDepth ]: AlwaysDepth,
	[ AlwaysDepth ]: NeverDepth,

	[ LessDepth ]: GreaterDepth,
	[ GreaterDepth ]: LessDepth,

	[ LessEqualDepth ]: GreaterEqualDepth,
	[ GreaterEqualDepth ]: LessEqualDepth,

	[ EqualDepth ]: NotEqualDepth,
	[ NotEqualDepth ]: EqualDepth,
};
+
function WebGLState( gl ) {
function ColorBuffer() {
function DepthBuffer() {
let locked = false;
+ let reversed = false;
let currentDepthMask = null;
let currentDepthFunc = null;
return {
+ setReversed: function ( value ) {
+
+ reversed = value;
+
+ },
+
setTest: function ( depthTest ) {
if ( depthTest ) {
setFunc: function ( depthFunc ) {
+ if ( reversed ) depthFunc = reversedFuncs[ depthFunc ];
+
if ( currentDepthFunc !== depthFunc ) {
switch ( depthFunc ) {
}
+ if ( glFormat === _gl.RGB_INTEGER ) {
+
+ if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RGB8UI;
+ if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.RGB16UI;
+ if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.RGB32UI;
+ if ( glType === _gl.BYTE ) internalFormat = _gl.RGB8I;
+ if ( glType === _gl.SHORT ) internalFormat = _gl.RGB16I;
+ if ( glType === _gl.INT ) internalFormat = _gl.RGB32I;
+
+ }
+
+ if ( glFormat === _gl.RGBA_INTEGER ) {
+
+ if ( glType === _gl.UNSIGNED_BYTE ) internalFormat = _gl.RGBA8UI;
+ if ( glType === _gl.UNSIGNED_SHORT ) internalFormat = _gl.RGBA16UI;
+ if ( glType === _gl.UNSIGNED_INT ) internalFormat = _gl.RGBA32UI;
+ if ( glType === _gl.BYTE ) internalFormat = _gl.RGBA8I;
+ if ( glType === _gl.SHORT ) internalFormat = _gl.RGBA16I;
+ if ( glType === _gl.INT ) internalFormat = _gl.RGBA32I;
+
+ }
+
if ( glFormat === _gl.RGB ) {
if ( glType === _gl.UNSIGNED_INT_5_9_9_9_REV ) internalFormat = _gl.RGB9_E5;
// camera matrices cache
+ const _currentProjectionMatrix = new Matrix4();
const _projScreenMatrix = new Matrix4();
const _vector3 = new Vector3();
state = new WebGLState( _gl );
+ if ( capabilities.reverseDepthBuffer ) state.buffers.depth.setReversed( true );
+
info = new WebGLInfo( _gl );
properties = new WebGLProperties();
textures = new WebGLTextures( _gl, extensions, state, properties, capabilities, utils, info );
}
- if ( depth ) bits |= _gl.DEPTH_BUFFER_BIT;
+ if ( depth ) {
+
+ bits |= _gl.DEPTH_BUFFER_BIT;
+ _gl.clearDepth( this.capabilities.reverseDepthBuffer ? 0 : 1 );
+
+ }
+
if ( stencil ) {
bits |= _gl.STENCIL_BUFFER_BIT;
scene.traverse( function ( object ) {
+ if ( ! ( object.isMesh || object.isPoints || object.isLine || object.isSprite ) ) {
+
+ return;
+
+ }
+
const material = object.material;
if ( material ) {
// common camera uniforms
- p_uniforms.setValue( _gl, 'projectionMatrix', camera.projectionMatrix );
+ if ( capabilities.reverseDepthBuffer ) {
+
+ _currentProjectionMatrix.copy( camera.projectionMatrix );
+
+ toNormalizedProjectionMatrix( _currentProjectionMatrix );
+ toReversedProjectionMatrix( _currentProjectionMatrix );
+
+ p_uniforms.setValue( _gl, 'projectionMatrix', _currentProjectionMatrix );
+
+ } else {
+
+ p_uniforms.setValue( _gl, 'projectionMatrix', camera.projectionMatrix );
+
+ }
+
p_uniforms.setValue( _gl, 'viewMatrix', camera.matrixWorldInverse );
const uCamPos = p_uniforms.map.cameraPosition;
if ( framebuffer ) {
- state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer );
-
- try {
-
- const texture = renderTarget.texture;
- const textureFormat = texture.format;
- const textureType = texture.type;
-
- if ( ! capabilities.textureFormatReadable( textureFormat ) ) {
-
- throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in RGBA or implementation defined format.' );
+ const texture = renderTarget.texture;
+ const textureFormat = texture.format;
+ const textureType = texture.type;
- }
+ if ( ! capabilities.textureFormatReadable( textureFormat ) ) {
- if ( ! capabilities.textureTypeReadable( textureType ) ) {
+ throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in RGBA or implementation defined format.' );
- throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in UnsignedByteType or implementation defined type.' );
+ }
- }
+ if ( ! capabilities.textureTypeReadable( textureType ) ) {
- // the following if statement ensures valid read requests (no out-of-bounds pixels, see #8604)
- if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) {
+ throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in UnsignedByteType or implementation defined type.' );
- const glBuffer = _gl.createBuffer();
- _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer );
- _gl.bufferData( _gl.PIXEL_PACK_BUFFER, buffer.byteLength, _gl.STREAM_READ );
- _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), 0 );
- _gl.flush();
+ }
- // check if the commands have finished every 8 ms
- const sync = _gl.fenceSync( _gl.SYNC_GPU_COMMANDS_COMPLETE, 0 );
- await probeAsync( _gl, sync, 4 );
+ // the following if statement ensures valid read requests (no out-of-bounds pixels, see #8604)
+ if ( ( x >= 0 && x <= ( renderTarget.width - width ) ) && ( y >= 0 && y <= ( renderTarget.height - height ) ) ) {
- try {
+ // set the active frame buffer to the one we want to read
+ state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer );
- _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer );
- _gl.getBufferSubData( _gl.PIXEL_PACK_BUFFER, 0, buffer );
+ const glBuffer = _gl.createBuffer();
+ _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer );
+ _gl.bufferData( _gl.PIXEL_PACK_BUFFER, buffer.byteLength, _gl.STREAM_READ );
+ _gl.readPixels( x, y, width, height, utils.convert( textureFormat ), utils.convert( textureType ), 0 );
- } finally {
+ // reset the frame buffer to the currently set buffer before waiting
+ const currFramebuffer = _currentRenderTarget !== null ? properties.get( _currentRenderTarget ).__webglFramebuffer : null;
+ state.bindFramebuffer( _gl.FRAMEBUFFER, currFramebuffer );
- _gl.deleteBuffer( glBuffer );
- _gl.deleteSync( sync );
+ // check if the commands have finished every 8 ms
+ const sync = _gl.fenceSync( _gl.SYNC_GPU_COMMANDS_COMPLETE, 0 );
- }
+ _gl.flush();
- return buffer;
+ await probeAsync( _gl, sync, 4 );
- }
+ // read the data and delete the buffer
+ _gl.bindBuffer( _gl.PIXEL_PACK_BUFFER, glBuffer );
+ _gl.getBufferSubData( _gl.PIXEL_PACK_BUFFER, 0, buffer );
+ _gl.deleteBuffer( glBuffer );
+ _gl.deleteSync( sync );
- } finally {
+ return buffer;
- // restore framebuffer of current render target if necessary
+ } else {
- const framebuffer = ( _currentRenderTarget !== null ) ? properties.get( _currentRenderTarget ).__webglFramebuffer : null;
- state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer );
+ throw new Error( 'THREE.WebGLRenderer.readRenderTargetPixelsAsync: requested read bounds are out of range.' );
}
this.count = array !== undefined ? array.length / stride : 0;
this.usage = StaticDrawUsage;
- this._updateRange = { offset: 0, count: - 1 };
this.updateRanges = [];
this.version = 0;
}
- get updateRange() {
-
- warnOnce( 'THREE.InterleavedBuffer: updateRange() is deprecated and will be removed in r169. Use addUpdateRange() instead.' ); // @deprecated, r159
- return this._updateRange;
-
- }
-
setUsage( value ) {
this.usage = value;
}
+ removeLevel( distance ) {
+
+ const levels = this.levels;
+
+ for ( let i = 0; i < levels.length; i ++ ) {
+
+ if ( levels[ i ].distance === distance ) {
+
+ const removedElements = levels.splice( i, 1 );
+ this.remove( removedElements[ 0 ].object );
+
+ return true;
+
+ }
+
+ }
+
+ return false;
+
+ }
+
getCurrentLevel() {
return this._currentLevel;
// stores visible, active, and geometry id per object
this._drawInfo = [];
+ // instance ids that have been set as inactive, and are available to be overwritten
+ this._availableInstanceIds = [];
+
// geometry information
this._drawRanges = [];
this._reservedRanges = [];
addInstance( geometryId ) {
+ const atCapacity = this._drawInfo.length >= this.maxInstanceCount;
+
// ensure we're not over geometry
- if ( this._drawInfo.length >= this._maxInstanceCount ) {
+ if ( atCapacity && this._availableInstanceIds.length === 0 ) {
throw new Error( 'BatchedMesh: Maximum item count reached.' );
}
- this._drawInfo.push( {
-
+ const instanceDrawInfo = {
visible: true,
active: true,
geometryIndex: geometryId,
+ };
- } );
+ let drawId = null;
+
+ // Prioritize using previously freed instance ids
+ if ( this._availableInstanceIds.length > 0 ) {
+
+ drawId = this._availableInstanceIds.pop();
+ this._drawInfo[ drawId ] = instanceDrawInfo;
+
+ } else {
+
+ drawId = this._drawInfo.length;
+ this._drawInfo.push( instanceDrawInfo );
+
+ }
- // initialize the matrix
- const drawId = this._drawInfo.length - 1;
const matricesTexture = this._matricesTexture;
const matricesArray = matricesTexture.image.data;
_identityMatrix.toArray( matricesArray, drawId * 16 );
}
*/
- /*
deleteInstance( instanceId ) {
- // Note: User needs to call optimize() afterward to pack the data.
-
const drawInfo = this._drawInfo;
if ( instanceId >= drawInfo.length || drawInfo[ instanceId ].active === false ) {
}
drawInfo[ instanceId ].active = false;
+ this._availableInstanceIds.push( instanceId );
this._visibilityChanged = true;
return this;
}
- */
// get bounding box and compute it if it doesn't exist
getBoundingBoxAt( geometryId, target ) {
}
+ setGeometryIdAt( instanceId, geometryId ) {
+
+ // return early if the geometry is out of range or not active
+ const drawInfo = this._drawInfo;
+ if ( instanceId >= drawInfo.length || drawInfo[ instanceId ].active === false ) {
+
+ return null;
+
+ }
+
+ // check if the provided geometryId is within the valid range
+ if ( geometryId < 0 || geometryId >= this._geometryCount ) {
+
+ return null;
+
+ }
+
+ drawInfo[ instanceId ].geometryIndex = geometryId;
+
+ return this;
+
+ }
+
+ getGeometryIdAt( instanceId ) {
+
+ const drawInfo = this._drawInfo;
+ if ( instanceId >= drawInfo.length || drawInfo[ instanceId ].active === false ) {
+
+ return - 1;
+
+ }
+
+ return drawInfo[ instanceId ].geometryIndex;
+
+ }
+
+ getGeometryRangeAt( geometryId, target = {} ) {
+
+ if ( geometryId < 0 || geometryId >= this._geometryCount ) {
+
+ return null;
+
+ }
+
+ const drawRange = this._drawRanges[ geometryId ];
+
+ target.start = drawRange.start;
+ target.count = drawRange.count;
+
+ return target;
+
+ }
+
raycast( raycaster, intersects ) {
const drawInfo = this._drawInfo;
index: a,
face: null,
faceIndex: null,
+ barycoord: null,
object: object
};
point: intersectPoint,
index: index,
face: null,
+ faceIndex: null,
+ barycoord: null,
object: object
} );
// faces
- indices.push( a, b, d );
- indices.push( b, c, d );
+ if ( radiusTop > 0 ) {
- // update group counter
+ indices.push( a, b, d );
+ groupCount += 3;
- groupCount += 6;
+ }
+
+ if ( radiusBottom > 0 ) {
+
+ indices.push( b, c, d );
+ groupCount += 3;
+
+ }
}
}
- const material = MaterialLoader.createMaterialFromType( json.type );
+ const material = this.createMaterialFromType( json.type );
if ( json.uuid !== undefined ) material.uuid = json.uuid;
if ( json.name !== undefined ) material.name = json.name;
}
+ createMaterialFromType( type ) {
+
+ return MaterialLoader.createMaterialFromType( type );
+
+ }
+
static createMaterialFromType( type ) {
const materialLib = {
function now() {

	// High-resolution timestamp in milliseconds from the Performance API.
	return performance.now();

}
}
- stop() {
+ stop( delay = 0 ) {
if ( this.hasPlaybackControl === false ) {
if ( this.source !== null ) {
- this.source.stop();
+ this.source.stop( this.context.currentTime + delay );
this.source.onended = null;
}
class Controls extends EventDispatcher {
- constructor( object, domElement ) {
+ constructor( object, domElement = null ) {
super();