first app vibe

This commit is contained in:
2025-07-28 22:43:55 +01:00
parent d70b6714c3
commit af090f5bf0
2530 changed files with 1410652 additions and 0 deletions

234
app/node_modules/three/src/nodes/display/BlendModes.js generated vendored Normal file
View File

@@ -0,0 +1,234 @@
import { Fn, If, vec4 } from '../tsl/TSLBase.js';
import { mix, min, step } from '../math/MathNode.js';
/**
* Represents a "Color Burn" blend mode.
*
* It's designed to darken the base layer's colors based on the color of the blend layer.
* It significantly increases the contrast of the base layer, making the colors more vibrant and saturated.
* The darker the color in the blend layer, the stronger the darkening and contrast effect on the base layer.
*
* @tsl
* @function
* @param {Node<vec3>} base - The base color.
* @param {Node<vec3>} blend - The blend color. A white (#ffffff) blend color does not alter the base color.
* @return {Node<vec3>} The result.
*/
export const blendBurn = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {

	// Classic color burn: 1 - min( 1, ( 1 - base ) / blend ).
	// A white blend leaves the base unchanged (division by 1 keeps the
	// term <= 1); darker blend values push the result toward black.
	return min( 1.0, base.oneMinus().div( blend ) ).oneMinus();

} ).setLayout( {
	name: 'blendBurn',
	type: 'vec3',
	inputs: [
		{ name: 'base', type: 'vec3' },
		{ name: 'blend', type: 'vec3' }
	]
} );
/**
* Represents a "Color Dodge" blend mode.
*
* It's designed to lighten the base layer's colors based on the color of the blend layer.
* It significantly increases the brightness of the base layer, making the colors lighter and more vibrant.
* The brighter the color in the blend layer, the stronger the lightening and contrast effect on the base layer.
*
* @tsl
* @function
* @param {Node<vec3>} base - The base color.
* @param {Node<vec3>} blend - The blend color. A black (#000000) blend color does not alter the base color.
* @return {Node<vec3>} The result.
*/
export const blendDodge = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {

	// Classic color dodge: min( base / ( 1 - blend ), 1 ).
	// A black blend divides by 1 and leaves the base unchanged; brighter
	// blend values push the result toward white.
	return min( base.div( blend.oneMinus() ), 1.0 );

} ).setLayout( {
	name: 'blendDodge',
	type: 'vec3',
	inputs: [
		{ name: 'base', type: 'vec3' },
		{ name: 'blend', type: 'vec3' }
	]
} );
/**
* Represents a "Screen" blend mode.
*
* Similar to `blendDodge()`, this mode also lightens the base layer's colors based on the color of the blend layer.
* The "Screen" blend mode is better for general brightening whereas the "Dodge" results in more subtle and nuanced
* effects.
*
* @tsl
* @function
* @param {Node<vec3>} base - The base color.
* @param {Node<vec3>} blend - The blend color. A black (#000000) blend color does not alter the base color.
* @return {Node<vec3>} The result.
*/
export const blendScreen = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {

	// Screen blend: 1 - ( 1 - base ) * ( 1 - blend ).
	// Both inputs contribute symmetrically; the result is never darker
	// than either input.
	return base.oneMinus().mul( blend.oneMinus() ).oneMinus();

} ).setLayout( {
	name: 'blendScreen',
	type: 'vec3',
	inputs: [
		{ name: 'base', type: 'vec3' },
		{ name: 'blend', type: 'vec3' }
	]
} );
/**
* Represents a "Overlay" blend mode.
*
* It's designed to increase the contrast of the base layer based on the color of the blend layer.
* It amplifies the existing colors and contrast in the base layer, making lighter areas lighter and darker areas darker.
* The color of the blend layer significantly influences the resulting contrast and color shift in the base layer.
*
* @tsl
* @function
* @param {Node<vec3>} base - The base color.
* @param {Node<vec3>} blend - The blend color
* @return {Node<vec3>} The result.
*/
export const blendOverlay = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {

	// Per-channel overlay: step( 0.5, base ) selects between a multiply
	// branch ( 2 * base * blend, for base < 0.5 ) and a screen-like branch
	// ( 1 - 2 * ( 1 - base ) * ( 1 - blend ), for base >= 0.5 ).
	return mix( base.mul( 2.0 ).mul( blend ), base.oneMinus().mul( 2.0 ).mul( blend.oneMinus() ).oneMinus(), step( 0.5, base ) );

} ).setLayout( {
	name: 'blendOverlay',
	type: 'vec3',
	inputs: [
		{ name: 'base', type: 'vec3' },
		{ name: 'blend', type: 'vec3' }
	]
} );
/**
* This function blends two color based on their alpha values by replicating the behavior of `THREE.NormalBlending`.
* It assumes both input colors have non-premultiplied alpha.
*
* @tsl
* @function
* @param {Node<vec4>} base - The base color.
* @param {Node<vec4>} blend - The blend color
* @return {Node<vec4>} The result.
*/
export const blendColor = /*@__PURE__*/ Fn( ( [ base, blend ] ) => {

	// Standard "over" compositing of non-premultiplied colors:
	// outAlpha = blendA + baseA * ( 1 - blendA ).
	const outAlpha = blend.a.add( base.a.mul( blend.a.oneMinus() ) );

	// NOTE(review): when both alphas are 0, outAlpha is 0 and the division
	// below is undefined — presumably callers never blend two fully
	// transparent colors; confirm upstream.
	return vec4( blend.rgb.mul( blend.a ).add( base.rgb.mul( base.a ).mul( blend.a.oneMinus() ) ).div( outAlpha ), outAlpha );

} ).setLayout( {
	name: 'blendColor',
	type: 'vec4',
	inputs: [
		{ name: 'base', type: 'vec4' },
		{ name: 'blend', type: 'vec4' }
	]
} );
/**
* Premultiplies the RGB channels of a color by its alpha channel.
*
* This function is useful for converting a non-premultiplied alpha color
* into a premultiplied alpha format, where the RGB values are scaled
* by the alpha value. Premultiplied alpha is often used in graphics
* rendering for certain operations, such as compositing and image processing.
*
* @tsl
* @function
* @param {Node<vec4>} color - The input color with non-premultiplied alpha.
* @return {Node<vec4>} The color with premultiplied alpha.
*/
export const premultiplyAlpha = /*@__PURE__*/ Fn( ( [ color ] ) => {

	// Scale RGB by alpha; the alpha channel itself is passed through.
	return vec4( color.rgb.mul( color.a ), color.a );

}, { color: 'vec4', return: 'vec4' } );
/**
* Unpremultiplies the RGB channels of a color by its alpha channel.
*
* This function is useful for converting a premultiplied alpha color
* back into a non-premultiplied alpha format, where the RGB values are
* divided by the alpha value. Unpremultiplied alpha is often used in graphics
* rendering for certain operations, such as compositing and image processing.
*
* @tsl
* @function
* @param {Node<vec4>} color - The input color with premultiplied alpha.
* @return {Node<vec4>} The color with non-premultiplied alpha.
*/
export const unpremultiplyAlpha = /*@__PURE__*/ Fn( ( [ color ] ) => {

	// NOTE(review): this `If` appears intended as an early-out yielding
	// vec4( 0.0 ) for fully transparent input, guarding the division by
	// alpha below — confirm against TSL `If`/`Fn` return semantics.
	If( color.a.equal( 0.0 ), () => vec4( 0.0 ) );

	// Undo the alpha premultiplication of the RGB channels.
	return vec4( color.rgb.div( color.a ), color.a );

}, { color: 'vec4', return: 'vec4' } );
// Deprecated
/**
 * @tsl
 * @function
 * @deprecated since r171. Use {@link blendBurn} instead.
 *
 * @param {...any} params
 * @returns {Function}
 */
export const burn = function ( ...params ) { // @deprecated, r171

	const message = 'THREE.TSL: "burn" has been renamed. Use "blendBurn" instead.';
	console.warn( message );

	// Forward the collected arguments to the renamed function.
	return blendBurn( params );

};
/**
 * @tsl
 * @function
 * @deprecated since r171. Use {@link blendDodge} instead.
 *
 * @param {...any} params
 * @returns {Function}
 */
export const dodge = function ( ...params ) { // @deprecated, r171

	const message = 'THREE.TSL: "dodge" has been renamed. Use "blendDodge" instead.';
	console.warn( message );

	// Forward the collected arguments to the renamed function.
	return blendDodge( params );

};
/**
 * @tsl
 * @function
 * @deprecated since r171. Use {@link blendScreen} instead.
 *
 * @param {...any} params
 * @returns {Function}
 */
export const screen = function ( ...params ) { // @deprecated, r171

	const message = 'THREE.TSL: "screen" has been renamed. Use "blendScreen" instead.';
	console.warn( message );

	// Forward the collected arguments to the renamed function.
	return blendScreen( params );

};
/**
 * @tsl
 * @function
 * @deprecated since r171. Use {@link blendOverlay} instead.
 *
 * @param {...any} params
 * @returns {Function}
 */
export const overlay = function ( ...params ) { // @deprecated, r171

	const message = 'THREE.TSL: "overlay" has been renamed. Use "blendOverlay" instead.';
	console.warn( message );

	// Forward the collected arguments to the renamed function.
	return blendOverlay( params );

};

117
app/node_modules/three/src/nodes/display/BumpMapNode.js generated vendored Normal file
View File

@@ -0,0 +1,117 @@
import TempNode from '../core/TempNode.js';
import { uv } from '../accessors/UV.js';
import { normalView } from '../accessors/Normal.js';
import { positionView } from '../accessors/Position.js';
import { faceDirection } from './FrontFacingNode.js';
import { Fn, nodeProxy, float, vec2 } from '../tsl/TSLBase.js';
// Bump Mapping Unparametrized Surfaces on the GPU by Morten S. Mikkelsen
// https://mmikk.github.io/papers3d/mm_sfgrad_bump.pdf
// Computes the screen-space derivatives of the bump height via forward
// differencing of three texture samples (center, +dFdx, +dFdy).
const dHdxy_fwd = Fn( ( { textureNode, bumpScale } ) => {

	// It's used to preserve the same TextureNode instance
	const sampleTexture = ( callback ) => textureNode.cache().context( { getUV: ( texNode ) => callback( texNode.uvNode || uv() ), forceUVContext: true } );

	// Height at the current fragment.
	const Hll = float( sampleTexture( ( uvNode ) => uvNode ) );

	// Forward differences of the height along screen-space x and y,
	// scaled by the bump intensity.
	return vec2(
		float( sampleTexture( ( uvNode ) => uvNode.add( uvNode.dFdx() ) ) ).sub( Hll ),
		float( sampleTexture( ( uvNode ) => uvNode.add( uvNode.dFdy() ) ) ).sub( Hll )
	).mul( bumpScale );

} );
// Evaluate the derivative of the height w.r.t. screen-space using forward differencing (listing 2)
const perturbNormalArb = Fn( ( inputs ) => {

	const { surf_pos, surf_norm, dHdxy } = inputs;

	// normalize is done to ensure that the bump map looks the same regardless of the texture's scale
	const vSigmaX = surf_pos.dFdx().normalize();
	const vSigmaY = surf_pos.dFdy().normalize();
	const vN = surf_norm; // normalized

	// Surface-gradient basis vectors (cross products with the normal).
	const R1 = vSigmaY.cross( vN );
	const R2 = vN.cross( vSigmaX );

	// Determinant of the screen-space frame; multiplying by faceDirection
	// flips the sign for back-facing fragments.
	const fDet = vSigmaX.dot( R1 ).mul( faceDirection );

	// Surface gradient of the height field (listing 2 of the paper).
	const vGrad = fDet.sign().mul( dHdxy.x.mul( R1 ).add( dHdxy.y.mul( R2 ) ) );

	return fDet.abs().mul( surf_norm ).sub( vGrad ).normalize();

} );
/**
* This class can be used for applying bump maps to materials.
*
* ```js
* material.normalNode = bumpMap( texture( bumpTex ) );
* ```
*
* @augments TempNode
*/
class BumpMapNode extends TempNode {

	static get type() {

		return 'BumpMapNode';

	}

	/**
	 * Constructs a new bump map node.
	 *
	 * @param {Node<float>} textureNode - Represents the bump map data.
	 * @param {?Node<float>} [scaleNode=null] - Controls the intensity of the bump effect.
	 */
	constructor( textureNode, scaleNode = null ) {

		super( 'vec3' );

		/**
		 * Represents the bump map data.
		 *
		 * @type {Node<float>}
		 */
		this.textureNode = textureNode;

		/**
		 * Controls the intensity of the bump effect.
		 *
		 * @type {?Node<float>}
		 * @default null
		 */
		this.scaleNode = scaleNode;

	}

	/**
	 * Setups the node: computes the screen-space height derivatives of the
	 * bump texture and perturbs the view-space normal accordingly.
	 *
	 * @return {Node<vec3>} The perturbed view-space normal.
	 */
	setup() {

		// A missing scale node defaults to a bump scale of 1.
		const bumpScale = this.scaleNode !== null ? this.scaleNode : 1;

		const dHdxy = dHdxy_fwd( { textureNode: this.textureNode, bumpScale } );

		return perturbNormalArb( {
			surf_pos: positionView,
			surf_norm: normalView,
			dHdxy
		} );

	}

}
export default BumpMapNode;
/**
* TSL function for creating a bump map node.
*
* @tsl
* @function
* @param {Node<float>} textureNode - Represents the bump map data.
* @param {?Node<float>} [scaleNode=null] - Controls the intensity of the bump effect.
* @returns {BumpMapNode}
*/
export const bumpMap = /*@__PURE__*/ nodeProxy( BumpMapNode ).setParameterLength( 1, 2 ); // accepts ( textureNode ) or ( textureNode, scaleNode )

View File

@@ -0,0 +1,141 @@
import { dot, max, mix } from '../math/MathNode.js';
import { add } from '../math/OperatorNode.js';
import { Fn, If, float, vec3, vec4 } from '../tsl/TSLBase.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { Vector3 } from '../../math/Vector3.js';
import { LinearSRGBColorSpace } from '../../constants.js';
/**
* Computes a grayscale value for the given RGB color value.
*
* @tsl
* @function
* @param {Node<vec3>} color - The color value to compute the grayscale for.
* @return {Node<vec3>} The grayscale color.
*/
export const grayscale = /*@__PURE__*/ Fn( ( [ color ] ) => {

	// NOTE(review): luminance() yields a scalar node although the JSDoc
	// declares a vec3 result — presumably broadcast by the consuming
	// node graph; confirm.
	return luminance( color.rgb );

} );
/**
* Super-saturates or desaturates the given RGB color.
*
* @tsl
* @function
* @param {Node<vec3>} color - The input color.
* @param {Node<float>} [adjustment=1] - Specifies the amount of the conversion. A value under `1` desaturates the color, a value over `1` super-saturates it.
* @return {Node<vec3>} The saturated color.
*/
export const saturation = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {

	// Blend from the fully desaturated color (its luminance) toward the
	// original: 0 = grayscale, 1 = unchanged, > 1 = oversaturated.
	return adjustment.mix( luminance( color.rgb ), color.rgb );

} );
/**
* Selectively enhance the intensity of less saturated RGB colors. Can result
* in a more natural and visually appealing image with enhanced color depth
* compared to {@link ColorAdjustment#saturation}.
*
* @tsl
* @function
* @param {Node<vec3>} color - The input color.
* @param {Node<float>} [adjustment=1] - Controls the intensity of the vibrance effect.
* @return {Node<vec3>} The updated color.
*/
export const vibrance = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {

	const average = add( color.r, color.g, color.b ).div( 3.0 );
	const mx = color.r.max( color.g.max( color.b ) );

	// The (mx - average) gap measures existing saturation; the negative
	// factor yields a mix amount near zero for already-gray colors, so
	// they are left mostly unchanged.
	const amt = mx.sub( average ).mul( adjustment ).mul( - 3.0 );

	return mix( color.rgb, mx, amt );

} );
/**
* Updates the hue component of the given RGB color while preserving its luminance and saturation.
*
* @tsl
* @function
* @param {Node<vec3>} color - The input color.
* @param {Node<float>} [adjustment=1] - Defines the degree of hue rotation in radians. A positive value rotates the hue clockwise, while a negative value rotates it counterclockwise.
* @return {Node<vec3>} The updated color.
*/
export const hue = /*@__PURE__*/ Fn( ( [ color, adjustment = float( 1 ) ] ) => {

	// Rodrigues' rotation of the RGB vector around the achromatic axis
	// k = normalize( vec3( 1 ) ): v cosθ + ( k × v ) sinθ + k ( k · v )( 1 - cosθ ).
	const k = vec3( 0.57735, 0.57735, 0.57735 );
	const cosAngle = adjustment.cos();

	return vec3( color.rgb.mul( cosAngle ).add( k.cross( color.rgb ).mul( adjustment.sin() ).add( k.mul( dot( k, color.rgb ).mul( cosAngle.oneMinus() ) ) ) ) );

} );
/**
 * Computes the luminance for the given RGB color value.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color value to compute the luminance for.
 * @param {?Node<vec3>} luminanceCoefficients - The luminance coefficients. By default predefined values of the current working color space are used.
 * @return {Node<vec3>} The luminance.
 */
export const luminance = ( color, luminanceCoefficients = vec3( ColorManagement.getLuminanceCoefficients( new Vector3() ) ) ) => {

	// Weighted sum of the RGB channels with the given coefficients.
	return dot( color, luminanceCoefficients );

};
/**
* Color Decision List (CDL) v1.2
*
* Compact representation of color grading information, defined by slope, offset, power, and
* saturation. The CDL should be typically be given input in a log space (such as LogC, ACEScc,
* or AgX Log), and will return output in the same space. Output may require clamping >=0.
*
* @tsl
* @function
* @param {Node<vec4>} color Input (-Infinity < input < +Infinity)
* @param {Node<vec3>} slope Slope (0 ≤ slope < +Infinity)
* @param {Node<vec3>} offset Offset (-Infinity < offset < +Infinity; typically -1 < offset < 1)
* @param {Node<vec3>} power Power (0 < power < +Infinity)
* @param {Node<float>} saturation Saturation (0 ≤ saturation < +Infinity; typically 0 ≤ saturation < 4)
* @param {Node<vec3>} luminanceCoefficients Luminance coefficients for saturation term, typically Rec. 709
* @return {Node<vec4>} Output, -Infinity < output < +Infinity
*
* References:
* - ASC CDL v1.2
* - {@link https://blender.stackexchange.com/a/55239/43930}
* - {@link https://docs.acescentral.com/specifications/acescc/}
*/
export const cdl = /*@__PURE__*/ Fn( ( [
	color,
	slope = vec3( 1 ),
	offset = vec3( 0 ),
	power = vec3( 1 ),
	saturation = float( 1 ),
	// ASC CDL v1.2 explicitly requires Rec. 709 luminance coefficients.
	luminanceCoefficients = vec3( ColorManagement.getLuminanceCoefficients( new Vector3(), LinearSRGBColorSpace ) )
] ) => {

	// NOTE: The ASC CDL v1.2 defines a [0, 1] clamp on the slope+offset term, and another on the
	// saturation term. Per the ACEScc specification and Filament, limits may be omitted to support
	// values outside [0, 1], requiring a workaround for negative values in the power expression.

	const luma = color.rgb.dot( vec3( luminanceCoefficients ) );

	// Slope/offset term, clamped at zero so pow() below is well-defined.
	const v = max( color.rgb.mul( slope ).add( offset ), 0.0 ).toVar();
	const pv = v.pow( power ).toVar();

	// Apply the power only to strictly positive channels (see NOTE above).
	If( v.r.greaterThan( 0.0 ), () => { v.r.assign( pv.r ); } ); // eslint-disable-line
	If( v.g.greaterThan( 0.0 ), () => { v.g.assign( pv.g ); } ); // eslint-disable-line
	If( v.b.greaterThan( 0.0 ), () => { v.b.assign( pv.b ); } ); // eslint-disable-line

	// Saturation: lerp between the luma (saturation = 0) and the graded color.
	v.assign( luma.add( v.sub( luma ).mul( saturation ) ) );

	return vec4( v.rgb, color.a );

} );

View File

@@ -0,0 +1,54 @@
import { mix } from '../math/MathNode.js';
import { Fn } from '../tsl/TSLCore.js';
/**
* Converts the given color value from sRGB to linear-sRGB color space.
*
* @tsl
* @function
* @param {Node<vec3>} color - The sRGB color.
* @return {Node<vec3>} The linear-sRGB color.
*/
export const sRGBTransferEOTF = /*@__PURE__*/ Fn( ( [ color ] ) => {

	// Piecewise sRGB EOTF: power segment for channels above the 0.04045
	// threshold, linear segment below it. Selection is per channel via mix().
	const a = color.mul( 0.9478672986 ).add( 0.0521327014 ).pow( 2.4 );
	const b = color.mul( 0.0773993808 );
	const factor = color.lessThanEqual( 0.04045 );

	const rgbResult = mix( a, b, factor );

	return rgbResult;

} ).setLayout( {
	name: 'sRGBTransferEOTF',
	type: 'vec3',
	inputs: [
		{ name: 'color', type: 'vec3' }
	]
} );
/**
* Converts the given color value from linear-sRGB to sRGB color space.
*
* @tsl
* @function
* @param {Node<vec3>} color - The linear-sRGB color.
* @return {Node<vec3>} The sRGB color.
*/
export const sRGBTransferOETF = /*@__PURE__*/ Fn( ( [ color ] ) => {

	// Piecewise sRGB OETF (inverse of the EOTF above): power segment for
	// channels above the 0.0031308 threshold, linear segment below it.
	const a = color.pow( 0.41666 ).mul( 1.055 ).sub( 0.055 );
	const b = color.mul( 12.92 );
	const factor = color.lessThanEqual( 0.0031308 );

	const rgbResult = mix( a, b, factor );

	return rgbResult;

} ).setLayout( {
	name: 'sRGBTransferOETF',
	type: 'vec3',
	inputs: [
		{ name: 'color', type: 'vec3' }
	]
} );

View File

@@ -0,0 +1,164 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, mat3, nodeObject, vec4 } from '../tsl/TSLCore.js';
import { SRGBTransfer } from '../../constants.js';
import { ColorManagement } from '../../math/ColorManagement.js';
import { sRGBTransferEOTF, sRGBTransferOETF } from './ColorSpaceFunctions.js';
import { Matrix3 } from '../../math/Matrix3.js';
// Sentinel color space names, resolved to concrete color spaces at build
// time by `ColorSpaceNode.resolveColorSpace()`.
const WORKING_COLOR_SPACE = 'WorkingColorSpace';
const OUTPUT_COLOR_SPACE = 'OutputColorSpace';
/**
* This node represents a color space conversion. Meaning it converts
* a color value from a source to a target color space.
*
* @augments TempNode
*/
class ColorSpaceNode extends TempNode {

	static get type() {

		return 'ColorSpaceNode';

	}

	/**
	 * Constructs a new color space node.
	 *
	 * @param {Node} colorNode - Represents the color to convert.
	 * @param {string} source - The source color space.
	 * @param {string} target - The target color space.
	 */
	constructor( colorNode, source, target ) {

		super( 'vec4' );

		/**
		 * Represents the color to convert.
		 *
		 * @type {Node}
		 */
		this.colorNode = colorNode;

		/**
		 * The source color space.
		 *
		 * @type {string}
		 */
		this.source = source;

		/**
		 * The target color space.
		 *
		 * @type {string}
		 */
		this.target = target;

	}

	/**
	 * This method resolves the constants `WORKING_COLOR_SPACE` and
	 * `OUTPUT_COLOR_SPACE` based on the current configuration of the
	 * color management and renderer.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @param {string} colorSpace - The color space to resolve.
	 * @return {string} The resolved color space.
	 */
	resolveColorSpace( builder, colorSpace ) {

		if ( colorSpace === WORKING_COLOR_SPACE ) {

			return ColorManagement.workingColorSpace;

		} else if ( colorSpace === OUTPUT_COLOR_SPACE ) {

			// Context setting takes precedence over the renderer setting.
			return builder.context.outputColorSpace || builder.renderer.outputColorSpace;

		}

		return colorSpace;

	}

	/**
	 * Builds the conversion chain: sRGB decode (when the source uses the
	 * sRGB transfer function), primaries conversion (when source and target
	 * primaries differ) and sRGB encode (when the target uses the sRGB
	 * transfer function). Alpha is passed through unchanged.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {Node} The converted color node.
	 */
	setup( builder ) {

		const { colorNode } = this;

		const source = this.resolveColorSpace( builder, this.source );
		const target = this.resolveColorSpace( builder, this.target );

		let outputNode = colorNode;

		// No-op when color management is disabled or the conversion is trivial.
		if ( ColorManagement.enabled === false || source === target || ! source || ! target ) {

			return outputNode;

		}

		if ( ColorManagement.getTransfer( source ) === SRGBTransfer ) {

			outputNode = vec4( sRGBTransferEOTF( outputNode.rgb ), outputNode.a );

		}

		if ( ColorManagement.getPrimaries( source ) !== ColorManagement.getPrimaries( target ) ) {

			outputNode = vec4(
				mat3( ColorManagement._getMatrix( new Matrix3(), source, target ) ).mul( outputNode.rgb ),
				outputNode.a
			);

		}

		if ( ColorManagement.getTransfer( target ) === SRGBTransfer ) {

			outputNode = vec4( sRGBTransferOETF( outputNode.rgb ), outputNode.a );

		}

		return outputNode;

	}

}
export default ColorSpaceNode;
/**
* TSL function for converting a given color node from the current working color space to the given color space.
*
* @tsl
* @function
* @param {Node} node - Represents the node to convert.
* @param {string} targetColorSpace - The target color space.
* @returns {ColorSpaceNode}
*/
export const workingToColorSpace = ( node, targetColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), WORKING_COLOR_SPACE, targetColorSpace ) );

/**
 * TSL function for converting a given color node from the given color space to the current working color space.
 *
 * @tsl
 * @function
 * @param {Node} node - Represents the node to convert.
 * @param {string} sourceColorSpace - The source color space.
 * @returns {ColorSpaceNode}
 */
export const colorSpaceToWorking = ( node, sourceColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), sourceColorSpace, WORKING_COLOR_SPACE ) );

/**
 * TSL function for converting a given color node from one color space to another one.
 *
 * @tsl
 * @function
 * @param {Node} node - Represents the node to convert.
 * @param {string} sourceColorSpace - The source color space.
 * @param {string} targetColorSpace - The target color space.
 * @returns {ColorSpaceNode}
 */
export const convertColorSpace = ( node, sourceColorSpace, targetColorSpace ) => nodeObject( new ColorSpaceNode( nodeObject( node ), sourceColorSpace, targetColorSpace ) );

// Register the conversions for method chaining on node objects.
addMethodChaining( 'workingToColorSpace', workingToColorSpace );
addMethodChaining( 'colorSpaceToWorking', colorSpaceToWorking );

View File

@@ -0,0 +1,106 @@
import Node from '../core/Node.js';
import { nodeImmutable, float, Fn } from '../tsl/TSLBase.js';
import { BackSide, DoubleSide, WebGLCoordinateSystem } from '../../constants.js';
/**
* This node can be used to evaluate whether a primitive is front or back facing.
*
* @augments Node
*/
class FrontFacingNode extends Node {

	static get type() {

		return 'FrontFacingNode';

	}

	/**
	 * Constructs a new front facing node.
	 */
	constructor() {

		super( 'bool' );

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isFrontFacingNode = true;

	}

	/**
	 * Generates the shader snippet that evaluates the facing status.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {string} The shader snippet.
	 */
	generate( builder ) {

		// Facing information only exists in the fragment stage; all other
		// stages report front facing.
		if ( builder.shaderStage !== 'fragment' ) return 'true';

		//

		const { renderer, material } = builder;

		if ( renderer.coordinateSystem === WebGLCoordinateSystem ) {

			// NOTE(review): with the WebGL coordinate system, back-side
			// materials are hard-coded to 'false' — presumably because the
			// backend flips the winding order for them; confirm in backend.
			if ( material.side === BackSide ) {

				return 'false';

			}

		}

		return builder.getFrontFacing();

	}

}
export default FrontFacingNode;
/**
 * TSL object that represents whether a primitive is front or back facing
 *
 * @tsl
 * @type {FrontFacingNode<bool>}
 */
export const frontFacing = /*@__PURE__*/ nodeImmutable( FrontFacingNode );

/**
 * TSL object that represents the front facing status as a number instead of a bool.
 * `1` means front facing, `-1` means back facing.
 *
 * @tsl
 * @type {Node<float>}
 */
export const faceDirection = /*@__PURE__*/ float( frontFacing ).mul( 2.0 ).sub( 1.0 ); // maps bool { 0, 1 } onto { -1, 1 }
/**
* Converts a direction vector to a face direction vector based on the material's side.
*
* If the material is set to `BackSide`, the direction is inverted.
* If the material is set to `DoubleSide`, the direction is multiplied by `faceDirection`.
*
* @tsl
* @param {Node<vec3>} direction - The direction vector to convert.
* @returns {Node<vec3>} The converted direction vector.
*/
export const directionToFaceDirection = /*@__PURE__*/ Fn( ( [ direction ], { material } ) => {

	const side = material.side;

	if ( side === BackSide ) {

		// Back side: the direction is statically inverted.
		direction = direction.mul( - 1.0 );

	} else if ( side === DoubleSide ) {

		// Double side: the sign is decided per fragment via faceDirection.
		direction = direction.mul( faceDirection );

	}

	// FrontSide falls through unchanged.
	return direction;

} );

View File

@@ -0,0 +1,117 @@
import TempNode from '../core/TempNode.js';
import { normalView, transformNormalToView } from '../accessors/Normal.js';
import { TBNViewMatrix } from '../accessors/AccessorsUtils.js';
import { nodeProxy, vec3 } from '../tsl/TSLBase.js';
import { TangentSpaceNormalMap, ObjectSpaceNormalMap } from '../../constants.js';
import { directionToFaceDirection } from './FrontFacingNode.js';
/**
* This class can be used for applying normals maps to materials.
*
* ```js
* material.normalNode = normalMap( texture( normalTex ) );
* ```
*
* @augments TempNode
*/
class NormalMapNode extends TempNode {

	static get type() {

		return 'NormalMapNode';

	}

	/**
	 * Constructs a new normal map node.
	 *
	 * @param {Node<vec3>} node - Represents the normal map data.
	 * @param {?Node<vec2>} [scaleNode=null] - Controls the intensity of the effect.
	 */
	constructor( node, scaleNode = null ) {

		super( 'vec3' );

		/**
		 * Represents the normal map data.
		 *
		 * @type {Node<vec3>}
		 */
		this.node = node;

		/**
		 * Controls the intensity of the effect.
		 *
		 * @type {?Node<vec2>}
		 * @default null
		 */
		this.scaleNode = scaleNode;

		/**
		 * The normal map type.
		 *
		 * @type {(TangentSpaceNormalMap|ObjectSpaceNormalMap)}
		 * @default TangentSpaceNormalMap
		 */
		this.normalMapType = TangentSpaceNormalMap;

	}

	/**
	 * Setups the node: unpacks the normal map data, applies the optional
	 * scale and transforms the result into view space according to the
	 * configured normal map type.
	 *
	 * @param {Object} builder - The node builder (only `material` is used).
	 * @return {Node<vec3>} The view-space normal.
	 */
	setup( { material } ) {

		const { normalMapType, scaleNode } = this;

		// Unpack from [0, 1] texture range to the [-1, 1] direction range.
		let normalMap = this.node.mul( 2.0 ).sub( 1.0 );

		if ( scaleNode !== null ) {

			let scale = scaleNode;

			if ( material.flatShading === true ) {

				scale = directionToFaceDirection( scale );

			}

			// Only the XY components are scaled; Z is kept as sampled.
			normalMap = vec3( normalMap.xy.mul( scale ), normalMap.z );

		}

		let output = null;

		if ( normalMapType === ObjectSpaceNormalMap ) {

			output = transformNormalToView( normalMap );

		} else if ( normalMapType === TangentSpaceNormalMap ) {

			output = TBNViewMatrix.mul( normalMap ).normalize();

		} else {

			console.error( `THREE.NodeMaterial: Unsupported normal map type: ${ normalMapType }` );

			output = normalView; // Fallback to default normal view

		}

		return output;

	}

}
export default NormalMapNode;
/**
* TSL function for creating a normal map node.
*
* @tsl
* @function
* @param {Node<vec3>} node - Represents the normal map data.
* @param {?Node<vec2>} [scaleNode=null] - Controls the intensity of the effect.
* @returns {NormalMapNode}
*/
export const normalMap = /*@__PURE__*/ nodeProxy( NormalMapNode ).setParameterLength( 1, 2 ); // accepts ( node ) or ( node, scaleNode )

759
app/node_modules/three/src/nodes/display/PassNode.js generated vendored Normal file
View File

@@ -0,0 +1,759 @@
import TempNode from '../core/TempNode.js';
import { default as TextureNode/*, texture*/ } from '../accessors/TextureNode.js';
import { NodeUpdateType } from '../core/constants.js';
import { nodeObject } from '../tsl/TSLBase.js';
import { uniform } from '../core/UniformNode.js';
import { viewZToOrthographicDepth, perspectiveDepthToViewZ } from './ViewportDepthNode.js';
import { HalfFloatType/*, FloatType*/ } from '../../constants.js';
import { Vector2 } from '../../math/Vector2.js';
import { DepthTexture } from '../../textures/DepthTexture.js';
import { RenderTarget } from '../../core/RenderTarget.js';
// Module-scoped scratch vector — NOTE(review): presumably reused by PassNode
// size handling further down the file (not visible in this chunk); confirm.
const _size = /*@__PURE__*/ new Vector2();
/**
* Represents the texture of a pass node.
*
* @augments TextureNode
*/
class PassTextureNode extends TextureNode {

	static get type() {

		return 'PassTextureNode';

	}

	/**
	 * Constructs a new pass texture node.
	 *
	 * @param {PassNode} passNode - The pass node.
	 * @param {Texture} texture - The output texture.
	 */
	constructor( passNode, texture ) {

		super( texture );

		/**
		 * A reference to the pass node.
		 *
		 * @type {PassNode}
		 */
		this.passNode = passNode;

		// Disable UV matrix updates — NOTE(review): presumably unnecessary
		// for pass output textures; confirm against TextureNode.
		this.setUpdateMatrix( false );

	}

	setup( builder ) {

		// Build the underlying pass when this texture is sampled from a
		// full-screen quad mesh.
		if ( builder.object.isQuadMesh ) this.passNode.build( builder );

		return super.setup( builder );

	}

	clone() {

		return new this.constructor( this.passNode, this.value );

	}

}
/**
* An extension of `PassTextureNode` which allows to manage more than one
* internal texture. Relevant for the `getPreviousTexture()` related API.
*
* @augments PassTextureNode
*/
class PassMultipleTextureNode extends PassTextureNode {

	static get type() {

		return 'PassMultipleTextureNode';

	}

	/**
	 * Constructs a new pass texture node.
	 *
	 * @param {PassNode} passNode - The pass node.
	 * @param {string} textureName - The output texture name.
	 * @param {boolean} [previousTexture=false] - Whether previous frame data should be used or not.
	 */
	constructor( passNode, textureName, previousTexture = false ) {

		// null is passed to the super call since this class does not
		// use an external texture for rendering pass data into. Instead
		// the texture is managed by the pass node itself
		super( passNode, null );

		/**
		 * The output texture name.
		 *
		 * @type {string}
		 */
		this.textureName = textureName;

		/**
		 * Whether previous frame data should be used or not.
		 *
		 * @type {boolean}
		 */
		this.previousTexture = previousTexture;

	}

	/**
	 * Updates the texture reference of this node.
	 */
	updateTexture() {

		// Resolve the current or previous-frame texture from the pass node,
		// depending on the `previousTexture` flag.
		this.value = this.previousTexture ? this.passNode.getPreviousTexture( this.textureName ) : this.passNode.getTexture( this.textureName );

	}

	setup( builder ) {

		// Re-resolve the texture before building since the pass node may have
		// swapped current/previous textures in the meantime.
		this.updateTexture();

		return super.setup( builder );

	}

	clone() {

		// Copy all sampling-related state to the clone.
		const newNode = new this.constructor( this.passNode, this.textureName, this.previousTexture );
		newNode.uvNode = this.uvNode;
		newNode.levelNode = this.levelNode;
		newNode.biasNode = this.biasNode;
		newNode.sampler = this.sampler;
		newNode.depthNode = this.depthNode;
		newNode.compareNode = this.compareNode;
		newNode.gradNode = this.gradNode;

		return newNode;

	}

}
/**
* Represents a render pass (sometimes called beauty pass) in context of post processing.
* This pass produces a render for the given scene and camera and can provide multiple outputs
* via MRT for further processing.
*
* ```js
* const postProcessing = new PostProcessing( renderer );
*
* const scenePass = pass( scene, camera );
*
* postProcessing.outputNode = scenePass;
* ```
*
* @augments TempNode
*/
class PassNode extends TempNode {
static get type() {
return 'PassNode';
}
/**
* Constructs a new pass node.
*
* @param {('color'|'depth')} scope - The scope of the pass. The scope determines whether the node outputs color or depth.
* @param {Scene} scene - A reference to the scene.
* @param {Camera} camera - A reference to the camera.
* @param {Object} options - Options for the internal render target.
*/
constructor( scope, scene, camera, options = {} ) {
super( 'vec4' );
/**
* The scope of the pass. The scope determines whether the node outputs color or depth.
*
* @type {('color'|'depth')}
*/
this.scope = scope;
/**
* A reference to the scene.
*
* @type {Scene}
*/
this.scene = scene;
/**
* A reference to the camera.
*
* @type {Camera}
*/
this.camera = camera;
/**
* Options for the internal render target.
*
* @type {Object}
*/
this.options = options;
/**
* The pass's pixel ratio. Will be kept automatically kept in sync with the renderer's pixel ratio.
*
* @private
* @type {number}
* @default 1
*/
this._pixelRatio = 1;
/**
* The pass's pixel width. Will be kept automatically kept in sync with the renderer's width.
* @private
* @type {number}
* @default 1
*/
this._width = 1;
/**
* The pass's pixel height. Will be kept automatically kept in sync with the renderer's height.
* @private
* @type {number}
* @default 1
*/
this._height = 1;
const depthTexture = new DepthTexture();
depthTexture.isRenderTargetTexture = true;
//depthTexture.type = FloatType;
depthTexture.name = 'depth';
const renderTarget = new RenderTarget( this._width * this._pixelRatio, this._height * this._pixelRatio, { type: HalfFloatType, ...options, } );
renderTarget.texture.name = 'output';
renderTarget.depthTexture = depthTexture;
/**
* The pass's render target.
*
* @type {RenderTarget}
*/
this.renderTarget = renderTarget;
/**
* A dictionary holding the internal result textures.
*
* @private
* @type {Object<string, Texture>}
*/
this._textures = {
output: renderTarget.texture,
depth: depthTexture
};
/**
* A dictionary holding the internal texture nodes.
*
* @private
* @type {Object<string, TextureNode>}
*/
this._textureNodes = {};
/**
* A dictionary holding the internal depth nodes.
*
* @private
* @type {Object}
*/
this._linearDepthNodes = {};
/**
* A dictionary holding the internal viewZ nodes.
*
* @private
* @type {Object}
*/
this._viewZNodes = {};
/**
* A dictionary holding the texture data of the previous frame.
* Used for computing velocity/motion vectors.
*
* @private
* @type {Object<string, Texture>}
*/
this._previousTextures = {};
/**
* A dictionary holding the texture nodes of the previous frame.
* Used for computing velocity/motion vectors.
*
* @private
* @type {Object<string, TextureNode>}
*/
this._previousTextureNodes = {};
/**
* The `near` property of the camera as a uniform.
*
* @private
* @type {UniformNode}
*/
this._cameraNear = uniform( 0 );
/**
* The `far` property of the camera as a uniform.
*
* @private
* @type {UniformNode}
*/
this._cameraFar = uniform( 0 );
/**
* A MRT node configuring the MRT settings.
*
* @private
* @type {?MRTNode}
* @default null
*/
this._mrt = null;
this._layers = null;
this._resolution = 1;
/**
* This flag can be used for type testing.
*
* @type {boolean}
* @readonly
* @default true
*/
this.isPassNode = true;
/**
* The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders the
* scene once per frame in its {@link PassNode#updateBefore} method.
*
* @type {string}
* @default 'frame'
*/
this.updateBeforeType = NodeUpdateType.FRAME;
/**
* This flag is used for global cache.
*
* @type {boolean}
* @default true
*/
this.global = true;
}
/**
* Sets the resolution for the pass.
* The resolution is a factor that is multiplied with the renderer's width and height.
*
* @param {number} resolution - The resolution to set. A value of `1` means full resolution.
* @return {PassNode} A reference to this pass.
*/
setResolution( resolution ) {
this._resolution = resolution;
return this;
}
/**
* Gets the current resolution of the pass.
*
* @return {number} The current resolution. A value of `1` means full resolution.
* @default 1
*/
getResolution() {
return this._resolution;
}
setLayers( layers ) {
this._layers = layers;
return this;
}
getLayers() {
return this._layers;
}
/**
* Sets the given MRT node to setup MRT for this pass.
*
* @param {MRTNode} mrt - The MRT object.
* @return {PassNode} A reference to this pass.
*/
setMRT( mrt ) {
this._mrt = mrt;
return this;
}
/**
* Returns the current MRT node.
*
* @return {MRTNode} The current MRT node.
*/
getMRT() {
return this._mrt;
}
/**
* Returns the texture for the given output name.
*
* @param {string} name - The output name to get the texture for.
* @return {Texture} The texture.
*/
getTexture( name ) {
let texture = this._textures[ name ];
if ( texture === undefined ) {
const refTexture = this.renderTarget.texture;
texture = refTexture.clone();
texture.name = name;
this._textures[ name ] = texture;
this.renderTarget.textures.push( texture );
}
return texture;
}
/**
* Returns the texture holding the data of the previous frame for the given output name.
*
* @param {string} name - The output name to get the texture for.
* @return {Texture} The texture holding the data of the previous frame.
*/
getPreviousTexture( name ) {
let texture = this._previousTextures[ name ];
if ( texture === undefined ) {
texture = this.getTexture( name ).clone();
this._previousTextures[ name ] = texture;
}
return texture;
}
/**
 * Switches current and previous textures for the given output name.
 *
 * The swap is applied to the render target's attachment list as well as to
 * the internal texture dictionaries so the "previous" texture always refers
 * to the last fully rendered frame.
 *
 * @param {string} name - The output name.
 */
toggleTexture( name ) {
const prevTexture = this._previousTextures[ name ];
if ( prevTexture !== undefined ) {
const texture = this._textures[ name ];
const index = this.renderTarget.textures.indexOf( texture );
// swap the attachment slot so the upcoming render writes into the old "previous" texture
this.renderTarget.textures[ index ] = prevTexture;
this._textures[ name ] = prevTexture;
this._previousTextures[ name ] = texture;
// keep the texture nodes in sync with the swapped textures
this._textureNodes[ name ].updateTexture();
this._previousTextureNodes[ name ].updateTexture();
}
}
/**
* Returns the texture node for the given output name.
*
* @param {string} [name='output'] - The output name to get the texture node for.
* @return {TextureNode} The texture node.
*/
getTextureNode( name = 'output' ) {
let textureNode = this._textureNodes[ name ];
if ( textureNode === undefined ) {
textureNode = nodeObject( new PassMultipleTextureNode( this, name ) );
textureNode.updateTexture();
this._textureNodes[ name ] = textureNode;
}
return textureNode;
}
/**
* Returns the previous texture node for the given output name.
*
* @param {string} [name='output'] - The output name to get the previous texture node for.
* @return {TextureNode} The previous texture node.
*/
getPreviousTextureNode( name = 'output' ) {
let textureNode = this._previousTextureNodes[ name ];
if ( textureNode === undefined ) {
if ( this._textureNodes[ name ] === undefined ) this.getTextureNode( name );
textureNode = nodeObject( new PassMultipleTextureNode( this, name, true ) );
textureNode.updateTexture();
this._previousTextureNodes[ name ] = textureNode;
}
return textureNode;
}
/**
* Returns a viewZ node of this pass.
*
* @param {string} [name='depth'] - The output name to get the viewZ node for. In most cases the default `'depth'` can be used however the parameter exists for custom depth outputs.
* @return {Node} The viewZ node.
*/
getViewZNode( name = 'depth' ) {
let viewZNode = this._viewZNodes[ name ];
if ( viewZNode === undefined ) {
const cameraNear = this._cameraNear;
const cameraFar = this._cameraFar;
this._viewZNodes[ name ] = viewZNode = perspectiveDepthToViewZ( this.getTextureNode( name ), cameraNear, cameraFar );
}
return viewZNode;
}
/**
* Returns a linear depth node of this pass.
*
* @param {string} [name='depth'] - The output name to get the linear depth node for. In most cases the default `'depth'` can be used however the parameter exists for custom depth outputs.
* @return {Node} The linear depth node.
*/
getLinearDepthNode( name = 'depth' ) {
let linearDepthNode = this._linearDepthNodes[ name ];
if ( linearDepthNode === undefined ) {
const cameraNear = this._cameraNear;
const cameraFar = this._cameraFar;
const viewZNode = this.getViewZNode( name );
// TODO: just if ( builder.camera.isPerspectiveCamera )
this._linearDepthNodes[ name ] = linearDepthNode = viewZToOrthographicDepth( viewZNode, cameraNear, cameraFar );
}
return linearDepthNode;
}
/**
 * Configures the internal render target and returns the pass's output node
 * depending on the scope (color or linear depth).
 *
 * @param {NodeBuilder} builder - The current node builder.
 * @return {Node} The output node of the pass.
 */
setup( { renderer } ) {
// inherit sample count and color buffer type from the renderer unless samples were configured explicitly
this.renderTarget.samples = this.options.samples === undefined ? renderer.samples : this.options.samples;
this.renderTarget.texture.type = renderer.getColorBufferType();
return this.scope === PassNode.COLOR ? this.getTextureNode() : this.getLinearDepthNode();
}
/**
 * Renders the pass's scene once per frame into the internal render target.
 * The renderer's state (render target, MRT, camera layer mask) is restored
 * afterwards.
 *
 * @param {NodeFrame} frame - A reference to the current node frame.
 */
updateBefore( frame ) {
const { renderer } = frame;
const { scene } = this;
let camera;
let pixelRatio;
const outputRenderTarget = renderer.getOutputRenderTarget();
if ( outputRenderTarget && outputRenderTarget.isXRRenderTarget === true ) {
// in XR, render with the XR camera at the output render target's size
pixelRatio = 1;
camera = renderer.xr.getCamera();
renderer.xr.updateCamera( camera );
_size.set( outputRenderTarget.width, outputRenderTarget.height );
} else {
camera = this.camera;
pixelRatio = renderer.getPixelRatio();
renderer.getSize( _size );
}
this._pixelRatio = pixelRatio;
this.setSize( _size.width, _size.height );
// save renderer state so it can be restored after the pass was rendered
const currentRenderTarget = renderer.getRenderTarget();
const currentMRT = renderer.getMRT();
const currentMask = camera.layers.mask;
this._cameraNear.value = camera.near;
this._cameraFar.value = camera.far;
if ( this._layers !== null ) {
camera.layers.mask = this._layers.mask;
}
// make last frame's outputs available as "previous" textures
for ( const name in this._previousTextures ) {
this.toggleTexture( name );
}
renderer.setRenderTarget( this.renderTarget );
renderer.setMRT( this._mrt );
renderer.render( scene, camera );
// restore previous renderer state
renderer.setRenderTarget( currentRenderTarget );
renderer.setMRT( currentMRT );
camera.layers.mask = currentMask;
}
/**
* Sets the size of the pass's render target. Honors the pixel ratio.
*
* @param {number} width - The width to set.
* @param {number} height - The height to set.
*/
setSize( width, height ) {
this._width = width;
this._height = height;
const effectiveWidth = this._width * this._pixelRatio * this._resolution;
const effectiveHeight = this._height * this._pixelRatio * this._resolution;
this.renderTarget.setSize( effectiveWidth, effectiveHeight );
}
/**
* Sets the pixel ratio the pass's render target and updates the size.
*
* @param {number} pixelRatio - The pixel ratio to set.
*/
setPixelRatio( pixelRatio ) {
this._pixelRatio = pixelRatio;
this.setSize( this._width, this._height );
}
/**
 * Frees internal resources. Should be called when the node is no longer in use.
 */
dispose() {
// releases the GPU resources held by the internal render target
this.renderTarget.dispose();
}
}
/**
 * Scope value for a color pass.
 *
 * @static
 * @type {'color'}
 * @default 'color'
 */
PassNode.COLOR = 'color';
/**
 * Scope value for a depth pass.
 *
 * @static
 * @type {'depth'}
 * @default 'depth'
 */
PassNode.DEPTH = 'depth';
export default PassNode;
/**
 * TSL function for creating a pass node.
 *
 * @tsl
 * @function
 * @param {Scene} scene - A reference to the scene.
 * @param {Camera} camera - A reference to the camera.
 * @param {Object} [options] - Options for the internal render target.
 * @returns {PassNode}
 */
export const pass = ( scene, camera, options ) => nodeObject( new PassNode( PassNode.COLOR, scene, camera, options ) );
/**
 * TSL function for creating a pass texture node.
 *
 * @tsl
 * @function
 * @param {PassNode} pass - The pass node.
 * @param {Texture} texture - The output texture.
 * @returns {PassTextureNode}
 */
export const passTexture = ( pass, texture ) => nodeObject( new PassTextureNode( pass, texture ) );
/**
 * TSL function for creating a depth pass node.
 *
 * @tsl
 * @function
 * @param {Scene} scene - A reference to the scene.
 * @param {Camera} camera - A reference to the camera.
 * @param {Object} [options] - Options for the internal render target.
 * @returns {PassNode}
 */
export const depthPass = ( scene, camera, options ) => nodeObject( new PassNode( PassNode.DEPTH, scene, camera, options ) );

View File

@@ -0,0 +1,65 @@
import TempNode from '../core/TempNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
/**
* Represents a posterize effect which reduces the number of colors
* in an image, resulting in a more blocky and stylized appearance.
*
* @augments TempNode
*/
class PosterizeNode extends TempNode {

	static get type() {

		return 'PosterizeNode';

	}

	/**
	 * Constructs a new posterize node.
	 *
	 * @param {Node} sourceNode - The input color.
	 * @param {Node} stepsNode - Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
	 */
	constructor( sourceNode, stepsNode ) {

		super();

		/**
		 * The input color.
		 *
		 * @type {Node}
		 */
		this.sourceNode = sourceNode;

		/**
		 * Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
		 *
		 * @type {Node}
		 */
		this.stepsNode = stepsNode;

	}

	/**
	 * Quantizes each channel of the input color by snapping it to one of
	 * `stepsNode` evenly spaced levels.
	 *
	 * @return {Node} The posterized color.
	 */
	setup() {

		const source = this.sourceNode;
		const steps = this.stepsNode;

		return source.mul( steps ).floor().div( steps );

	}

}
export default PosterizeNode;
/**
 * TSL function for creating a posterize node.
 *
 * The returned function expects exactly two arguments.
 *
 * @tsl
 * @function
 * @param {Node} sourceNode - The input color.
 * @param {Node} stepsNode - Controls the intensity of the posterization effect. A lower number results in a more blocky appearance.
 * @returns {PosterizeNode}
 */
export const posterize = /*@__PURE__*/ nodeProxy( PosterizeNode ).setParameterLength( 2 );

View File

@@ -0,0 +1,124 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, nodeObject } from '../tsl/TSLCore.js';
import { NoColorSpace, NoToneMapping } from '../../constants.js';
import { ColorManagement } from '../../math/ColorManagement.js';
/**
* Normally, tone mapping and color space conversion happen automatically
* before a pixel is written to the default (screen) framebuffer. In certain
* post processing setups this happens too late because certain effects
* require e.g. sRGB input. For such scenarios, `RenderOutputNode` can be used
* to apply tone mapping and color space conversion at an arbitrary point
* in the effect chain.
*
* When applying tone mapping and color space conversion manually with this node,
* you have to set {@link PostProcessing#outputColorTransform} to `false`.
*
* ```js
* const postProcessing = new PostProcessing( renderer );
* postProcessing.outputColorTransform = false;
*
* const scenePass = pass( scene, camera );
* const outputPass = renderOutput( scenePass );
*
* postProcessing.outputNode = outputPass;
* ```
*
* @augments TempNode
*/
class RenderOutputNode extends TempNode {
static get type() {
return 'RenderOutputNode';
}
/**
 * Constructs a new render output node.
 *
 * @param {Node} colorNode - The color node to process.
 * @param {?number} toneMapping - The tone mapping type.
 * @param {?string} outputColorSpace - The output color space.
 */
constructor( colorNode, toneMapping, outputColorSpace ) {
super( 'vec4' );
/**
 * The color node to process.
 *
 * @type {Node}
 */
this.colorNode = colorNode;
/**
 * The tone mapping type.
 *
 * @type {?number}
 */
this.toneMapping = toneMapping;
/**
 * The output color space.
 *
 * @type {?string}
 */
this.outputColorSpace = outputColorSpace;
/**
 * This flag can be used for type testing.
 *
 * @type {boolean}
 * @readonly
 * @default true
 */
this.isRenderOutputNode = true;
}
/**
 * Applies the configured (or context-provided) tone mapping and color
 * space conversion to the color node.
 *
 * @param {NodeBuilder} builder - The current node builder.
 * @return {Node} The processed output color.
 */
setup( { context } ) {
let outputNode = this.colorNode || context.color;
// tone mapping
// fall back to the context settings when no explicit values were configured
const toneMapping = ( this.toneMapping !== null ? this.toneMapping : context.toneMapping ) || NoToneMapping;
const outputColorSpace = ( this.outputColorSpace !== null ? this.outputColorSpace : context.outputColorSpace ) || NoColorSpace;
if ( toneMapping !== NoToneMapping ) {
outputNode = outputNode.toneMapping( toneMapping );
}
// working to output color space
if ( outputColorSpace !== NoColorSpace && outputColorSpace !== ColorManagement.workingColorSpace ) {
outputNode = outputNode.workingToColorSpace( outputColorSpace );
}
return outputNode;
}
}
export default RenderOutputNode;
/**
 * TSL function for creating a render output node.
 *
 * @tsl
 * @function
 * @param {Node} color - The color node to process.
 * @param {?number} [toneMapping=null] - The tone mapping type.
 * @param {?string} [outputColorSpace=null] - The output color space.
 * @returns {RenderOutputNode}
 */
export const renderOutput = ( color, toneMapping = null, outputColorSpace = null ) => nodeObject( new RenderOutputNode( nodeObject( color ), toneMapping, outputColorSpace ) );
addMethodChaining( 'renderOutput', renderOutput );

260
app/node_modules/three/src/nodes/display/ScreenNode.js generated vendored Normal file
View File

@@ -0,0 +1,260 @@
import Node from '../core/Node.js';
import { NodeUpdateType } from '../core/constants.js';
import { uniform } from '../core/UniformNode.js';
import { Fn, nodeImmutable, vec2 } from '../tsl/TSLBase.js';
import { Vector2 } from '../../math/Vector2.js';
import { Vector4 } from '../../math/Vector4.js';
let screenSizeVec, viewportVec;
/**
* This node provides a collection of screen related metrics.
* Depending on {@link ScreenNode#scope}, the nodes can represent
* resolution or viewport data as well as fragment or uv coordinates.
*
* @augments Node
*/
class ScreenNode extends Node {
static get type() {
return 'ScreenNode';
}
/**
 * Constructs a new screen node.
 *
 * @param {('coordinate'|'viewport'|'size'|'uv')} scope - The node's scope.
 */
constructor( scope ) {
super();
/**
 * The node represents different metric depending on which scope is selected.
 *
 * - `ScreenNode.COORDINATE`: Window-relative coordinates of the current fragment according to WebGPU standards.
 * - `ScreenNode.VIEWPORT`: The current viewport defined as a four-dimensional vector.
 * - `ScreenNode.SIZE`: The dimensions of the current bound framebuffer.
 * - `ScreenNode.UV`: Normalized coordinates.
 *
 * @type {('coordinate'|'viewport'|'size'|'uv')}
 */
this.scope = scope;
/**
 * This flag can be used for type testing.
 *
 * @type {boolean}
 * @readonly
 * @default true
 */
this.isViewportNode = true;
}
/**
 * This method is overwritten since the node type depends on the selected scope.
 *
 * @return {('vec2'|'vec4')} The node type.
 */
getNodeType() {
if ( this.scope === ScreenNode.VIEWPORT ) return 'vec4';
else return 'vec2';
}
/**
 * This method is overwritten since the node's update type depends on the selected scope.
 * Size and viewport data must be refreshed per render; the other scopes never change.
 *
 * @return {NodeUpdateType} The update type.
 */
getUpdateType() {
let updateType = NodeUpdateType.NONE;
if ( this.scope === ScreenNode.SIZE || this.scope === ScreenNode.VIEWPORT ) {
updateType = NodeUpdateType.RENDER;
}
this.updateType = updateType;
return updateType;
}
/**
 * `ScreenNode` implements {@link Node#update} to retrieve viewport and size information
 * from the current renderer.
 *
 * @param {NodeFrame} frame - A reference to the current node frame.
 */
update( { renderer } ) {
const renderTarget = renderer.getRenderTarget();
if ( this.scope === ScreenNode.VIEWPORT ) {
if ( renderTarget !== null ) {
viewportVec.copy( renderTarget.viewport );
} else {
renderer.getViewport( viewportVec );
// the renderer's viewport is defined in logical pixels, convert to physical pixels
viewportVec.multiplyScalar( renderer.getPixelRatio() );
}
} else {
if ( renderTarget !== null ) {
screenSizeVec.width = renderTarget.width;
screenSizeVec.height = renderTarget.height;
} else {
renderer.getDrawingBufferSize( screenSizeVec );
}
}
}
/**
 * Returns the output node depending on the scope: a uniform for size and
 * viewport data, or a normalized coordinate expression for the UV scope.
 *
 * @return {Node} The output node.
 */
setup( /*builder*/ ) {
const scope = this.scope;
let output = null;
if ( scope === ScreenNode.SIZE ) {
// the backing vectors are created lazily and shared by all instances of this scope
output = uniform( screenSizeVec || ( screenSizeVec = new Vector2() ) );
} else if ( scope === ScreenNode.VIEWPORT ) {
output = uniform( viewportVec || ( viewportVec = new Vector4() ) );
} else {
output = vec2( screenCoordinate.div( screenSize ) );
}
return output;
}
/**
 * Overwritten to emit the raw fragment coordinate for the `COORDINATE` scope,
 * flipping the y-axis when the builder requires it so the result follows
 * WebGPU conventions.
 *
 * @param {NodeBuilder} builder - The current node builder.
 * @return {string} The generated shader snippet.
 */
generate( builder ) {
if ( this.scope === ScreenNode.COORDINATE ) {
let coord = builder.getFragCoord();
if ( builder.isFlipY() ) {
// follow webgpu standards
const size = builder.getNodeProperties( screenSize ).outputNode.build( builder );
coord = `${ builder.getType( 'vec2' ) }( ${ coord }.x, ${ size }.y - ${ coord }.y )`;
}
return coord;
}
return super.generate( builder );
}
}
// Scope constants (see the constructor documentation for their semantics).
ScreenNode.COORDINATE = 'coordinate';
ScreenNode.VIEWPORT = 'viewport';
ScreenNode.SIZE = 'size';
ScreenNode.UV = 'uv';
export default ScreenNode;
// Screen
/**
 * TSL object that represents normalized screen coordinates, unitless in `[0, 1]`.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const screenUV = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.UV );
/**
 * TSL object that represents the screen resolution in physical pixel units.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const screenSize = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.SIZE );
/**
 * TSL object that represents the current `x`/`y` pixel position on the screen in physical pixel units.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const screenCoordinate = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.COORDINATE );
// Viewport
/**
 * TSL object that represents the viewport rectangle as `x`, `y`, `width` and `height` in physical pixel units.
 *
 * @tsl
 * @type {ScreenNode<vec4>}
 */
export const viewport = /*@__PURE__*/ nodeImmutable( ScreenNode, ScreenNode.VIEWPORT );
/**
 * TSL object that represents the viewport resolution in physical pixel units.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const viewportSize = viewport.zw;
/**
 * TSL object that represents the current `x`/`y` pixel position on the viewport in physical pixel units.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const viewportCoordinate = /*@__PURE__*/ screenCoordinate.sub( viewport.xy );
/**
 * TSL object that represents normalized viewport coordinates, unitless in `[0, 1]`.
 *
 * @tsl
 * @type {ScreenNode<vec2>}
 */
export const viewportUV = /*@__PURE__*/ viewportCoordinate.div( viewportSize );
// Deprecated
/**
 * @deprecated since r169. Use {@link screenSize} instead.
 */
export const viewportResolution = /*@__PURE__*/ ( Fn( () => { // @deprecated, r169
console.warn( 'THREE.TSL: "viewportResolution" is deprecated. Use "screenSize" instead.' );
return screenSize;
}, 'vec2' ).once() )();

View File

@@ -0,0 +1,242 @@
import { Fn, float, mat3, vec3, If } from '../tsl/TSLBase.js';
import { select } from '../math/ConditionalNode.js';
import { clamp, log2, max, min, pow, mix } from '../math/MathNode.js';
import { mul, sub, div } from '../math/OperatorNode.js';
/**
 * Linear tone mapping, exposure only.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const linearToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {

	// scale by exposure, then clamp the result into [ 0, 1 ]
	const exposed = color.mul( exposure );

	return exposed.clamp();

} ).setLayout( {
	name: 'linearToneMapping',
	type: 'vec3',
	inputs: [
		{ name: 'color', type: 'vec3' },
		{ name: 'exposure', type: 'float' }
	]
} );
/**
 * Reinhard tone mapping.
 *
 * Reference: {@link https://www.cs.utah.edu/docs/techreports/2002/pdf/UUCS-02-001.pdf}
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const reinhardToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {

	// x / ( 1 + x ), applied after exposure scaling
	const exposed = color.mul( exposure );

	return exposed.div( exposed.add( 1.0 ) ).clamp();

} ).setLayout( {
	name: 'reinhardToneMapping',
	type: 'vec3',
	inputs: [
		{ name: 'color', type: 'vec3' },
		{ name: 'exposure', type: 'float' }
	]
} );
/**
 * Cineon tone mapping.
 *
 * Reference: {@link http://filmicworlds.com/blog/filmic-tonemapping-operators/}
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const cineonToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {

	// filmic operator by Jim Hejl and Richard Burgess-Dawson
	const exposed = color.mul( exposure );
	const x = exposed.sub( 0.004 ).max( 0.0 );

	const numerator = x.mul( x.mul( 6.2 ).add( 0.5 ) );
	const denominator = x.mul( x.mul( 6.2 ).add( 1.7 ) ).add( 0.06 );

	return numerator.div( denominator ).pow( 2.2 );

} ).setLayout( {
	name: 'cineonToneMapping',
	type: 'vec3',
	inputs: [
		{ name: 'color', type: 'vec3' },
		{ name: 'exposure', type: 'float' }
	]
} );
// source: https://github.com/selfshadow/ltc_code/blob/master/webgl/shaders/ltc/ltc_blit.fs
// Rational polynomial fit of the ACES RRT (Reference Rendering Transform)
// combined with the ODT (Output Device Transform).
const RRTAndODTFit = /*@__PURE__*/ Fn( ( [ color ] ) => {
const a = color.mul( color.add( 0.0245786 ) ).sub( 0.000090537 );
const b = color.mul( color.add( 0.4329510 ).mul( 0.983729 ) ).add( 0.238081 );
return a.div( b );
} );
/**
 * ACESFilmic tone mapping.
 *
 * The input color is converted to the AP1 color space, run through the
 * RRT/ODT fit and converted back to sRGB primaries.
 *
 * Reference: {@link https://github.com/selfshadow/ltc_code/blob/master/webgl/shaders/ltc/ltc_blit.fs}
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const acesFilmicToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
// sRGB => XYZ => D65_2_D60 => AP1 => RRT_SAT
const ACESInputMat = mat3(
0.59719, 0.35458, 0.04823,
0.07600, 0.90834, 0.01566,
0.02840, 0.13383, 0.83777
);
// ODT_SAT => XYZ => D60_2_D65 => sRGB
const ACESOutputMat = mat3(
1.60475, - 0.53108, - 0.07367,
- 0.10208, 1.10813, - 0.00605,
- 0.00327, - 0.07276, 1.07602
);
// NOTE(review): the division by 0.6 scales exposure before the fit — matches the WebGL renderer's implementation; confirm against upstream if modified
color = color.mul( exposure ).div( 0.6 );
color = ACESInputMat.mul( color );
// Apply RRT and ODT
color = RRTAndODTFit( color );
color = ACESOutputMat.mul( color );
// Clamp to [0, 1]
return color.clamp();
} ).setLayout( {
name: 'acesFilmicToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
// Conversion matrices between linear sRGB and linear Rec. 2020 primaries.
const LINEAR_REC2020_TO_LINEAR_SRGB = /*@__PURE__*/ mat3( vec3( 1.6605, - 0.1246, - 0.0182 ), vec3( - 0.5876, 1.1329, - 0.1006 ), vec3( - 0.0728, - 0.0083, 1.1187 ) );
const LINEAR_SRGB_TO_LINEAR_REC2020 = /*@__PURE__*/ mat3( vec3( 0.6274, 0.0691, 0.0164 ), vec3( 0.3293, 0.9195, 0.0880 ), vec3( 0.0433, 0.0113, 0.8956 ) );
// Sixth-order polynomial approximation of the default AgX contrast curve.
const agxDefaultContrastApprox = /*@__PURE__*/ Fn( ( [ x_immutable ] ) => {
const x = vec3( x_immutable ).toVar();
const x2 = vec3( x.mul( x ) ).toVar();
const x4 = vec3( x2.mul( x2 ) ).toVar();
return float( 15.5 ).mul( x4.mul( x2 ) ).sub( mul( 40.14, x4.mul( x ) ) ).add( mul( 31.96, x4 ).sub( mul( 6.868, x2.mul( x ) ) ).add( mul( 0.4298, x2 ).add( mul( 0.1191, x ).sub( 0.00232 ) ) ) );
} );
/**
 * AgX tone mapping.
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const agxToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
const colortone = vec3( color ).toVar();
const AgXInsetMatrix = mat3( vec3( 0.856627153315983, 0.137318972929847, 0.11189821299995 ), vec3( 0.0951212405381588, 0.761241990602591, 0.0767994186031903 ), vec3( 0.0482516061458583, 0.101439036467562, 0.811302368396859 ) );
const AgXOutsetMatrix = mat3( vec3( 1.1271005818144368, - 0.1413297634984383, - 0.14132976349843826 ), vec3( - 0.11060664309660323, 1.157823702216272, - 0.11060664309660294 ), vec3( - 0.016493938717834573, - 0.016493938717834257, 1.2519364065950405 ) );
// exposure value range the AgX curve operates on
const AgxMinEv = float( - 12.47393 );
const AgxMaxEv = float( 4.026069 );
colortone.mulAssign( exposure );
colortone.assign( LINEAR_SRGB_TO_LINEAR_REC2020.mul( colortone ) );
colortone.assign( AgXInsetMatrix.mul( colortone ) );
// log2 encoding, remapped from [ AgxMinEv, AgxMaxEv ] to [ 0, 1 ]
colortone.assign( max( colortone, 1e-10 ) ); // avoid log2( 0 )
colortone.assign( log2( colortone ) );
colortone.assign( colortone.sub( AgxMinEv ).div( AgxMaxEv.sub( AgxMinEv ) ) );
colortone.assign( clamp( colortone, 0.0, 1.0 ) );
// apply the sigmoid contrast curve
colortone.assign( agxDefaultContrastApprox( colortone ) );
colortone.assign( AgXOutsetMatrix.mul( colortone ) );
// undo the 2.2 encoding and convert back to linear sRGB
colortone.assign( pow( max( vec3( 0.0 ), colortone ), vec3( 2.2 ) ) );
colortone.assign( LINEAR_REC2020_TO_LINEAR_SRGB.mul( colortone ) );
// clamp any out-of-gamut values introduced by the conversion
colortone.assign( clamp( colortone, 0.0, 1.0 ) );
return colortone;
} ).setLayout( {
name: 'agxToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );
/**
 * Neutral tone mapping.
 *
 * Reference: {@link https://modelviewer.dev/examples/tone-mapping}
 *
 * @tsl
 * @function
 * @param {Node<vec3>} color - The color that should be tone mapped.
 * @param {Node<float>} exposure - The exposure.
 * @return {Node<vec3>} The tone mapped color.
 */
export const neutralToneMapping = /*@__PURE__*/ Fn( ( [ color, exposure ] ) => {
const StartCompression = float( 0.8 - 0.04 );
const Desaturation = float( 0.15 );
color = color.mul( exposure );
// darken near-black values to restore contrast
const x = min( color.r, min( color.g, color.b ) );
const offset = select( x.lessThan( 0.08 ), x.sub( mul( 6.25, x.mul( x ) ) ), 0.04 );
color.subAssign( offset );
const peak = max( color.r, max( color.g, color.b ) );
// early out: colors below the compression threshold are returned unchanged
If( peak.lessThan( StartCompression ), () => {
return color;
} );
// compress the peak channel towards 1 and rescale the color accordingly
const d = sub( 1, StartCompression );
const newPeak = sub( 1, d.mul( d ).div( peak.add( d.sub( StartCompression ) ) ) );
color.mulAssign( newPeak.div( peak ) );
// desaturate towards white as the peak approaches the limit
const g = sub( 1, div( 1, Desaturation.mul( peak.sub( newPeak ) ).add( 1 ) ) );
return mix( color, vec3( newPeak ), g );
} ).setLayout( {
name: 'neutralToneMapping',
type: 'vec3',
inputs: [
{ name: 'color', type: 'vec3' },
{ name: 'exposure', type: 'float' }
]
} );

View File

@@ -0,0 +1,120 @@
import TempNode from '../core/TempNode.js';
import { addMethodChaining, nodeObject, vec4 } from '../tsl/TSLCore.js';
import { rendererReference } from '../accessors/RendererReferenceNode.js';
import { NoToneMapping } from '../../constants.js';
import { hash } from '../core/NodeUtils.js';
/**
* This node represents a tone mapping operation.
*
* @augments TempNode
*/
class ToneMappingNode extends TempNode {
static get type() {
return 'ToneMappingNode';
}
/**
 * Constructs a new tone mapping node.
 *
 * @param {number} toneMapping - The tone mapping type.
 * @param {Node} [exposureNode=toneMappingExposure] - The tone mapping exposure.
 * @param {?Node} [colorNode=null] - The color node to process.
 */
constructor( toneMapping, exposureNode = toneMappingExposure, colorNode = null ) {
super( 'vec3' );
/**
 * The tone mapping type.
 *
 * @type {number}
 */
this.toneMapping = toneMapping;
/**
 * The tone mapping exposure.
 *
 * @type {Node}
 * @default toneMappingExposure
 */
this.exposureNode = exposureNode;
/**
 * Represents the color to process.
 *
 * @type {?Node}
 * @default null
 */
this.colorNode = colorNode;
}
/**
 * Overwrites the default `customCacheKey()` implementation by including the tone
 * mapping type into the cache key.
 *
 * @return {number} The hash.
 */
customCacheKey() {
return hash( this.toneMapping );
}
/**
 * Applies the tone mapping function resolved via the renderer's node
 * library to the color node.
 *
 * @param {NodeBuilder} builder - The current node builder.
 * @return {Node} The tone mapped color.
 */
setup( builder ) {
const colorNode = this.colorNode || builder.context.color;
const toneMapping = this.toneMapping;
if ( toneMapping === NoToneMapping ) return colorNode;
let outputNode = null;
const toneMappingFn = builder.renderer.library.getToneMappingFunction( toneMapping );
if ( toneMappingFn !== null ) {
// only the rgb channels are tone mapped; alpha is passed through unchanged
outputNode = vec4( toneMappingFn( colorNode.rgb, this.exposureNode ), colorNode.a );
} else {
console.error( 'ToneMappingNode: Unsupported Tone Mapping configuration.', toneMapping );
outputNode = colorNode;
}
return outputNode;
}
}
export default ToneMappingNode;
/**
 * TSL function for creating a tone mapping node.
 *
 * @tsl
 * @function
 * @param {number} mapping - The tone mapping type.
 * @param {Node<float> | number} exposure - The tone mapping exposure.
 * @param {Node<vec3> | Color} color - The color node to process.
 * @returns {ToneMappingNode<vec3>}
 */
export const toneMapping = ( mapping, exposure, color ) => nodeObject( new ToneMappingNode( mapping, nodeObject( exposure ), nodeObject( color ) ) );
/**
 * TSL object that represents the global tone mapping exposure of the renderer.
 *
 * @tsl
 * @type {RendererReferenceNode<float>}
 */
export const toneMappingExposure = /*@__PURE__*/ rendererReference( 'toneMappingExposure', 'float' );
addMethodChaining( 'toneMapping', ( color, mapping, exposure ) => toneMapping( mapping, exposure, color ) );

View File

@@ -0,0 +1,183 @@
import { float, nodeObject, normalize, vec4 } from '../tsl/TSLBase.js';
import { Color } from '../../math/Color.js';
import NodeMaterial from '../../materials/nodes/NodeMaterial.js';
import { cameraProjectionMatrix } from '../../nodes/accessors/Camera.js';
import { modelViewMatrix } from '../../nodes/accessors/ModelNode.js';
import { positionLocal } from '../../nodes/accessors/Position.js';
import { normalLocal } from '../../nodes/accessors/Normal.js';
import { BackSide } from '../../constants.js';
import PassNode from './PassNode.js';
/**
* Represents a render pass for producing a toon outline effect on compatible objects.
* Only 3D objects with materials of type `MeshToonMaterial` and `MeshToonNodeMaterial`
* will receive the outline.
*
* ```js
* const postProcessing = new PostProcessing( renderer );
*
* const scenePass = toonOutlinePass( scene, camera );
*
* postProcessing.outputNode = scenePass;
* ```
* @augments PassNode
*/
class ToonOutlinePassNode extends PassNode {
static get type() {
return 'ToonOutlinePassNode';
}
/**
 * Constructs a new outline pass node.
 *
 * @param {Scene} scene - A reference to the scene.
 * @param {Camera} camera - A reference to the camera.
 * @param {Node} colorNode - Defines the outline's color.
 * @param {Node} thicknessNode - Defines the outline's thickness.
 * @param {Node} alphaNode - Defines the outline's alpha.
 */
constructor( scene, camera, colorNode, thicknessNode, alphaNode ) {
super( PassNode.COLOR, scene, camera );
/**
 * Defines the outline's color.
 *
 * @type {Node}
 */
this.colorNode = colorNode;
/**
 * Defines the outline's thickness.
 *
 * @type {Node}
 */
this.thicknessNode = thicknessNode;
/**
 * Defines the outline's alpha.
 *
 * @type {Node}
 */
this.alphaNode = alphaNode;
/**
 * An internal material cache.
 *
 * @private
 * @type {WeakMap<Material, NodeMaterial>}
 */
this._materialCache = new WeakMap();
}
/**
 * Temporarily replaces the renderer's render object function so objects with
 * toon materials are rendered twice: once with the outline material and once
 * with their original material. The original function is restored afterwards.
 *
 * @param {NodeFrame} frame - A reference to the current node frame.
 */
updateBefore( frame ) {
const { renderer } = frame;
const currentRenderObjectFunction = renderer.getRenderObjectFunction();
renderer.setRenderObjectFunction( ( object, scene, camera, geometry, material, group, lightsNode, clippingContext ) => {
// only render outline for supported materials
if ( material.isMeshToonMaterial || material.isMeshToonNodeMaterial ) {
if ( material.wireframe === false ) {
const outlineMaterial = this._getOutlineMaterial( material );
renderer.renderObject( object, scene, camera, geometry, outlineMaterial, group, lightsNode, clippingContext );
}
}
// default
renderer.renderObject( object, scene, camera, geometry, material, group, lightsNode, clippingContext );
} );
super.updateBefore( frame );
// restore the original render object function
renderer.setRenderObjectFunction( currentRenderObjectFunction );
}
/**
 * Creates the material used for outline rendering: the mesh is drawn
 * back-facing and displaced along the (negated) normal in clip space.
 *
 * @private
 * @return {NodeMaterial} The outline material.
 */
_createMaterial() {
const material = new NodeMaterial();
material.isMeshToonOutlineMaterial = true;
material.name = 'Toon_Outline';
material.side = BackSide;
// vertex node
const outlineNormal = normalLocal.negate();
const mvp = cameraProjectionMatrix.mul( modelViewMatrix );
const ratio = float( 1.0 ); // TODO: support outline thickness ratio for each vertex
const pos = mvp.mul( vec4( positionLocal, 1.0 ) );
const pos2 = mvp.mul( vec4( positionLocal.add( outlineNormal ), 1.0 ) );
const norm = normalize( pos.sub( pos2 ) ); // NOTE: subtract pos2 from pos because BackSide objectNormal is negative
material.vertexNode = pos.add( norm.mul( this.thicknessNode ).mul( pos.w ).mul( ratio ) );
// color node
material.colorNode = vec4( this.colorNode, this.alphaNode );
return material;
}
/**
 * For the given toon material, this method returns a corresponding
 * outline material. Outline materials are cached per original material.
 *
 * @private
 * @param {(MeshToonMaterial|MeshToonNodeMaterial)} originalMaterial - The toon material.
 * @return {NodeMaterial} The outline material.
 */
_getOutlineMaterial( originalMaterial ) {
let outlineMaterial = this._materialCache.get( originalMaterial );
if ( outlineMaterial === undefined ) {
outlineMaterial = this._createMaterial();
this._materialCache.set( originalMaterial, outlineMaterial );
}
return outlineMaterial;
}
}
export default ToonOutlinePassNode;
/**
 * TSL function for creating a toon outline pass node.
 *
 * @tsl
 * @function
 * @param {Scene} scene - A reference to the scene.
 * @param {Camera} camera - A reference to the camera.
 * @param {Color} [color=#000000] - Defines the outline's color.
 * @param {number} [thickness=0.003] - Defines the outline's thickness.
 * @param {number} [alpha=1] - Defines the outline's alpha.
 * @returns {ToonOutlinePassNode}
 */
export const toonOutlinePass = ( scene, camera, color = new Color( 0, 0, 0 ), thickness = 0.003, alpha = 1 ) => nodeObject( new ToonOutlinePassNode( scene, camera, nodeObject( color ), nodeObject( thickness ), nodeObject( alpha ) ) );

View File

@@ -0,0 +1,294 @@
import Node from '../core/Node.js';
import { float, log, log2, nodeImmutable, nodeProxy } from '../tsl/TSLBase.js';
import { cameraNear, cameraFar } from '../accessors/Camera.js';
import { positionView } from '../accessors/Position.js';
import { viewportDepthTexture } from './ViewportDepthTextureNode.js';
/**
 * This node offers a collection of features in context of the depth logic in the fragment shader.
 * Depending on {@link ViewportDepthNode#scope}, it can be used to define a depth value for the current
 * fragment or for depth evaluation purposes.
 *
 * @augments Node
 */
class ViewportDepthNode extends Node {

	static get type() {

		return 'ViewportDepthNode';

	}

	/**
	 * Constructs a new viewport depth node.
	 *
	 * @param {('depth'|'depthBase'|'linearDepth')} scope - The node's scope.
	 * @param {?Node} [valueNode=null] - The value node.
	 */
	constructor( scope, valueNode = null ) {

		super( 'float' );

		/**
		 * Controls the node's behavior.
		 *
		 * - `ViewportDepthNode.DEPTH_BASE`: Allows to define a value for the current fragment's depth.
		 * - `ViewportDepthNode.DEPTH`: Represents the depth value for the current fragment (`valueNode` is ignored).
		 * - `ViewportDepthNode.LINEAR_DEPTH`: Represents the linear (orthographic) depth value of the current fragment.
		 * If a `valueNode` is set, the scope can be used to convert perspective depth data to linear data.
		 *
		 * @type {('depth'|'depthBase'|'linearDepth')}
		 */
		this.scope = scope;

		/**
		 * Can be used to define a custom depth value.
		 * The property is ignored in the `ViewportDepthNode.DEPTH` scope.
		 *
		 * @type {?Node}
		 * @default null
		 */
		this.valueNode = valueNode;

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isViewportDepthNode = true;

	}

	/**
	 * In the `DEPTH_BASE` scope the node resolves to the builder's fragment
	 * depth output; all other scopes use the default generation path.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {string} The generated shader snippet.
	 */
	generate( builder ) {

		if ( this.scope === ViewportDepthNode.DEPTH_BASE ) {

			return builder.getFragDepth();

		}

		return super.generate( builder );

	}

	/**
	 * Builds the node's output depending on the configured scope.
	 *
	 * @param {Object} builder - The current node builder (only `camera` is used).
	 * @return {?Node} The setup node.
	 */
	setup( { camera } ) {

		const { scope, valueNode } = this;

		let node = null;

		if ( scope === ViewportDepthNode.DEPTH_BASE ) {

			// Assign a custom value to the fragment's depth output.
			if ( valueNode !== null ) node = depthBase().assign( valueNode );

		} else if ( scope === ViewportDepthNode.DEPTH ) {

			// The current fragment's depth, derived from the view-space position.
			node = camera.isPerspectiveCamera
				? viewZToPerspectiveDepth( positionView.z, cameraNear, cameraFar )
				: viewZToOrthographicDepth( positionView.z, cameraNear, cameraFar );

		} else if ( scope === ViewportDepthNode.LINEAR_DEPTH ) {

			if ( valueNode === null ) {

				node = viewZToOrthographicDepth( positionView.z, cameraNear, cameraFar );

			} else if ( camera.isPerspectiveCamera ) {

				// Convert given perspective depth data to linear (orthographic) depth.
				const viewZ = perspectiveDepthToViewZ( valueNode, cameraNear, cameraFar );
				node = viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );

			} else {

				// Orthographic depth is already linear.
				node = valueNode;

			}

		}

		return node;

	}

}
// Scope identifiers — see the constructor documentation for their semantics.
ViewportDepthNode.DEPTH_BASE = 'depthBase';
ViewportDepthNode.DEPTH = 'depth';
ViewportDepthNode.LINEAR_DEPTH = 'linearDepth';

export default ViewportDepthNode;
// NOTE: viewZ, the z-coordinate in camera space, is negative for points in front of the camera
/**
 * TSL function for converting a viewZ value to an orthographic depth value.
 *
 * @tsl
 * @function
 * @param {Node<float>} viewZ - The viewZ node (negative for points in front of the camera).
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const viewZToOrthographicDepth = ( viewZ, near, far ) => {

	// Maps viewZ == -near to 0 and viewZ == -far to 1.
	return viewZ.add( near ).div( near.sub( far ) );

};
/**
 * TSL function for converting an orthographic depth value to a viewZ value.
 * Inverse of `viewZToOrthographicDepth`; returns a negative viewZ for points
 * in front of the camera.
 *
 * @tsl
 * @function
 * @param {Node<float>} depth - The orthographic depth.
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const orthographicDepthToViewZ = ( depth, near, far ) => {

	const range = near.sub( far );

	return range.mul( depth ).sub( near );

};
/**
 * TSL function for converting a viewZ value to a perspective depth value.
 *
 * Note: {@link https://twitter.com/gonnavis/status/1377183786949959682}.
 *
 * @tsl
 * @function
 * @param {Node<float>} viewZ - The viewZ node (negative for points in front of the camera).
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const viewZToPerspectiveDepth = ( viewZ, near, far ) => {

	// Maps viewZ == -near to 0 and viewZ == -far to 1 (non-linearly).
	const numerator = near.add( viewZ ).mul( far );
	const denominator = far.sub( near ).mul( viewZ );

	return numerator.div( denominator );

};
/**
 * TSL function for converting a perspective depth value to a viewZ value.
 * Inverse of `viewZToPerspectiveDepth`; returns a negative viewZ for points
 * in front of the camera.
 *
 * @tsl
 * @function
 * @param {Node<float>} depth - The perspective depth.
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const perspectiveDepthToViewZ = ( depth, near, far ) => {

	const numerator = near.mul( far );
	const denominator = far.sub( near ).mul( depth ).sub( far );

	return numerator.div( denominator );

};
/**
 * TSL function for converting a viewZ value to a logarithmic depth value.
 *
 * @tsl
 * @function
 * @param {Node<float>} viewZ - The viewZ node (negative for points in front of the camera).
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const viewZToLogarithmicDepth = ( viewZ, near, far ) => {

	// Adapted from Thatcher Ulrich's logarithmic depth formula
	// (http://tulrich.com/geekstuff/log_depth_buffer.txt), itself an improvement upon the
	// earlier Outerra formula (https://outerra.blogspot.com/2009/08/logarithmic-z-buffer.html):
	//
	//   z = K * log( w / cameraNear ) / log( cameraFar / cameraNear )
	//
	// where K = 2^k - 1 and k is the number of bits in the depth buffer. Outerra's
	// "C-constant" variants assume a near plane at 0 (see
	// https://outerra.blogspot.com/2012/11/maximizing-depth-buffer-range-and.html and
	// https://outerra.blogspot.com/2013/07/logarithmic-depth-buffer-optimizations.html);
	// Ulrich's variant was chosen here because it keeps constant relative precision over
	// the whole near-far range. Four changes are applied to Ulrich's formula:
	//
	// 1. Clamp the camera near plane so we don't divide by 0.
	// 2. Use log2 instead of log to avoid an extra multiply (shaders implement log using log2).
	// 3. Assume K is 1 (K = maximum value in depth buffer).
	// 4. Use 'viewZ' instead of 'w', for consistency with "viewZToOrthographicDepth" and
	//    "viewZToPerspectiveDepth". Those functions expect a negative viewZ, so we do the
	//    same here — hence the 'viewZ.negate()' call.
	//
	// For a visual representation of this depth curve, see https://www.desmos.com/calculator/uyqk0vex1u

	const clampedNear = near.max( 1e-6 ).toVar();

	const numerator = log2( viewZ.negate().div( clampedNear ) );
	const denominator = log2( far.div( clampedNear ) );

	return numerator.div( denominator );

};
/**
 * TSL function for converting a logarithmic depth value to a viewZ value.
 * Inverse of `viewZToLogarithmicDepth`.
 *
 * @tsl
 * @function
 * @param {Node<float>} depth - The logarithmic depth.
 * @param {Node<float>} near - The camera's near value.
 * @param {Node<float>} far - The camera's far value.
 * @returns {Node<float>}
 */
export const logarithmicDepthToViewZ = ( depth, near, far ) => {

	// The result is negated to stay consistent with "orthographicDepthToViewZ" and
	// "perspectiveDepthToViewZ", which also return a negative viewZ for points in
	// front of the camera.
	const ratio = far.div( near );
	const exponent = depth.mul( log( ratio ) );

	return float( Math.E ).pow( exponent ).mul( near ).negate();

};
/**
 * TSL function for defining a value for the current fragment's depth.
 *
 * Module-private: not exported directly; exposed to users via `depth.assign()` below.
 *
 * @tsl
 * @function
 * @param {Node<float>} value - The depth value to set.
 * @returns {ViewportDepthNode<float>}
 */
const depthBase = /*@__PURE__*/ nodeProxy( ViewportDepthNode, ViewportDepthNode.DEPTH_BASE );

/**
 * TSL object that represents the depth value for the current fragment.
 *
 * @tsl
 * @type {ViewportDepthNode}
 */
export const depth = /*@__PURE__*/ nodeImmutable( ViewportDepthNode, ViewportDepthNode.DEPTH );

/**
 * TSL function for converting a perspective depth value to linear depth.
 *
 * @tsl
 * @function
 * @param {?Node<float>} [value=null] - The perspective depth. If `null` is provided, the current fragment's depth is used.
 * @returns {ViewportDepthNode<float>}
 */
export const linearDepth = /*@__PURE__*/ nodeProxy( ViewportDepthNode, ViewportDepthNode.LINEAR_DEPTH ).setParameterLength( 0, 1 );

/**
 * TSL object that represents the linear (orthographic) depth value of the current fragment,
 * sourced from the viewport's shared depth texture.
 *
 * @tsl
 * @type {ViewportDepthNode}
 */
export const viewportLinearDepth = /*@__PURE__*/ linearDepth( viewportDepthTexture() );

// Allow writing a custom fragment depth via `depth.assign( value )`.
depth.assign = ( value ) => depthBase( value );

View File

@@ -0,0 +1,55 @@
import ViewportTextureNode from './ViewportTextureNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { DepthTexture } from '../../textures/DepthTexture.js';
// Lazily created DepthTexture shared by all ViewportDepthTextureNode instances.
// Named to match the `_sharedFramebuffer` convention used by ViewportSharedTextureNode.
let _sharedDepthTexture = null;

/**
 * Represents the depth of the current viewport as a texture. This module
 * can be used in combination with viewport texture to achieve effects
 * that require depth evaluation.
 *
 * @augments ViewportTextureNode
 */
class ViewportDepthTextureNode extends ViewportTextureNode {

	static get type() {

		return 'ViewportDepthTextureNode';

	}

	/**
	 * Constructs a new viewport depth texture node.
	 *
	 * @param {Node} [uvNode=screenUV] - The uv node.
	 * @param {?Node} [levelNode=null] - The level node.
	 */
	constructor( uvNode = screenUV, levelNode = null ) {

		// All instances sample the same shared depth texture; create it on first use.
		if ( _sharedDepthTexture === null ) {

			_sharedDepthTexture = new DepthTexture();

		}

		super( uvNode, levelNode, _sharedDepthTexture );

	}

}

export default ViewportDepthTextureNode;
/**
 * TSL function for a viewport depth texture node.
 *
 * Note: all instances share a single depth texture, so this helper is cheap
 * to call from multiple places.
 *
 * @tsl
 * @function
 * @param {?Node} [uvNode=screenUV] - The uv node.
 * @param {?Node} [levelNode=null] - The level node.
 * @returns {ViewportDepthTextureNode}
 */
export const viewportDepthTexture = /*@__PURE__*/ nodeProxy( ViewportDepthTextureNode ).setParameterLength( 0, 2 );

View File

@@ -0,0 +1,61 @@
import ViewportTextureNode from './ViewportTextureNode.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { FramebufferTexture } from '../../textures/FramebufferTexture.js';
// Lazily created framebuffer texture shared by all ViewportSharedTextureNode instances.
let _sharedFramebuffer = null;

/**
 * `ViewportTextureNode` creates an internal texture for each node instance. This module
 * shares a single texture across all instances of `ViewportSharedTextureNode`. It should
 * be the first choice when using data of the default/screen framebuffer for performance reasons.
 *
 * @augments ViewportTextureNode
 */
class ViewportSharedTextureNode extends ViewportTextureNode {

	static get type() {

		return 'ViewportSharedTextureNode';

	}

	/**
	 * Constructs a new viewport shared texture node.
	 *
	 * @param {Node} [uvNode=screenUV] - The uv node.
	 * @param {?Node} [levelNode=null] - The level node.
	 */
	constructor( uvNode = screenUV, levelNode = null ) {

		if ( _sharedFramebuffer === null ) _sharedFramebuffer = new FramebufferTexture();

		super( uvNode, levelNode, _sharedFramebuffer );

	}

	updateReference() {

		// The node itself is its own reference since the texture is shared module-wide.
		return this;

	}

}

export default ViewportSharedTextureNode;
/**
 * TSL function for creating a shared viewport texture node.
 *
 * Note: all instances share a single framebuffer texture — prefer this over
 * `viewportTexture()` when reading the default/screen framebuffer.
 *
 * @tsl
 * @function
 * @param {?Node} [uvNode=screenUV] - The uv node.
 * @param {?Node} [levelNode=null] - The level node.
 * @returns {ViewportSharedTextureNode}
 */
export const viewportSharedTexture = /*@__PURE__*/ nodeProxy( ViewportSharedTextureNode ).setParameterLength( 0, 2 );

View File

@@ -0,0 +1,138 @@
import TextureNode from '../accessors/TextureNode.js';
import { NodeUpdateType } from '../core/constants.js';
import { nodeProxy } from '../tsl/TSLBase.js';
import { screenUV } from './ScreenNode.js';
import { Vector2 } from '../../math/Vector2.js';
import { FramebufferTexture } from '../../textures/FramebufferTexture.js';
import { LinearMipmapLinearFilter } from '../../constants.js';
// Scratch vector for querying the drawing buffer size each frame.
const _size = /*@__PURE__*/ new Vector2();

/**
 * A special type of texture node which represents the data of the current viewport
 * as a texture. The module extracts data from the current bound framebuffer with
 * a copy operation so no extra render pass is required to produce the texture data
 * (which is good for performance). `ViewportTextureNode` can be used as an input for a
 * variety of effects like refractive or transmissive materials.
 *
 * @augments TextureNode
 */
class ViewportTextureNode extends TextureNode {

	static get type() {

		return 'ViewportTextureNode';

	}

	/**
	 * Constructs a new viewport texture node.
	 *
	 * @param {Node} [uvNode=screenUV] - The uv node.
	 * @param {?Node} [levelNode=null] - The level node.
	 * @param {?Texture} [framebufferTexture=null] - A framebuffer texture holding the viewport data. If not provided, a framebuffer texture is created automatically.
	 */
	constructor( uvNode = screenUV, levelNode = null, framebufferTexture = null ) {

		let texture = framebufferTexture;

		if ( texture === null ) {

			texture = new FramebufferTexture();
			texture.minFilter = LinearMipmapLinearFilter;

		}

		super( texture, uvNode, levelNode );

		/**
		 * Whether to generate mipmaps or not.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.generateMipmaps = false;

		/**
		 * This flag can be used for type testing.
		 *
		 * @type {boolean}
		 * @readonly
		 * @default true
		 */
		this.isOutputTextureNode = true;

		/**
		 * The framebuffer copy happens once per frame in
		 * {@link ViewportTextureNode#updateBefore}.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

	}

	/**
	 * Copies the current framebuffer into the node's texture, resizing the
	 * texture first if the drawing buffer size has changed.
	 *
	 * @param {Object} frame - The current node frame (provides the renderer).
	 */
	updateBefore( frame ) {

		const renderer = frame.renderer;
		renderer.getDrawingBufferSize( _size );

		const texture = this.value;

		const resized = texture.image.width !== _size.width || texture.image.height !== _size.height;

		if ( resized ) {

			texture.image.width = _size.width;
			texture.image.height = _size.height;
			texture.needsUpdate = true;

		}

		// Temporarily apply this node's mipmap setting for the copy, then restore it.
		const previousGenerateMipmaps = texture.generateMipmaps;
		texture.generateMipmaps = this.generateMipmaps;

		renderer.copyFramebufferToTexture( texture );

		texture.generateMipmaps = previousGenerateMipmaps;

	}

	/**
	 * Clones this node, sharing the same framebuffer texture and mipmap setting.
	 *
	 * @return {ViewportTextureNode} The cloned node.
	 */
	clone() {

		const clonedNode = new this.constructor( this.uvNode, this.levelNode, this.value );
		clonedNode.generateMipmaps = this.generateMipmaps;

		return clonedNode;

	}

}

export default ViewportTextureNode;
/**
 * TSL function for creating a viewport texture node.
 *
 * Note: each call creates its own framebuffer texture unless one is passed in.
 *
 * @tsl
 * @function
 * @param {?Node} [uvNode=screenUV] - The uv node.
 * @param {?Node} [levelNode=null] - The level node.
 * @param {?Texture} [framebufferTexture=null] - A framebuffer texture holding the viewport data. If not provided, a framebuffer texture is created automatically.
 * @returns {ViewportTextureNode}
 */
export const viewportTexture = /*@__PURE__*/ nodeProxy( ViewportTextureNode ).setParameterLength( 0, 3 );

/**
 * TSL function for creating a viewport texture node with enabled mipmap generation.
 *
 * @tsl
 * @function
 * @param {?Node} [uvNode=screenUV] - The uv node.
 * @param {?Node} [levelNode=null] - The level node.
 * @param {?Texture} [framebufferTexture=null] - A framebuffer texture holding the viewport data. If not provided, a framebuffer texture is created automatically.
 * @returns {ViewportTextureNode}
 */
export const viewportMipTexture = /*@__PURE__*/ nodeProxy( ViewportTextureNode, null, null, { generateMipmaps: true } ).setParameterLength( 0, 3 );