Learning WGSL parameter passing - 1
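The full Vue component is listed below; stripped of the scene boilerplate, the demo comes down to registering WGSL sources under the 'custom' prefix and then feeding the resulting ShaderMaterial data in three different ways. A minimal sketch, assuming engine, scene and the two WGSL strings from the component below already exist:

import { ShaderMaterial, ShaderLanguage, StorageBuffer, UniformBuffer } from 'babylonjs'

// 'custom' makes Babylon look up customVertexShader / customFragmentShader in ShaderStore.ShadersStoreWGSL
const mat = new ShaderMaterial('shader', scene, { vertex: 'custom', fragment: 'custom' }, {
  attributes: ['position', 'uv'],        // the attributes the WGSL below actually reads
  uniformBuffers: ['Scene', 'Mesh'],     // needed by the sceneUboDeclaration / meshUboDeclaration includes
  shaderLanguage: ShaderLanguage.WGSL,
})

// 1. var<storage, read_write> colors: a raw float array
const colors = new Float32Array([1, 0, 1, 1, 1, 0])
const colorsBuffer = new StorageBuffer(engine, colors.byteLength)
colorsBuffer.update(colors)
mat.setStorageBuffer('colors', colorsBuffer)

// 2. uniform vColor: plain uniforms set from JS values
mat.setFloats('vColor', [1, 0, 0, 1, 0, 1, 0, 1])

// 3. var<uniform> myUBO: a uniform buffer the app updates every frame
const myUBO = new UniformBuffer(engine)
myUBO.addUniform('time', 1) // one f32
mat.setUniformBuffer('myUBO', myUBO)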
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="learnWgsl1" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
WebGPUEngine,
Scene,
HemisphericLight,
ShaderStore,
ShaderMaterial,
ShaderLanguage,
ArcRotateCamera,
Vector3,
Color3,
StorageBuffer,
UniformBuffer,
Constants,
Texture,
MeshBuilder,
TextureSampler
} from 'babylonjs'
const fps = ref(0)
const isRunning = ref(false)
let sceneResources, adt
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("learnWgsl1") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1.5
camera.panningSensibility = 8
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 0, 10))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(40, 40, 40), scene)
light.direction = new Vector3(0.0, 1.0, 0.0)
light.diffuse = new Color3(1.0, 0.95, 0.8)
return light
}
ShaderStore.ShadersStoreWGSL['customVertexShader'] = `
#include<sceneUboDeclaration>
#include<meshUboDeclaration>
attribute position: vec3<f32>;
attribute uv: vec2<f32>;
varying vUV: vec2<f32>;
@vertex
fn main(input: VertexInputs) -> FragmentInputs {
vertexOutputs.vUV = vertexInputs.uv;
vertexOutputs.position = scene.viewProjection * mesh.world * vec4<f32>(vertexInputs.position, 1.0);
}
`
ShaderStore.ShadersStoreWGSL['customFragmentShader'] = `
var<storage, read_write> colors: array<f32>;
struct MyUBO {
time: f32,
};
var<uniform> myUBO: MyUBO;
uniform vColor : array<vec4<f32>, 2>;
varying vUV: vec2<f32>;
var diffuse: texture_2d<f32>;
var mySampler: sampler;
@fragment
fn main(input: FragmentInputs) -> FragmentOutputs {
let offset = u32(floor(fragmentInputs.vUV.x * sin(myUBO.time) * 5)) * 3;
let textureColor = textureSample(diffuse, mySampler, fragmentInputs.vUV) + uniforms.vColor[0];
fragmentOutputs.color = vec4<f32>(colors[offset], colors[offset + 1], colors[offset + 2], 1.0) + textureColor;
}
`
const shaderMaterial = new ShaderMaterial('shader', scene, {
vertex: 'custom',
fragment: 'custom',
}, {
attributes: ['position', 'normal', 'uv', 'rgb'],
uniformBuffers: ['Scene', 'Mesh'],
shaderLanguage: ShaderLanguage.WGSL,
})
const colors = new Float32Array([
1, 0, 1, // Purple
1, 1, 0 // Yellow
])
const colorsBuffer = new StorageBuffer(engine, colors.byteLength)
colorsBuffer.update(colors)
shaderMaterial.setStorageBuffer('colors', colorsBuffer)
shaderMaterial.setFloats('vColor', [1, 0, 0, 1, 0, 1, 0, 1])
let time = 0.2
const myUBO = new UniformBuffer(engine)
myUBO.addUniform('time', 1) // addUniform takes the size in floats (1 for a single f32), not the initial value
shaderMaterial.setUniformBuffer('myUBO', myUBO)
const sampler = new TextureSampler()
var mainTexture = new Texture('/images/wood.jpg', scene)
sampler.setParameters()
sampler.samplingMode = Constants.TEXTURE_NEAREST_SAMPLINGMODE
shaderMaterial.setTextureSampler('mySampler', sampler)
shaderMaterial.setTexture('diffuse', mainTexture)
const tri = MeshBuilder.CreateDisc('tri', { tessellation: 3, radius: 2 })
tri.material = shaderMaterial
tri.rotation = new Vector3(Math.PI, 0, 0)
scene.registerBeforeRender(async () => {
time += 0.01
myUBO.updateFloat('time', time)
myUBO.update()
})
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>

Setting an image texture on the ground - 1
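The idea in this section: run a compute shader once to copy a sampled texture into a writable storage texture, then use that storage texture as an ordinary diffuse texture. Condensed to its essential calls, it is roughly the sketch below (assuming engine, scene, ground and the WGSL string copyTextureComputeShader from the component that follows):

import { ComputeShader, Texture, RawTexture, StandardMaterial } from 'babylonjs'

const shader = new ComputeShader('myCompute', engine, { computeSource: copyTextureComputeShader }, {
  bindingsMapping: {
    'src': { group: 0, binding: 1 },
    'dest': { group: 0, binding: 2 },
  },
})
const src = new Texture('/images/grass.png', scene)
const dest = RawTexture.CreateRGBAStorageTexture(null, 512, 512, scene, false, false)
shader.setTexture('src', src)          // also auto-binds sampler_src at binding 0 (see the note in the code)
shader.setStorageTexture('dest', dest)

// one workgroup per pixel, because the shader declares @workgroup_size(1, 1, 1)
await shader.dispatchWhenReady(dest.getSize().width, dest.getSize().height, 1)

const mat = new StandardMaterial('mat', scene)
mat.diffuseTexture = dest              // the storage texture behaves like any other texture here
ground.material = mat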
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="setTextureToGround1" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
Scene,
ArcRotateCamera,
Vector3,
Color4,
HemisphericLight,
MeshBuilder,
StandardMaterial,
WebGPUEngine,
Texture,
RawTexture,
ComputeShader,
} from 'babylonjs'
import {
AdvancedDynamicTexture,
StackPanel,
Control,
TextBlock,
} from 'babylonjs-gui'
let sceneResources, adt
const fps = ref(0)
const isRunning = ref(false)
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("setTextureToGround1") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1
camera.panningSensibility = 10
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 160, -160))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(1, 1, 0), scene)
return light
}
const createAxis = () => {
const axisX = MeshBuilder.CreateLines(
'axisX', {
colors: [new Color4(1, 0, 0, 1), new Color4(1, 0, 0, 1)],
points: [new Vector3(0, 0, 0), new Vector3(80, 0, 0)]
},
scene
)
const axisY = MeshBuilder.CreateLines(
'axisY', {
colors: [new Color4(0, 1, 0, 1), new Color4(0, 1, 0, 1) ],
points: [new Vector3(0, 0, 0), new Vector3(0, 80, 0) ]
},
scene
)
const axisZ = MeshBuilder.CreateLines(
'axisZ', {
colors: [new Color4(0, 0, 1, 1), new Color4(0, 0, 1, 1)],
points: [new Vector3(0, 0, 0), new Vector3(0, 0, 80)]
},
scene
)
return [axisX, axisY, axisZ]
}
const createGui = async () => {
adt = AdvancedDynamicTexture.CreateFullscreenUI('UI')
const xBox = MeshBuilder.CreateBox('x', { size: 1 }, scene)
xBox.position = new Vector3(80, 0, 0)
const xPanel = new StackPanel()
xPanel.width = '20px'
xPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
xPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const x = new TextBlock()
x.text = 'X'
x.height = '30px'
x.color = 'red'
adt.addControl(xPanel)
xPanel.addControl(x)
xPanel.linkWithMesh(xBox)
const yBox = MeshBuilder.CreateBox('y', { size: 1 }, scene)
yBox.position = new Vector3(0, 80, 0)
const yPanel = new StackPanel()
yPanel.width = '20px'
yPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
yPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const y = new TextBlock()
y.text = 'Y'
y.height = '30px'
y.color = 'green'
adt.addControl(yPanel)
yPanel.addControl(y)
yPanel.linkWithMesh(yBox)
const zBox = MeshBuilder.CreateBox('z', { size: 1 }, scene)
zBox.position = new Vector3(0, 0, 80)
const zPanel = new StackPanel()
zPanel.width = '20px'
zPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
zPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const z = new TextBlock()
z.text = 'Z'
z.height = '30px'
z.color = 'blue'
adt.addControl(zPanel)
zPanel.addControl(z)
zPanel.linkWithMesh(zBox)
}
// Full workflow:
// 1. Create the compute shader and declare its input/output bindings (bindingsMapping).
// 2. Bind the resources (e.g. setTexture + setStorageTexture).
// 3. Call dispatchWhenReady() to launch the compute pass asynchronously.
// 4. Handle the result in the Promise callback (e.g. read pixels back or update a material).
// 5. Use the computed result in subsequent rendering.
const createGround = () => {
const ground = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
// sampler_src is bound automatically: ComputeShader.setTexture() also binds a sampler for the texture,
// and by Babylon's convention that sampler sits at the binding right before the texture
// (sampler_src at 0, src at 1), so it needs no entry in bindingsMapping.
// The binding chosen for dest is free: moving it to another slot (e.g. 3) still runs,
// as long as the shader declaration and bindingsMapping agree.
const copyTextureComputeShader = `
// The sampler sampler_src controls how the texture src is sampled (filter mode, address mode).
@group(0) @binding(0) var sampler_src: sampler;
// The input texture src is a texture_2d<f32> (2D float texture); it cannot be written directly and is read via textureSampleLevel.
@group(0) @binding(1) var src: texture_2d<f32>;
// The output texture dest is a texture_storage_2d (writable storage texture), format rgba8unorm (8-bit normalized unsigned RGBA), access mode write (write-only).
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
// Each workgroup runs 1x1x1 threads, i.e. one invocation handles a single pixel.
// @workgroup_size(1, 1, 1) is inefficient; a larger workgroup (e.g. 8x8x1) would be better.
@compute @workgroup_size(1, 1, 1)
// @builtin(global_invocation_id) injects global_id, the thread's coordinate in the global grid
// (a vec3<u32> matching the dispatch(width, height, depth) arguments).
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
// textureDimensions(src, 0) returns the width/height of mip level 0 of src (vec2<u32>),
// converted to vec2<f32> for the division below.
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
// global_id.xy / dims normalizes the thread coordinate to [0, 1] texture coordinates.
// textureSampleLevel reads src through sampler_src; 0.0 selects mip level 0 (full detail).
// It samples one color from the texture at an explicit mipmap level:
// textureSampleLevel(
//   src,          // input texture (texture_2d<f32> or similar)
//   sampler_src,  // sampler
//   uv,           // normalized UV coordinates in [0, 1]
//   0.0           // mip level to sample (0 = most detailed)
// );
let pix: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
// Write the sampled color pix to the matching coordinate of dest (coordinates cast to vec2<i32>).
// textureStore(
//   texture: texture_storage_2d<T, access>,  // writable storage texture
//   coords: vec2<i32>,                       // integer write coordinates
//   value: vec4<T>                           // color value to write
// );
textureStore(dest, vec2<i32>(global_id.xy), pix);
}
`
const shader = new ComputeShader(
'myCompute',
engine,
{ computeSource: copyTextureComputeShader },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 }
}
}
)
const src = new Texture('/images/grass.png', scene)
const dest = RawTexture.CreateRGBAStorageTexture(null, 512, 512, scene, false, false)
shader.setTexture('src', src)
shader.setStorageTexture('dest', dest)
// dispatchWhenReady()
// schedules the compute shader and launches the compute pass.
// The arguments (dest.getSize().width, dest.getSize().height, 1) mean:
//   workgroup grid size: the shader runs over a width x height x 1 grid,
//   and since @workgroup_size(1, 1, 1) gives each workgroup a single thread,
//   the total thread count is width x height x 1 (one thread per pixel).
// It returns a Promise that resolves once the shader is ready and the dispatch has been submitted.
// dest.readPixels()
// reads the pixel data back from the storage texture dest after the compute pass.
// It returns a Promise resolving to a Uint8Array or Float32Array (depending on the texture format)
// holding the RGBA values. Useful for debugging / verifying the result,
// or for handing the data to further processing (CPU-side analysis, a second pass, ...).
shader.dispatchWhenReady(dest.getSize().width, dest.getSize().height, 1).then(() => {
// dest?.readPixels()?.then((data) => {
// //console.log(data)
// })
})
const mat = new StandardMaterial('mat', scene)
mat.diffuseTexture = dest
ground.material = mat
}
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
createAxis()
createGui()
createGround()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>

Setting an image texture on the ground - 2 (sharedData)
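What is new in this version is var<workgroup> memory: every 16x16 workgroup stages its tile of pixels in shared memory and synchronizes with workgroupBarrier() before writing the tile back out. The heart of the shader, as a sketch (the sampler/src/dest declarations and the dummy textureSampleLevel call are the same as in the previous section):

const workGroupSize = 16
const computeSource = `
  // ...same bindings and dummy sample as in the component below...
  var<workgroup> sharedData: array<array<vec4<f32>, ${workGroupSize}>, ${workGroupSize}>;

  @compute @workgroup_size(${workGroupSize}, ${workGroupSize}, 1)
  fn main(@builtin(global_invocation_id) global_id: vec3<u32>,
          @builtin(local_invocation_id) local_id: vec3<u32>) {
    sharedData[local_id.y][local_id.x] = textureLoad(src, global_id.xy, 0); // stage this thread's pixel
    workgroupBarrier();                                                     // the whole tile is now visible
    textureStore(dest, vec2<i32>(global_id.xy), sharedData[local_id.y][local_id.x]);
  }
`
// the dispatch counts workgroups, not pixels: 512 / 16 = 32 groups per axis
await shader.dispatchWhenReady(imgSize / workGroupSize, imgSize / workGroupSize, 1)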
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="setTextureToGround2" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
Scene,
ArcRotateCamera,
Vector3,
Color4,
HemisphericLight,
MeshBuilder,
StandardMaterial,
WebGPUEngine,
Texture,
RawTexture,
ComputeShader,
} from 'babylonjs'
import {
AdvancedDynamicTexture,
StackPanel,
Control,
TextBlock,
} from 'babylonjs-gui'
let sceneResources, adt
const fps = ref(0)
const isRunning = ref(false)
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("setTextureToGround2") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1
camera.panningSensibility = 1010
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 160, -160))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(1, 1, 0), scene)
return light
}
const createAxis = () => {
const axisX = MeshBuilder.CreateLines(
'axisX', {
colors: [new Color4(1, 0, 0, 1), new Color4(1, 0, 0, 1)],
points: [new Vector3(0, 0, 0), new Vector3(80, 0, 0)]
},
scene
)
const axisY = MeshBuilder.CreateLines(
'axisY', {
colors: [new Color4(0, 1, 0, 1), new Color4(0, 1, 0, 1) ],
points: [new Vector3(0, 0, 0), new Vector3(0, 80, 0) ]
},
scene
)
const axisZ = MeshBuilder.CreateLines(
'axisZ', {
colors: [new Color4(0, 0, 1, 1), new Color4(0, 0, 1, 1)],
points: [new Vector3(0, 0, 0), new Vector3(0, 0, 80)]
},
scene
)
return [axisX, axisY, axisZ]
}
const createGui = async () => {
adt = AdvancedDynamicTexture.CreateFullscreenUI('UI')
const xBox = MeshBuilder.CreateBox('x', { size: 1 }, scene)
xBox.position = new Vector3(80, 0, 0)
const xPanel = new StackPanel()
xPanel.width = '20px'
xPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
xPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const x = new TextBlock()
x.text = 'X'
x.height = '30px'
x.color = 'red'
adt.addControl(xPanel)
xPanel.addControl(x)
xPanel.linkWithMesh(xBox)
const yBox = MeshBuilder.CreateBox('y', { size: 1 }, scene)
yBox.position = new Vector3(0, 80, 0)
const yPanel = new StackPanel()
yPanel.width = '20px'
yPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
yPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const y = new TextBlock()
y.text = 'Y'
y.height = '30px'
y.color = 'green'
adt.addControl(yPanel)
yPanel.addControl(y)
yPanel.linkWithMesh(yBox)
const zBox = MeshBuilder.CreateBox('z', { size: 1 }, scene)
zBox.position = new Vector3(0, 0, 80)
const zPanel = new StackPanel()
zPanel.width = '20px'
zPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
zPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const z = new TextBlock()
z.text = 'Z'
z.height = '30px'
z.color = 'blue'
adt.addControl(zPanel)
zPanel.addControl(z)
zPanel.linkWithMesh(zBox)
}
const createGround = async () => {
const imgSize = 512
const workGroupSize = 16
const ground = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
/* Load the source texture ------------------------------------------------------- */
const srcTexture = new Texture('/images/grass.png', scene)
/* Prepare a storage texture to receive the compute output ------------------------ */
const texA = RawTexture.CreateRGBAStorageTexture(null, imgSize, imgSize, scene, false, false)
const computeShader = `
@group(0) @binding(0) var sampler_src: sampler;
@group(0) @binding(1) var src: texture_2d<f32>;
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
var<workgroup> sharedData: array<array<vec4<f32>, ${workGroupSize}>, ${workGroupSize}>;
@compute @workgroup_size(${workGroupSize}, ${workGroupSize}, 1)
fn main(
@builtin(global_invocation_id) global_id: vec3<u32>,
@builtin(workgroup_id) group_id: vec3<u32>,
@builtin(local_invocation_id) local_id: vec3<u32>
) {
// A sample is required here or the pass will not run, even though origin_texture itself is never used
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
let origin_texture: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
let pixel = global_id.xy;
sharedData[local_id.y][local_id.x] = textureLoad(src, pixel, 0);
workgroupBarrier();
var final_texture: vec4<f32> = sharedData[local_id.y][local_id.x];
textureStore(dest, vec2<i32>(global_id.xy), final_texture);
}
`
const shader = new ComputeShader(
'myCompute',
engine,
{ computeSource: computeShader },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 }
}
}
)
/* Bind the textures --------------------------------------------------------------- */
shader.setTexture('src', srcTexture)
shader.setStorageTexture('dest', texA)
await shader.dispatchWhenReady(imgSize / workGroupSize, imgSize / workGroupSize, 1)
/* Attach the final result to a material -------------------------------------------- */
const mat = new StandardMaterial('mat', scene)
mat.diffuseTexture = texA
ground.material = mat
}
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
createAxis()
createGui()
createGround()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>

Setting an image texture on the ground - 3
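This section runs two passes over a 256x256 image with differently shaped workgroups: a row pass whose workgroups are 256x1 threads (one workgroup covers a whole row) and a column pass whose workgroups are 1x256 threads (one workgroup covers a whole column). Since the dispatch arguments count workgroups rather than threads, they are simply the image size divided by the workgroup size, as in this sketch (shaderRow and shaderCol are the two ComputeShader instances built in the component below):

const imgSize = 256
const workGroupSizeRowX = 256, workGroupSizeRowY = 1   // 256x1 threads: one workgroup per row
const workGroupSizeColX = 1, workGroupSizeColY = 256   // 1x256 threads: one workgroup per column

// row pass: (256 / 256, 256 / 1, 1) = (1, 256, 1) workgroups
await shaderRow.dispatchWhenReady(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
// column pass: (256 / 1, 256 / 256, 1) = (256, 1, 1) workgroups
await shaderCol.dispatchWhenReady(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)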
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="setTextureToGround3" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
Scene,
ArcRotateCamera,
Vector3,
Color4,
HemisphericLight,
MeshBuilder,
StandardMaterial,
WebGPUEngine,
Texture,
RawTexture,
ComputeShader,
// UniformBuffer,
} from 'babylonjs'
import {
AdvancedDynamicTexture,
StackPanel,
Control,
TextBlock,
} from 'babylonjs-gui'
let sceneResources, adt
const fps = ref(0)
const isRunning = ref(false)
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("setTextureToGround3") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1
camera.panningSensibility = 10
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 320, -200))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(1, 1, 0), scene)
return light
}
const createAxis = () => {
const axisX = MeshBuilder.CreateLines(
'axisX', {
colors: [new Color4(1, 0, 0, 1), new Color4(1, 0, 0, 1)],
points: [new Vector3(0, 0, 0), new Vector3(80, 0, 0)]
},
scene
)
const axisY = MeshBuilder.CreateLines(
'axisY', {
colors: [new Color4(0, 1, 0, 1), new Color4(0, 1, 0, 1) ],
points: [new Vector3(0, 0, 0), new Vector3(0, 80, 0) ]
},
scene
)
const axisZ = MeshBuilder.CreateLines(
'axisZ', {
colors: [new Color4(0, 0, 1, 1), new Color4(0, 0, 1, 1)],
points: [new Vector3(0, 0, 0), new Vector3(0, 0, 80)]
},
scene
)
return [axisX, axisY, axisZ]
}
const createGui = async () => {
adt = AdvancedDynamicTexture.CreateFullscreenUI('UI')
const xBox = MeshBuilder.CreateBox('x', { size: 1 }, scene)
xBox.position = new Vector3(80, 0, 0)
const xPanel = new StackPanel()
xPanel.width = '20px'
xPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
xPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const x = new TextBlock()
x.text = 'X'
x.height = '30px'
x.color = 'red'
adt.addControl(xPanel)
xPanel.addControl(x)
xPanel.linkWithMesh(xBox)
const yBox = MeshBuilder.CreateBox('y', { size: 1 }, scene)
yBox.position = new Vector3(0, 80, 0)
const yPanel = new StackPanel()
yPanel.width = '20px'
yPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
yPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const y = new TextBlock()
y.text = 'Y'
y.height = '30px'
y.color = 'green'
adt.addControl(yPanel)
yPanel.addControl(y)
yPanel.linkWithMesh(yBox)
const zBox = MeshBuilder.CreateBox('z', { size: 1 }, scene)
zBox.position = new Vector3(0, 0, 80)
const zPanel = new StackPanel()
zPanel.width = '20px'
zPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
zPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const z = new TextBlock()
z.text = 'Z'
z.height = '30px'
z.color = 'blue'
adt.addControl(zPanel)
zPanel.addControl(z)
zPanel.linkWithMesh(zBox)
}
const createGround = async () => {
const imgSize = 256
const workGroupSizeRowX = 256
const workGroupSizeRowY = 1
const workGroupSizeColX = 1
const workGroupSizeColY = 256
const groundOrigin = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
groundOrigin.position = new Vector3(0, 0, 74)
const groundRow = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
groundRow.position = new Vector3(-74, 0, -74)
const groundCol = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
groundCol.position = new Vector3(74, 0, -74)
/* Load the source texture ------------------------------------------------------- */
const srcTexture = new Texture('/images/mergeImage1.jpg', scene)
/* Prepare two storage textures, one for the row pass and one for the column pass -- */
const texRow = RawTexture.CreateRGBAStorageTexture(null, imgSize, imgSize, scene, false, false)
const texCol = RawTexture.CreateRGBAStorageTexture(null, imgSize, imgSize, scene, false, false)
const computeShaderRow = `
@group(0) @binding(0) var sampler_src: sampler;
@group(0) @binding(1) var src: texture_2d<f32>;
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
var<workgroup> sharedData: array<vec4<f32>, ${imgSize}>;
// workgroup_size defines the number of threads per workgroup as a 3D vector (x, y, z).
// workgroup_size = (256, 1, 1):
// x: 256 threads per workgroup along x.
// y: 1 thread per workgroup along y.
// z: 1 thread per workgroup along z.
// So each workgroup holds 256 threads that process one row of pixels (256 pixels) in parallel;
// one workgroup handles exactly one row, which is why y and z are 1.
@compute @workgroup_size(${workGroupSizeRowX}, ${workGroupSizeRowY}, 1)
fn main(
@builtin(global_invocation_id) global_id: vec3<u32>, // global_id.x ranges 0~255, global_id.y ranges 0~255
@builtin(local_invocation_id) local_id: vec3<u32> // local_id.x ranges 0~255 (256 threads per row), local_id.y is always 0 (1 thread)
) {
// A sample is required here or the pass will not run, even though origin_texture itself is never used
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
let origin_texture: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
// Operate along the row
for (var i = 0u; i < ${imgSize / 2}u; i++) {
let idx1 = vec2<u32>(i, global_id.y);
let idx2 = vec2<u32>(i + ${imgSize / 2}u, global_id.y);
let p1 = textureLoad(src, vec2<i32>(idx1), 0);
let p2 = textureLoad(src, vec2<i32>(idx2), 0);
sharedData[i] = p1 + p2 * 0.5;
sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
}
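// For one channel, with p1 = 0.6 and p2 = 0.2 the loop above stores 0.6 + 0.1 = 0.7 in the left half
// and 0.6 - 0.1 = 0.5 in the right half (values outside [0, 1] are clamped by rgba8unorm on store).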
workgroupBarrier();
// Second transform (based on the first pass result)
// for (var i = 0u; i < ${imgSize / 2}u; i++) {
// let p1 = sharedData[i];
// let p2 = sharedData[i + ${imgSize / 2}u];
// sharedData[i] = p1 + p2 * 0.5;
// sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
// }
// workgroupBarrier();
let final_texture: vec4<f32> = sharedData[local_id.x];
// Output the original image unchanged:
// let pixel = global_id.xy;
// sharedData[local_id.x] = textureLoad(src, pixel, 0);
// let final_texture: vec4<f32> = sharedData[local_id.x];
textureStore(dest, vec2<i32>(global_id.xy), final_texture);
}
`
const computeShaderCol = `
@group(0) @binding(0) var sampler_src: sampler;
@group(0) @binding(1) var src: texture_2d<f32>;
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
var<workgroup> sharedData: array<vec4<f32>, ${imgSize}>;
// workgroup_size defines the number of threads per workgroup as a 3D vector (x, y, z).
// workgroup_size = (1, 256, 1):
// x: 1 thread per workgroup along x.
// y: 256 threads per workgroup along y.
// z: 1 thread per workgroup along z.
// So each workgroup holds 256 threads that process one column of pixels (256 pixels) in parallel;
// one workgroup handles exactly one column, which is why x and z are 1.
@compute @workgroup_size(${workGroupSizeColX}, ${workGroupSizeColY}, 1)
fn main(
@builtin(global_invocation_id) global_id: vec3<u32>, // global_id.x ranges 0~255, global_id.y ranges 0~255
@builtin(local_invocation_id) local_id: vec3<u32> // local_id.x is always 0 (1 thread), local_id.y ranges 0~255 (256 threads per column)
) {
// A sample is required here or the pass will not run, even though origin_texture itself is never used
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
let origin_texture: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
// Operate along the column
for (var i = 0u; i < ${imgSize / 2}u; i++) {
let idx1 = vec2<u32>(global_id.x, i);
let idx2 = vec2<u32>(global_id.x, i + ${imgSize / 2}u);
let p1 = textureLoad(src, vec2<i32>(idx1), 0);
let p2 = textureLoad(src, vec2<i32>(idx2), 0);
sharedData[i] = p1 + p2 * 0.5;
sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
}
workgroupBarrier();
// Second transform (based on the first pass result)
// for (var i = 0u; i < ${imgSize / 2}u; i++) {
// let p1 = sharedData[i];
// let p2 = sharedData[i + ${imgSize / 2}u];
// sharedData[i] = p1 + p2 * 0.5;
// sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
// }
// workgroupBarrier();
let final_texture: vec4<f32> = sharedData[local_id.y];
// Output the original image unchanged:
// let pixel = global_id.xy;
// sharedData[local_id.y] = textureLoad(src, pixel, 0);
// let final_texture: vec4<f32> = sharedData[local_id.y];
textureStore(dest, vec2<i32>(global_id.xy), final_texture);
}
`
const shaderRow = new ComputeShader(
'myCompute',
engine,
{ computeSource: computeShaderRow },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 },
}
}
)
const shaderCol = new ComputeShader(
'myCompute',
engine,
{ computeSource: computeShaderCol },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 },
}
}
)
/* Bind the textures --------------------------------------------------------------- */
shaderRow.setTexture('src', srcTexture)
shaderRow.setStorageTexture('dest', texRow)
shaderCol.setTexture('src', srcTexture)
shaderCol.setStorageTexture('dest', texCol)
// dispatchWhenReady(1, 256, 1):
// x: launch 1 workgroup along x; a single workgroup is enough to cover one whole row.
// y: launch 256 workgroups along y; the image has 256 rows and each row gets one workgroup.
// z: launch 1 workgroup along z; we are processing a 2D image.
// In short, shaderRow.dispatchWhenReady(1, 256, 1) splits the 256*256 image into 256 rows,
// and workgroup_size = (256, 1, 1) splits each row across 256 threads.
await shaderRow.dispatchWhenReady(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
// dispatchWhenReady(256, 1, 1):
// x: launch 256 workgroups along x; the image has 256 columns and each column gets one workgroup.
// y: launch 1 workgroup along y; a single workgroup is enough to cover one whole column.
// z: launch 1 workgroup along z; we are processing a 2D image.
await shaderCol.dispatchWhenReady(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)
/* Second dispatch: column transform texA → texB (kept here as a commented sketch) -- */
// dirBuffer.updateInt('dir', 2) // dir = 2 -> columns
// dirBuffer.update()
// shader.setTexture('src', texA) // the input becomes the previous pass's output
// shader.setStorageTexture('dest', texB)
// await shader.dispatchWhenReady(X, Y, 1)
/* Attach the final results to materials -------------------------------------------- */
const matOrigin = new StandardMaterial('matOrigin', scene)
matOrigin.diffuseTexture = srcTexture
groundOrigin.material = matOrigin
const matRow = new StandardMaterial('matRow', scene)
matRow.diffuseTexture = texRow
groundRow.material = matRow
const matCol = new StandardMaterial('matCol', scene)
matCol.diffuseTexture = texCol
groundCol.material = matCol
}
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
createAxis()
createGui()
createGround()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>

Setting an image texture on the ground - 4 (uniform: time)
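New in this section: a UniformBuffer feeds a time value into the row shader (declared there as @binding(3) var<uniform> uTime: f32), and both passes are re-dispatched from the render loop so the output texture animates. The essential pattern is sketched below (shaderRow, shaderCol, imgSize and the workgroup sizes are the ones defined in the component that follows):

import { UniformBuffer } from 'babylonjs'

const timeBuffer = new UniformBuffer(engine)
timeBuffer.addUniform('uTime', 1)               // the size argument is in float components (1 for a single f32)
shaderRow.setUniformBuffer('uTime', timeBuffer) // 'uTime' also needs an entry in bindingsMapping

let uTime = 0
scene.registerBeforeRender(() => {
  uTime += 0.02
  timeBuffer.updateFloat('uTime', uTime)
  timeBuffer.update()
  // re-run both passes with the new value; dispatch() is the fire-and-forget variant of dispatchWhenReady()
  shaderRow.dispatch(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
  shaderCol.dispatch(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)
})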
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="setTextureToGround4" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
Scene,
ArcRotateCamera,
Vector3,
Color4,
HemisphericLight,
MeshBuilder,
StandardMaterial,
WebGPUEngine,
Texture,
RawTexture,
ComputeShader,
UniformBuffer,
} from 'babylonjs'
import {
AdvancedDynamicTexture,
StackPanel,
Control,
TextBlock,
} from 'babylonjs-gui'
let sceneResources, adt
let uTime = 0.002
const fps = ref(0)
const isRunning = ref(false)
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("setTextureToGround4") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1
camera.panningSensibility = 10
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 320, -200))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(1, 1, 0), scene)
return light
}
const createAxis = () => {
const axisX = MeshBuilder.CreateLines(
'axisX', {
colors: [new Color4(1, 0, 0, 1), new Color4(1, 0, 0, 1)],
points: [new Vector3(0, 0, 0), new Vector3(80, 0, 0)]
},
scene
)
const axisY = MeshBuilder.CreateLines(
'axisY', {
colors: [new Color4(0, 1, 0, 1), new Color4(0, 1, 0, 1) ],
points: [new Vector3(0, 0, 0), new Vector3(0, 80, 0) ]
},
scene
)
const axisZ = MeshBuilder.CreateLines(
'axisZ', {
colors: [new Color4(0, 0, 1, 1), new Color4(0, 0, 1, 1)],
points: [new Vector3(0, 0, 0), new Vector3(0, 0, 80)]
},
scene
)
return [axisX, axisY, axisZ]
}
const createGui = async () => {
adt = AdvancedDynamicTexture.CreateFullscreenUI('UI')
const xBox = MeshBuilder.CreateBox('x', { size: 1 }, scene)
xBox.position = new Vector3(80, 0, 0)
const xPanel = new StackPanel()
xPanel.width = '20px'
xPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
xPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const x = new TextBlock()
x.text = 'X'
x.height = '30px'
x.color = 'red'
adt.addControl(xPanel)
xPanel.addControl(x)
xPanel.linkWithMesh(xBox)
const yBox = MeshBuilder.CreateBox('y', { size: 1 }, scene)
yBox.position = new Vector3(0, 80, 0)
const yPanel = new StackPanel()
yPanel.width = '20px'
yPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
yPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const y = new TextBlock()
y.text = 'Y'
y.height = '30px'
y.color = 'green'
adt.addControl(yPanel)
yPanel.addControl(y)
yPanel.linkWithMesh(yBox)
const zBox = MeshBuilder.CreateBox('z', { size: 1 }, scene)
zBox.position = new Vector3(0, 0, 80)
const zPanel = new StackPanel()
zPanel.width = '20px'
zPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
zPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const z = new TextBlock()
z.text = 'Z'
z.height = '30px'
z.color = 'blue'
adt.addControl(zPanel)
zPanel.addControl(z)
zPanel.linkWithMesh(zBox)
}
const createGround = async () => {
const imgSize = 256
const workGroupSizeRowX = 256
const workGroupSizeRowY = 1
const workGroupSizeColX = 1
const workGroupSizeColY = 256
const groundOrigin = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
groundOrigin.position = new Vector3(0, 0, 74)
const groundChange = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
groundChange.position = new Vector3(0, 0, -74)
/* Load the source texture ------------------------------------------------------- */
const srcTexture = new Texture('/images/mergeImage1.jpg', scene)
/* Prepare two storage textures: the column pass reads the output of the row pass -- */
const texRow = RawTexture.CreateRGBAStorageTexture(null, imgSize, imgSize, scene, false, false)
const texCol = RawTexture.CreateRGBAStorageTexture(null, imgSize, imgSize, scene, false, false)
const computeShaderRow = `
@group(0) @binding(0) var sampler_src: sampler;
@group(0) @binding(1) var src: texture_2d<f32>;
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
@group(0) @binding(3) var<uniform> uTime: f32;
var<workgroup> sharedData: array<vec4<f32>, ${imgSize}>;
@compute @workgroup_size(${workGroupSizeRowX}, ${workGroupSizeRowY}, 1)
fn main(
@builtin(global_invocation_id) global_id: vec3<u32>, // global_id.x ranges 0~255, global_id.y ranges 0~255
@builtin(local_invocation_id) local_id: vec3<u32> // local_id.x ranges 0~255 (256 threads per row), local_id.y is always 0 (1 thread)
) {
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
let origin_texture: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
for (var i = 0u; i < ${imgSize / 2}u; i++) {
let idx1 = vec2<u32>(i, global_id.y);
let idx2 = vec2<u32>(i + ${imgSize / 2}u, global_id.y);
let p1 = textureLoad(src, vec2<i32>(idx1), 0);
let p2 = textureLoad(src, vec2<i32>(idx2), 0);
sharedData[i] = p1 + p2 * sin(uTime);
sharedData[i + ${imgSize / 2}u] = p1 - p2 * cos(uTime);
}
workgroupBarrier();
// for (var i = 0u; i < ${imgSize / 2}u; i++) {
// let p1 = sharedData[i];
// let p2 = sharedData[i + ${imgSize / 2}u];
// sharedData[i] = p1 + p2 * 0.5;
// sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
// }
// workgroupBarrier();
let final_texture: vec4<f32> = sharedData[local_id.x];
textureStore(dest, vec2<i32>(global_id.xy), final_texture);
}
`
const computeShaderCol = `
@group(0) @binding(0) var sampler_src: sampler;
@group(0) @binding(1) var src: texture_2d<f32>;
@group(0) @binding(2) var dest: texture_storage_2d<rgba8unorm, write>;
var<workgroup> sharedData: array<vec4<f32>, ${imgSize}>;
@compute @workgroup_size(${workGroupSizeColX}, ${workGroupSizeColY}, 1)
fn main(
@builtin(global_invocation_id) global_id: vec3<u32>, // global_id.x ranges 0~255, global_id.y ranges 0~255
@builtin(local_invocation_id) local_id: vec3<u32> // local_id.x is always 0 (1 thread), local_id.y ranges 0~255 (256 threads per column)
) {
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
let origin_texture: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
// for (var i = 0u; i < ${imgSize / 2}u; i++) {
// let idx1 = vec2<u32>(global_id.x, i);
// let idx2 = vec2<u32>(global_id.x, i + ${imgSize / 2}u);
// let p1 = textureLoad(src, vec2<i32>(idx1), 0);
// let p2 = textureLoad(src, vec2<i32>(idx2), 0);
// sharedData[i] = p1 + p2 * 0.5;
// sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
// }
// workgroupBarrier();
// for (var i = 0u; i < ${imgSize / 2}u; i++) {
// let p1 = sharedData[i];
// let p2 = sharedData[i + ${imgSize / 2}u];
// sharedData[i] = p1 + p2 * 0.5;
// sharedData[i + ${imgSize / 2}u] = p1 - p2 * 0.5;
// }
// workgroupBarrier();
// let final_texture: vec4<f32> = sharedData[local_id.y];
let pixel = global_id.xy;
sharedData[local_id.y] = textureLoad(src, pixel, 0);
let final_texture: vec4<f32> = sharedData[local_id.y];
textureStore(dest, vec2<i32>(global_id.xy), final_texture);
}
`
const shaderRow = new ComputeShader(
'myCompute',
engine,
{ computeSource: computeShaderRow },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 },
'uTime': { group: 0, binding: 3 },
}
}
)
const shaderCol = new ComputeShader(
'myCompute',
engine,
{ computeSource: computeShaderCol },
{ bindingsMapping: {
'src': { group: 0, binding: 1 },
'dest': { group: 0, binding: 2 },
}
}
)
const timeBuffer = new UniformBuffer(engine)
timeBuffer.addUniform('uTime', 4)
/* Bind the textures --------------------------------------------------------------- */
// shaderRow.setTexture('src', srcTexture)
// shaderRow.setStorageTexture('dest', texRow)
// await shaderRow.dispatchWhenReady(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
// shaderCol.setTexture('src', texRow)
// shaderCol.setStorageTexture('dest', texCol)
// await shaderCol.dispatchWhenReady(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)
const matOrigin = new StandardMaterial('matOrigin', scene)
matOrigin.diffuseTexture = srcTexture
groundOrigin.material = matOrigin
const matFinal = new StandardMaterial('matFinal', scene)
matFinal.diffuseTexture = texCol
groundChange.material = matFinal
shaderRow.setTexture('src', srcTexture)
shaderRow.setStorageTexture('dest', texRow)
shaderCol.setTexture('src', texRow)
shaderCol.setStorageTexture('dest', texCol)
scene.registerBeforeRender(async() => {
uTime += 0.02
timeBuffer.updateFloat('uTime', uTime)
timeBuffer.update()
// Re-run the compute shaders
shaderRow.setUniformBuffer('uTime', timeBuffer)
// shaderRow.setTexture('src', srcTexture)
// shaderRow.setStorageTexture('dest', texRow)
await shaderRow.dispatchWhenReady(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
// shaderRow.dispatch(imgSize / workGroupSizeRowX, imgSize / workGroupSizeRowY, 1)
// shaderCol.setTexture('src', texRow)
// shaderCol.setStorageTexture('dest', texCol)
await shaderCol.dispatchWhenReady(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)
// shaderCol.dispatch(imgSize / workGroupSizeColX, imgSize / workGroupSizeColY, 1)
})
}
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
createAxis()
createGui()
createGround()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>

Wave
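This last demo chains the two previous ideas: a compute shader rewrites the red channel of a storage texture from uTime every frame, and a GLSL ShaderMaterial on the ground reads that texture in its vertex shader to displace the vertices, so the wave animates entirely on the GPU. Per frame the loop reduces to this sketch (shader, timeBuffer, dest and waveGroundShader are created in the component below):

waveGroundShader.setTexture('uSampler', dest) // the vertex shader displaces Y from this texture
ground.material = waveGroundShader

scene.registerBeforeRender(() => {
  uTime += 0.01
  timeBuffer.updateFloat('uTime', uTime)
  timeBuffer.update()
  shader.setUniformBuffer('uTime', timeBuffer)
  shader.dispatch(dest.getSize().width, dest.getSize().height, 1) // repaint dest, one thread per pixel
})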
<template>
<div>
<div class="flex space-between">
<div>fps: {{ fps }}</div>
<div @click="onTrigger" class="pointer">点击{{ !isRunning ? '运行' : '关闭' }}</div>
</div>
<canvas v-if="isRunning" id="waveGround" class="stage"></canvas>
</div>
</template>
<script lang="ts" setup>
import { onMounted, ref, nextTick, onUnmounted } from 'vue'
import {
Scene,
ArcRotateCamera,
Vector3,
Color4,
HemisphericLight,
MeshBuilder,
StandardMaterial,
WebGPUEngine,
Texture,
RawTexture,
ComputeShader,
UniformBuffer,
Effect,
ShaderMaterial,
} from 'babylonjs'
import {
AdvancedDynamicTexture,
StackPanel,
Control,
TextBlock,
} from 'babylonjs-gui'
let sceneResources, adt, uTime = 0.01
const fps = ref(0)
const isRunning = ref(false)
const onTrigger = async () => {
if (!isRunning.value) {
isRunning.value = true
await nextTick()
sceneResources = await initScene()
} else {
isRunning.value = false
destroy()
}
}
const initScene = async () => {
const ele = document.getElementById("waveGround") as any
ele.addEventListener('wheel', function(event) {
// handle the wheel here if needed,
// e.g. adjust the camera radius or angles
event.preventDefault() // prevent the default page scroll
})
const engine: any = new WebGPUEngine(ele)
await engine.initAsync()
const scene = new Scene(engine)
scene.useRightHandedSystem = false
const camera = new ArcRotateCamera('camera', -Math.PI / 1.5, Math.PI / 2.2, 15, new Vector3(0, 0, 0), scene)
camera.upperBetaLimit = Math.PI / 2.2
camera.wheelPrecision = 1
camera.panningSensibility = 10
camera.attachControl(ele, true)
camera.setPosition(new Vector3(0, 160, -160))
const createLight = () => {
const light = new HemisphericLight('light',new Vector3(1, 1, 0), scene)
return light
}
const createAxis = () => {
const axisX = MeshBuilder.CreateLines(
'axisX', {
colors: [new Color4(1, 0, 0, 1), new Color4(1, 0, 0, 1)],
points: [new Vector3(0, 0, 0), new Vector3(80, 0, 0)]
},
scene
)
const axisY = MeshBuilder.CreateLines(
'axisY', {
colors: [new Color4(0, 1, 0, 1), new Color4(0, 1, 0, 1) ],
points: [new Vector3(0, 0, 0), new Vector3(0, 80, 0) ]
},
scene
)
const axisZ = MeshBuilder.CreateLines(
'axisZ', {
colors: [new Color4(0, 0, 1, 1), new Color4(0, 0, 1, 1)],
points: [new Vector3(0, 0, 0), new Vector3(0, 0, 80)]
},
scene
)
return [axisX, axisY, axisZ]
}
const createGui = async () => {
adt = AdvancedDynamicTexture.CreateFullscreenUI('UI')
const xBox = MeshBuilder.CreateBox('x', { size: 1 }, scene)
xBox.position = new Vector3(80, 0, 0)
const xPanel = new StackPanel()
xPanel.width = '20px'
xPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
xPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const x = new TextBlock()
x.text = 'X'
x.height = '30px'
x.color = 'red'
adt.addControl(xPanel)
xPanel.addControl(x)
xPanel.linkWithMesh(xBox)
const yBox = MeshBuilder.CreateBox('y', { size: 1 }, scene)
yBox.position = new Vector3(0, 80, 0)
const yPanel = new StackPanel()
yPanel.width = '20px'
yPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
yPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const y = new TextBlock()
y.text = 'Y'
y.height = '30px'
y.color = 'green'
adt.addControl(yPanel)
yPanel.addControl(y)
yPanel.linkWithMesh(yBox)
const zBox = MeshBuilder.CreateBox('z', { size: 1 }, scene)
zBox.position = new Vector3(0, 0, 80)
const zPanel = new StackPanel()
zPanel.width = '20px'
zPanel.horizontalAlignment = Control.HORIZONTAL_ALIGNMENT_RIGHT
zPanel.verticalAlignment = Control.VERTICAL_ALIGNMENT_BOTTOM
const z = new TextBlock()
z.text = 'Z'
z.height = '30px'
z.color = 'blue'
adt.addControl(zPanel)
zPanel.addControl(z)
zPanel.linkWithMesh(zBox)
}
const createGround = async () => {
const ground = MeshBuilder.CreateGround('Ground', { width: 128, height: 128, subdivisions: 128 }, scene)
Effect.ShadersStore['waveGroundVertexShader'] = `
precision highp float;
attribute vec3 position;
attribute vec2 uv;
uniform mat4 worldViewProjection;
uniform sampler2D uSampler;
varying vec2 vUV;
void main() {
vUV = uv;
vec4 baseColor = texture(uSampler, vUV);
vec3 newPosition = position;
newPosition.y = cos(baseColor.r * 10.0 + newPosition.x * 5.0);
gl_Position = worldViewProjection * vec4(newPosition, 1.0);
}
`
Effect.ShadersStore['waveGroundFragmentShader'] = `
precision highp float;
uniform sampler2D uSampler;
varying vec2 vUV;
void main() {
vec4 baseColor = texture(uSampler, vUV);
gl_FragColor = baseColor;
}
`
const copyTextureComputeShader = `
@group(0) @binding(1) var sampler_src: sampler;
@group(0) @binding(2) var src: texture_2d<f32>;
@group(0) @binding(3) var dest: texture_storage_2d<rgba8unorm, write>;
@group(0) @binding(4) var<uniform> uTime: f32;
@compute @workgroup_size(1, 1, 1)
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
let dims: vec2<f32> = vec2<f32>(textureDimensions(src, 0));
var pix: vec4<f32> = textureSampleLevel(src, sampler_src, vec2<f32>(global_id.xy) / dims, 0.0);
pix.r = uTime - 1.0 * floor(uTime / 1.0); // i.e. fract(uTime): keeps the red channel cycling in [0, 1)
textureStore(dest, vec2<i32>(global_id.xy), pix);
}
`
const shader = new ComputeShader(
'myCompute',
engine,
{ computeSource: copyTextureComputeShader },
{ bindingsMapping: {
'src': { group: 0, binding: 2 },
'dest': { group: 0, binding: 3 },
'uTime': { group: 0, binding: 4 },
}
}
)
const src = new Texture('/images/grass.png', scene)
const dest = RawTexture.CreateRGBAStorageTexture(null, 512, 512, scene, false, false)
const timeBuffer = new UniformBuffer(engine)
timeBuffer.addUniform('uTime', 4)
const mat = new StandardMaterial('mat', scene)
mat.diffuseTexture = dest
const waveGroundShader = new ShaderMaterial(
'waveGround',
scene, {
vertex: 'waveGround',
fragment: 'waveGround',
}, {
attributes: ['position', 'uv'],
uniforms: ['worldViewProjection', 'uSampler'],
samplers: ['uSampler'],
needAlphaBlending: true,
},
)
waveGroundShader.setTexture('uSampler', dest)
// use the custom shader material
ground.material = waveGroundShader
// or use the standard material instead
// ground.material = mat
scene.registerBeforeRender(() => {
uTime += 0.01
timeBuffer.updateFloat('uTime', uTime)
timeBuffer.update()
shader.setUniformBuffer('uTime', timeBuffer)
shader.setTexture('src', src)
shader.setStorageTexture('dest', dest)
shader.dispatch(dest.getSize().width, dest.getSize().height, 1)
})
}
const runAnimate = () => {
engine.runRenderLoop(function() {
if (scene && scene.activeCamera) {
scene.render()
fps.value = engine.getFps().toFixed(2)
}
})
}
createLight()
createAxis()
createGui()
createGround()
runAnimate()
return {
scene,
engine,
}
}
const destroy = () => {
if (sceneResources) {
sceneResources.engine.stopRenderLoop()
sceneResources.engine.dispose()
sceneResources.scene.dispose()
sceneResources = null
}
if (adt) {
adt.dispose()
adt = null
}
}
onMounted(async() => {
await nextTick()
})
onUnmounted(() => {
destroy()
})
</script>