[orx-jvm] Move panel, gui, dnk3, keyframer, triangulation to orx-jvm

Edwin Jakobs
2021-06-27 21:32:24 +02:00
parent 5814acef8f
commit 874d49779f
159 changed files with 22 additions and 21 deletions


@@ -0,0 +1,76 @@
# orx-dnk3
A scene-graph-based 3D renderer with support for glTF-based assets. A minimal usage sketch follows the feature list below.
Status: in development
Supported glTF features:
- [x] Scene hierarchy
- [x] Loading mesh data
  - [x] Glb
- [ ] Materials
  - [x] Basic materials
  - [x] Normal maps
  - [x] Metallic/roughness maps
- [x] Skinning
- [x] Double-sided materials
- [ ] Transparency
- [x] Animations
- [ ] Cameras
- [ ] Lights
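## Usage
A minimal sketch, condensed from the glTF demos in this module: it loads a model from the repository's demo data, adds a hemisphere light and draws the scene with `dryRenderer`. Substitute your own `.gltf`/`.glb` path as needed.

```kotlin
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import java.io.File

suspend fun main() = application {
    program {
        // load a glTF asset and wrap it in a scene
        val gltf = loadGltfFromFile(File("demo-data/gltf-models/duck/Duck.gltf"))
        val scene = Scene(SceneNode())
        // -- add a simple hemisphere light
        scene.root.entities.add(HemisphereLight().apply {
            upColor = ColorRGBa.WHITE.shade(1.0)
            downColor = ColorRGBa.WHITE.shade(0.1)
        })
        scene.root.children.addAll(gltf.buildSceneNodes().scenes.first())
        // -- create a renderer and a camera, then draw the scene every frame
        val renderer = dryRenderer()
        extend(Orbital()) {
            far = 500.0
            lookAt = Vector3(0.0, 0.8, 0.0)
            eye = Vector3(3.0, 0.8, -2.0)
        }
        extend {
            drawer.clear(ColorRGBa.PINK)
            renderer.draw(drawer, scene)
        }
    }
}
```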
<!-- __demos__ -->
## Demos
### DemoAnimations01
[source code](src/demo/kotlin/DemoAnimations01.kt)
![DemoAnimations01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoAnimations01Kt.png)
### DemoCamera01
[source code](src/demo/kotlin/DemoCamera01.kt)
![DemoCamera01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoCamera01Kt.png)
### DemoDSL01
[source code](src/demo/kotlin/DemoDSL01.kt)
![DemoDSL01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoDSL01Kt.png)
### DemoDSL02
[source code](src/demo/kotlin/DemoDSL02.kt)
![DemoDSL02Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoDSL02Kt.png)
### DemoLights01
[source code](src/demo/kotlin/DemoLights01.kt)
![DemoLights01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoLights01Kt.png)
### DemoLights02
[source code](src/demo/kotlin/DemoLights02.kt)
![DemoLights02Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoLights02Kt.png)
### DemoLights03
[source code](src/demo/kotlin/DemoLights03.kt)
![DemoLights03Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoLights03Kt.png)
### DemoObject01
[source code](src/demo/kotlin/DemoObject01.kt)
![DemoObject01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoObject01Kt.png)
### DemoScene01
[source code](src/demo/kotlin/DemoScene01.kt)
![DemoScene01Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoScene01Kt.png)
### DemoScene02
[source code](src/demo/kotlin/DemoScene02.kt)
![DemoScene02Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoScene02Kt.png)
### DemoScene03
[source code](src/demo/kotlin/DemoScene03.kt)
![DemoScene03Kt](https://raw.githubusercontent.com/openrndr/orx/media/orx-dnk3/images/DemoScene03Kt.png)


@@ -0,0 +1,30 @@
sourceSets {
demo {
java {
srcDirs = ["src/demo/kotlin"]
compileClasspath += main.getCompileClasspath()
runtimeClasspath += main.getRuntimeClasspath()
}
}
}
dependencies {
implementation "com.google.code.gson:gson:$gsonVersion"
implementation(project(":orx-fx"))
implementation(project(":orx-jvm:orx-keyframer"))
implementation(project(":orx-easing"))
implementation(project(":orx-shader-phrases"))
implementation(project(":orx-mesh-generators"))
demoImplementation(project(":orx-camera"))
demoImplementation(project(":orx-mesh-generators"))
demoImplementation(project(":orx-noise"))
demoImplementation("org.openrndr:openrndr-application:$openrndrVersion")
demoImplementation("org.openrndr:openrndr-extensions:$openrndrVersion")
demoImplementation("org.openrndr:openrndr-ffmpeg:$openrndrVersion")
demoImplementation("org.openrndr:openrndr-filter:$openrndrVersion")
demoRuntimeOnly("org.openrndr:openrndr-gl3:$openrndrVersion")
demoRuntimeOnly("org.openrndr:openrndr-gl3-natives-$openrndrOS:$openrndrVersion")
demoImplementation(sourceSets.getByName("main").output)
}


@@ -0,0 +1,59 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import org.openrndr.math.mod_
import org.openrndr.math.transforms.transform
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/box-animated/BoxAnimated.glb"))
val scene = Scene(SceneNode())
// -- add some lights
val lightNode = SceneNode()
lightNode.transform = transform {
translate(0.0, 10.0, 0.0)
rotate(Vector3.UNIT_X, -65.0)
}
lightNode.entities.add(DirectionalLight())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa.BLUE.shade(0.4)
downColor = ColorRGBa.GRAY.shade(0.1)
})
scene.root.children.add(lightNode)
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
extend(Orbital()) {
far = 50.0
eye = Vector3(1.5, 0.0, 3.0)
fov = 40.0
}
extend {
sceneData.animations[0].applyToTargets(seconds.mod_(sceneData.animations[0].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,49 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.math.*
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/camera/Scene.glb"))
val scene = Scene(SceneNode())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa(0.1, 0.1, 0.4)
downColor = ColorRGBa(0.1, 0.0, 0.0)
})
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
val cameras = scene.root.findContent { this as? PerspectiveCamera }
extend {
sceneData.animations[0].applyToTargets(seconds.mod_(sceneData.animations[0].duration))
drawer.view = cameras[0].content.viewMatrix
drawer.projection = cameras[0].content.projectionMatrix
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,86 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.dsl.*
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extra.dnk3.tools.addSkybox
import org.openrndr.extras.camera.Orbital
import org.openrndr.extras.meshgenerators.boxMesh
import org.openrndr.extras.meshgenerators.groundPlaneMesh
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
suspend fun main() = application {
configure {
width = 1280
height = 720
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
extend(Orbital()) {
eye = Vector3(4.0, 4.0, 4.0)
}
val renderer = dryRenderer()
val scene = scene {
addSkybox("file:demo-data/cubemaps/garage_iem.dds")
root.hemisphereLight {
upColor = ColorRGBa.WHITE.shade(0.1)
downColor = ColorRGBa.BLACK
}
root.node {
transform = transform {
translate(0.0, 2.0, 0.0)
}
pointLight {
constantAttenuation = 0.0
quadraticAttenuation = 1.0
}
}
root.node {
simpleMesh {
vertexBuffer = groundPlaneMesh(100.0, 100.0)
material = pbrMaterial {
color = ColorRGBa.GREEN
}
}
}
for (j in -3..3) {
for (i in -3..3) {
root.node {
transform = transform {
translate(i * 2.0, 1.0, j * 2.0)
}
update {
transform = transform {
translate(i * 2.0, 1.0, j * 2.0)
rotate(Vector3.UNIT_Z, seconds * 45.0 + i * 20.0 + j * 50.0)
}
}
simpleMesh {
vertexBuffer = boxMesh()
material = pbrMaterial {
color = ColorRGBa.WHITE
}
}
}
}
}
}
extend {
drawer.clear(ColorRGBa.BLACK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,87 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.dsl.*
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extra.dnk3.tools.addSkybox
import org.openrndr.extra.noise.simplex
import org.openrndr.extras.camera.Orbital
import org.openrndr.extras.meshgenerators.groundPlaneMesh
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import org.openrndr.shape.path3D
suspend fun main() = application {
configure {
width = 1280
height = 720
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
extend(Orbital()) {
eye = Vector3(4.0, 4.0, 4.0)
}
val renderer = dryRenderer()
val scene = scene {
addSkybox("file:demo-data/cubemaps/garage_iem.dds")
root.hemisphereLight {
upColor = ColorRGBa.WHITE.shade(0.1)
downColor = ColorRGBa.BLACK
}
root.node {
transform = transform {
translate(0.0, 2.0, 0.0)
}
pointLight {
constantAttenuation = 0.0
quadraticAttenuation = 1.0
}
}
root.node {
simpleMesh {
vertexBuffer = groundPlaneMesh(100.0, 100.0)
material = pbrMaterial {
color = ColorRGBa.GREEN
}
}
}
root.node {
pathMesh {
weight = 10.0
material = pbrMaterial {
color = ColorRGBa.PINK
}
update {
paths = mutableListOf(
path3D {
val t = seconds * 0.1
moveTo(Vector3.ZERO)
val control = Vector3.simplex(3032, t).let { it.copy(y = it.y * 0.5 + 0.5) } * 4.0
val target = Vector3.simplex(5077, t).let { it.copy(y = it.y * 0.5 + 0.5) } * 4.0
val end = Vector3.simplex(9041, t).let { it.copy(y = it.y * 0.5 + 0.5) } * 4.0
curveTo(control, target)
continueTo(end)
}
)
}
}
}
}
extend {
drawer.clear(ColorRGBa.BLACK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,115 @@
import kotlinx.coroutines.yield
import org.openrndr.*
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.BufferMultisample
import org.openrndr.draw.ColorFormat
import org.openrndr.draw.ColorType
import org.openrndr.draw.DrawPrimitive
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.features.IrradianceSH
import org.openrndr.extra.dnk3.features.addIrradianceSH
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.post.ScreenspaceReflections
import org.openrndr.extra.dnk3.post.VolumetricIrradiance
import org.openrndr.extra.dnk3.renderers.postRenderer
import org.openrndr.extra.shaderphrases.annotations.ShaderPhrases
import org.openrndr.extras.camera.Orbital
import org.openrndr.extras.meshgenerators.sphereMesh
import org.openrndr.ffmpeg.ScreenRecorder
import org.openrndr.filter.color.Delinearize
import org.openrndr.math.Matrix44
import org.openrndr.math.Spherical
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.scale
import org.openrndr.math.transforms.transform
import org.openrndr.math.transforms.translate
import java.io.File
import kotlin.math.cos
import kotlin.math.sin
suspend fun main() = application {
configure {
width = 1280
height = 720
multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/irradiance-probes/model.glb"))
val scene = Scene(SceneNode())
val probeBox = sphereMesh(16, 16, 0.1)
val probeGeometry = Geometry(listOf(probeBox), null, DrawPrimitive.TRIANGLES, 0, probeBox.vertexCount)
val c = 5
scene.addIrradianceSH(c, c, c, 3.0 / c, cubemapSize = 32, offset = Vector3(0.0, 0.0, 0.0))
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = postRenderer()
// renderer.postSteps.add(
// FilterPostStep(1.0, ScreenspaceReflections(), listOf("color", "clipDepth", "viewNormal"), "reflections", ColorFormat.RGB, ColorType.FLOAT16) {
// val p = Matrix44.scale(drawer.width / 2.0, drawer.height / 2.0, 1.0) * Matrix44.translate(Vector3(1.0, 1.0, 0.0)) * drawer.projection
// this.projection = p
// this.projectionMatrixInverse = drawer.projection.inversed
// }
// )
// renderer.postSteps.add(
// FilterPostStep(1.0, VolumetricIrradiance(), listOf("color", "clipDepth"), "volumetric-irradiance", ColorFormat.RGB, ColorType.FLOAT16) {
// this.irradianceSH = scene.features[0] as IrradianceSH
// this.projectionMatrixInverse = drawer.projection.inversed
// this.viewMatrixInverse = drawer.view.inversed
// }
// )
renderer.postSteps.add(
FilterPostStep(1.0, Delinearize(), listOf("color"), "ldr", ColorFormat.RGB, ColorType.FLOAT16)
)
val orb = extend(Orbital()) {
this.fov = 20.0
camera.setView(Vector3(-0.49, -0.24, 0.20), Spherical(26.56, 90.0, 6.533), 40.0)
}
renderer.draw(drawer, scene)
val dynNode = SceneNode()
val dynMaterial = PBRMaterial()
val dynPrimitive = MeshPrimitive(probeGeometry, dynMaterial)
val dynMesh = Mesh(listOf(dynPrimitive))
dynNode.entities.add(dynMesh)
scene.root.children.add(dynNode)
scene.dispatcher.launch {
while (true) {
dynNode.transform = transform {
translate(cos(seconds) * 0.5, 0.5, sin(seconds) * 0.5)
scale(2.0)
}
yield()
}
}
extend {
drawer.clear(ColorRGBa.BLACK)
renderer.draw(drawer, scene)
drawer.defaults()
}
}
}


@@ -0,0 +1,49 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.*
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/point-light/Scene.glb"))
val scene = Scene(SceneNode())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa(0.1, 0.1, 0.4)
downColor = ColorRGBa(0.1, 0.0, 0.0)
})
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
val orb = extend(Orbital()) {
far = 50.0
camera.setView(Vector3.ZERO, Spherical(30.50, 26.0, 5.6), 40.0)
}
extend {
sceneData.animations[0].applyToTargets(seconds.mod_(sceneData.animations[0].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,50 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.*
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/spot-light/Scene.glb"))
val scene = Scene(SceneNode())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa(0.1, 0.1, 0.4)
downColor = ColorRGBa(0.1, 0.0, 0.0)
})
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
val orb = extend(Orbital()) {
far = 50.0
camera.setView(Vector3(-0.514, -0.936, -1.122), Spherical(454.346, 25.0, 8.444), 40.0)
}
extend {
sceneData.animations[0].applyToTargets(seconds.mod_(sceneData.animations[0].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,49 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.*
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/directional-light/Scene.glb"))
val scene = Scene(SceneNode())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa(0.1, 0.1, 0.4)
downColor = ColorRGBa(0.1, 0.0, 0.0)
})
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
val orb = extend(Orbital()) {
camera.setView(Vector3(-0.49, -0.24, 0.20), Spherical(26.56, 90.0, 6.533), 40.0)
}
extend {
sceneData.animations[0].applyToTargets(seconds.mod_(sceneData.animations[0].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,47 @@
import org.openrndr.application
import org.openrndr.draw.DrawPrimitive
import org.openrndr.draw.shadeStyle
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import java.io.File
suspend fun main() = application {
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/duck/Duck.gltf"))
val meshes = gltf.meshes.map {
it.createDrawCommands(gltf)
}
extend(Orbital()) {
far = 400.0
lookAt = Vector3(0.0, 50.0, 0.0)
eye = Vector3(100.0, 200.0, 150.0)
fov = 45.0
}
extend {
drawer.shadeStyle = shadeStyle {
fragmentTransform = """
x_fill.rgb = vec3(v_viewNormal.z);
""".trimIndent()
}
for (mesh in meshes) {
for (primitive in mesh) {
if (primitive.indexBuffer == null) {
drawer.vertexBuffer(primitive.vertexBuffer, DrawPrimitive.TRIANGLES)
} else {
drawer.vertexBuffer(primitive.indexBuffer!!, listOf(primitive.vertexBuffer), DrawPrimitive.TRIANGLES)
}
}
}
}
}
}


@@ -0,0 +1,56 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/suzanne/Suzanne.gltf"))
val scene = Scene(SceneNode())
// -- add some lights
val lightNode = SceneNode()
lightNode.transform = transform {
translate(0.0, 10.0, 0.0)
rotate(Vector3.UNIT_X, -65.0)
}
lightNode.entities.add(DirectionalLight())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa.BLUE.shade(0.4)
downColor = ColorRGBa.GRAY.shade(0.1)
})
scene.root.children.add(lightNode)
scene.root.children.addAll(gltf.buildSceneNodes().scenes.first())
// -- create a renderer
val renderer = dryRenderer()
extend(Orbital()) {
far = 50.0
eye = Vector3(1.5, 0.0, 3.0)
fov = 40.0
}
extend {
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,58 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/duck/Duck.gltf"))
val scene = Scene(SceneNode())
// -- add some lights
val lightNode = SceneNode()
lightNode.transform = transform {
translate(0.0, 10.0, 0.0)
rotate(Vector3.UNIT_X, -90.0)
}
lightNode.entities.add(DirectionalLight())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa.WHITE.shade(1.0)
downColor = ColorRGBa.WHITE.shade(0.1)
})
scene.root.children.add(lightNode)
scene.root.children.addAll(gltf.buildSceneNodes().scenes.first())
// -- create a renderer
val renderer = dryRenderer()
extend(Orbital()) {
far = 500.0
lookAt = Vector3(0.0, 0.8, 0.0)
eye = Vector3(3.0, 0.8, -2.0)
fov = 30.0
}
extend {
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,60 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.DrawPrimitive
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.extras.meshgenerators.sphereMesh
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val root = SceneNode()
val scene = Scene(root)
val lightNode = SceneNode()
lightNode.transform = transform {
translate(0.0, 10.0, 0.0)
}
lightNode.entities.add(PointLight())
lightNode.entities.add(HemisphereLight(upColor = ColorRGBa.PINK, downColor = ColorRGBa(0.1, 0.1, 0.1)))
scene.root.children.add(lightNode)
val meshNode = SceneNode()
val box = sphereMesh(32, 32)
val geometry = Geometry(listOf(box), null, DrawPrimitive.TRIANGLES, 0, box.vertexCount)
val material = PBRMaterial()
val primitive = MeshPrimitive(geometry, material)
val mesh = Mesh(listOf(primitive))
meshNode.entities.add(mesh)
root.children.add(meshNode)
// -- create a renderer
val renderer = dryRenderer()
extend(Orbital()) {
far = 500.0
lookAt = Vector3(0.0, 0.0, 0.0)
eye = Vector3(3.0, 2.0, -3.0)
fov = 30.0
}
extend {
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,49 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.BufferMultisample
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.segmentContourRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import org.openrndr.math.mod_
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/fox/Fox.glb"))
val scene = Scene(SceneNode())
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer, try it with BufferMultisample.SampleCount(8) for better results
val renderer = segmentContourRenderer(BufferMultisample.Disabled)
extend(Orbital()) {
far = 500.0
lookAt = Vector3(0.0, 40.0, 0.0)
eye = Vector3(150.0, 40.0, 200.0)
fov = 40.0
}
extend {
sceneData.animations[2].applyToTargets(seconds.mod_(sceneData.animations[2].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,53 @@
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.dryRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.math.Vector3
import org.openrndr.math.mod_
import java.io.File
suspend fun main() = application {
configure {
width = 1280
height = 720
//multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/fox/Fox.glb"))
val scene = Scene(SceneNode())
scene.root.entities.add(HemisphereLight().apply {
upColor = ColorRGBa.WHITE.shade(0.4)
downColor = ColorRGBa.GRAY.shade(0.1)
})
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = dryRenderer()
extend(Orbital()) {
far = 500.0
lookAt = Vector3(0.0, 40.0, 0.0)
eye = Vector3(150.0, 40.0, 200.0)
fov = 40.0
}
extend {
sceneData.animations[2].applyToTargets(seconds.mod_(sceneData.animations[2].duration))
drawer.clear(ColorRGBa.PINK)
renderer.draw(drawer, scene)
}
}
}


@@ -0,0 +1,114 @@
import kotlinx.coroutines.yield
import org.openrndr.*
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extensions.SingleScreenshot
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.features.addVoxelConeTracing
import org.openrndr.extra.dnk3.gltf.buildSceneNodes
import org.openrndr.extra.dnk3.gltf.loadGltfFromFile
import org.openrndr.extra.dnk3.renderers.postRenderer
import org.openrndr.extras.camera.Orbital
import org.openrndr.extras.meshgenerators.sphereMesh
import org.openrndr.filter.color.Delinearize
import org.openrndr.math.Spherical
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import java.io.File
import kotlin.math.cos
import kotlin.math.sin
suspend fun main() = application {
configure {
width = 1280
height = 720
multisample = WindowMultisample.SampleCount(8)
}
program {
if (System.getProperty("takeScreenshot") == "true") {
extend(SingleScreenshot()) {
this.outputFile = System.getProperty("screenshotPath")
}
}
val gltf = loadGltfFromFile(File("demo-data/gltf-models/irradiance-probes/model.glb"))
val scene = Scene(SceneNode())
val probeBox = sphereMesh(16, 16, 0.1)
val probeGeometry = Geometry(listOf(probeBox), null, DrawPrimitive.TRIANGLES, 0, probeBox.vertexCount)
val c = 5
// scene.addIrradianceSH(c, c, c, 3.0 / c, cubemapSize = 32, offset = Vector3(0.0, 0.0, 0.0))
val vctFeature = scene.addVoxelConeTracing(64, 64, 64, 0.1)
val sceneData = gltf.buildSceneNodes()
scene.root.children.addAll(sceneData.scenes.first())
// -- create a renderer
val renderer = postRenderer()
// renderer.postSteps.add(
// FilterPostStep(1.0, ScreenspaceReflections(), listOf("color", "clipDepth", "viewNormal"), "reflections", ColorFormat.RGB, ColorType.FLOAT16) {
// val p = Matrix44.scale(drawer.width / 2.0, drawer.height / 2.0, 1.0) * Matrix44.translate(Vector3(1.0, 1.0, 0.0)) * drawer.projection
// this.projection = p
// this.projectionMatrixInverse = drawer.projection.inversed
// }
// )
// renderer.postSteps.add(
// FilterPostStep(1.0, VolumetricIrradiance(), listOf("color", "clipDepth"), "volumetric-irradiance", ColorFormat.RGB, ColorType.FLOAT16) {
// this.irradianceSH = scene.features[0] as IrradianceSH
// this.projectionMatrixInverse = drawer.projection.inversed
// this.viewMatrixInverse = drawer.view.inversed
// }
// )
renderer.postSteps.add(
FilterPostStep(1.0, Delinearize(), listOf("color"), "ldr", ColorFormat.RGB, ColorType.FLOAT16)
)
val orb = extend(Orbital()) {
this.fov = 20.0
camera.setView(Vector3(-0.49, -0.24, 0.20), Spherical(26.56, 90.0, 6.533), 40.0)
}
renderer.draw(drawer, scene)
val dynNode = SceneNode()
val dynMaterial = PBRMaterial()
val dynPrimitive = MeshPrimitive(probeGeometry, dynMaterial)
val dynMesh = Mesh(listOf(dynPrimitive))
dynNode.entities.add(dynMesh)
scene.root.children.add(dynNode)
scene.dispatcher.launch {
while (true) {
dynNode.transform = transform {
translate(cos(seconds) * 0.5, 0.5, sin(seconds) * 0.5)
scale(2.0)
}
yield()
}
}
val viz = colorBuffer(64, 64)
extend {
drawer.clear(ColorRGBa.BLACK)
renderer.draw(drawer, scene)
drawer.defaults()
for (i in 0 until 128) {
vctFeature.voxelMap?.let {
it.copyTo(viz, i)
}
drawer.image(viz, ((i * 128) % width).toDouble(), ((i * 128) / width * 128).toDouble())
}
drawer.image(vctFeature.voxelRenderTarget!!.colorBuffer(0))
}
}
}


@@ -0,0 +1,47 @@
package org.openrndr.extra.dnk3
import org.openrndr.math.Matrix44
import org.openrndr.math.transforms.ortho
import org.openrndr.math.transforms.perspective
class PerspectiveCamera(var node: SceneNode) : Camera() {
override val projectionMatrix: Matrix44
get() = perspective(fov, aspectRatio, near, far)
override val viewMatrix: Matrix44
get() = node.worldTransform.inversed
var aspectRatio: Double = 16.0 / 9.0
var fov = 45.0
var far = 100.0
var near = 0.1
override fun hashCode(): Int {
var result = aspectRatio.hashCode()
result = 31 * result + fov.hashCode()
result = 31 * result + far.hashCode()
result = 31 * result + near.hashCode()
return result
}
}
class OrthographicCamera(var node: SceneNode) : Camera() {
override val projectionMatrix: Matrix44
get() = ortho(xMag, yMag, near, far)
override val viewMatrix: Matrix44
get() = node.worldTransform.inversed
var xMag = 1.0
var yMag = 1.0
var near = 0.1
var far = 100.0
override fun hashCode(): Int {
var result = xMag.hashCode()
result = 31 * result + yMag.hashCode()
result = 31 * result + near.hashCode()
result = 31 * result + far.hashCode()
return result
}
}


@@ -0,0 +1,103 @@
package org.openrndr.extra.dnk3
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.DrawPrimitive
import org.openrndr.draw.IndexBuffer
import org.openrndr.draw.VertexBuffer
import org.openrndr.math.Matrix44
import org.openrndr.math.transforms.perspective
import org.openrndr.shape.Path3D
class Geometry(val vertexBuffers: List<VertexBuffer>,
val indexBuffer: IndexBuffer?,
val primitive: DrawPrimitive,
val offset: Int,
val vertexCount: Int) {
override fun toString(): String {
return "Geometry(vertexBuffers: $vertexBuffers, indexBuffers: $indexBuffer, primitive: $primitive, offset: $offset, vertexCount: $vertexCount)"
}
override fun hashCode(): Int {
var result = 0
result = 31 * result + primitive.ordinal.hashCode()
result = 31 * result + offset.hashCode()
result = 31 * result + vertexCount.hashCode()
return result
}
}
val DummyGeometry = Geometry(emptyList(), null, DrawPrimitive.TRIANGLES, 0, 0)
sealed class Entity
class MeshPrimitive(var geometry: Geometry, var material: Material) {
override fun toString(): String {
return "MeshPrimitive(geometry: $geometry, material: $material)"
}
override fun hashCode(): Int {
var result = geometry.hashCode()
result = 31 * result + material.hashCode()
return result
}
}
class MeshPrimitiveInstance(val primitive: MeshPrimitive, val instances: Int, val attributes: List<VertexBuffer>)
class PathMesh(var paths: MutableList<Path3D>, var material: Material, var weight: Double) : Entity() {
override fun toString(): String {
return "PathMesh(paths=$paths)"
}
override fun hashCode(): Int {
return paths.hashCode()
}
}
abstract class MeshBase(var primitives: List<MeshPrimitive>) : Entity()
class Mesh(primitives: List<MeshPrimitive>) : MeshBase(primitives) {
override fun toString(): String {
return "Mesh(primitives: $primitives)"
}
override fun hashCode(): Int {
return primitives.hashCode()
}
}
class SkinnedMesh(primitives: List<MeshPrimitive>,
val joints: List<SceneNode>,
val skeleton: SceneNode,
val inverseBindMatrices: List<Matrix44>
) : MeshBase(primitives)
class InstancedMesh(primitives: List<MeshPrimitive>,
var instances: Int,
var attributes: List<VertexBuffer>) : MeshBase(primitives)
data class Fog(var color: ColorRGBa = ColorRGBa.WHITE, var end: Double = 100.0) : Entity()
abstract class Light : Entity() {
var color: ColorRGBa = ColorRGBa.WHITE
}
abstract class Camera : Entity() {
abstract val projectionMatrix: Matrix44
abstract val viewMatrix: Matrix44
}
abstract class CubemapProbe : Entity() {
open val projectionMatrix: Matrix44
get() {
return perspective(90.0, 1.0, 0.1, 150.0)
}
var dirty = true
}
class IrradianceProbe : CubemapProbe() {
override fun hashCode(): Int {
return true.hashCode()
}
}


@@ -0,0 +1,158 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.BlendMode
import org.openrndr.draw.ColorFormat
import org.openrndr.draw.ColorType
enum class FacetType(val shaderFacet: String) {
WORLD_POSITION("f_worldPosition"),
VIEW_POSITION("f_viewPosition"),
CLIP_POSITION("f_clipPosition"),
WORLD_NORMAL("f_worldNormal"),
VIEW_NORMAL("f_viewNormal"),
SPECULAR("f_specular"),
DIFFUSE("f_diffuse"),
EMISSIVE("f_emission"),
AMBIENT("f_ambient"),
OCCLUSION("f_occlusion"),
FRAGMENT_ID("f_fragmentID"),
COLOR("m_color"),
}
abstract class FacetCombiner(val facets: Set<FacetType>, val targetOutput: String) {
abstract fun generateShader(): String
override fun toString(): String {
return "FacetCombiner(facets=$facets, targetOutput='$targetOutput')"
}
}
abstract class ColorBufferFacetCombiner(facets: Set<FacetType>,
targetOutput: String,
val format: ColorFormat,
val type: ColorType,
val blendMode: BlendMode = BlendMode.BLEND) : FacetCombiner(facets, targetOutput) {
}
class MomentsFacet : ColorBufferFacetCombiner(setOf(FacetType.WORLD_POSITION), "moments", ColorFormat.RG, ColorType.FLOAT16) {
override fun generateShader(): String {
return """
float depth = length(v_viewPosition);
float dx = dFdx(depth);
float dy = dFdy(depth);
o_$targetOutput = vec4(depth, depth*depth + 0.25 * dx*dx+dy*dy, 0.0, 1.0);
"""
}
}
class DiffuseSpecularFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR),
"diffuseSpecular", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4( max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0), f_specular.rgb), 1.0);"
}
class DiffuseSpecularAlphaFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR),
"diffuseSpecular", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4( (max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0), f_specular.rgb)) * f_alpha, f_alpha);"
}
class AmbientOcclusionFacet : ColorBufferFacetCombiner(setOf(FacetType.AMBIENT, FacetType.OCCLUSION),
"ambientOcclusion", ColorFormat.RGBa, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4(f_ambient, f_occlusion);"
}
class MaterialFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE),
"material", ColorFormat.RGBa, ColorType.UINT8) {
override fun generateShader(): String =
"o_$targetOutput = vec4(m_metalness, m_roughness, 0.0, 1.0);"
}
class BaseColorFacet : ColorBufferFacetCombiner(setOf(FacetType.COLOR),
"baseColor", ColorFormat.RGB, ColorType.UINT8) {
override fun generateShader(): String = "o_$targetOutput = vec4(m_color.rgb, 1.0);"
}
class DiffuseFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE),
"diffuse", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4( max(vec3(0.0), f_diffuse.rgb), 1.0 );"
}
class SpecularFacet : ColorBufferFacetCombiner(setOf(FacetType.SPECULAR),
"diffuseSpecular", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4( max(vec3(0.0), f_specular.rgb), 1.0);"
}
class EmissiveFacet: ColorBufferFacetCombiner(setOf(FacetType.EMISSIVE),
"emissive", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String =
"o_$targetOutput = vec4(f_emission, 1.0);"
}
class EmissiveAlphaFacet: ColorBufferFacetCombiner(setOf(FacetType.EMISSIVE),
"emissive", ColorFormat.RGB, ColorType.FLOAT16, BlendMode.OVER) {
override fun generateShader(): String =
"o_$targetOutput = vec4(f_emission, f_alpha);"
}
class PositionFacet : ColorBufferFacetCombiner(setOf(FacetType.WORLD_POSITION), "position", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String = "o_$targetOutput = vec4(v_worldPosition.rgb, 1.0);"
}
class NormalFacet : ColorBufferFacetCombiner(setOf(FacetType.WORLD_NORMAL), "normal", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String = "o_$targetOutput = vec4(v_worldNormal.rgb, 1.0);"
}
class ViewDepthFacet : ColorBufferFacetCombiner(setOf(FacetType.VIEW_POSITION), "viewDepth", ColorFormat.R, ColorType.FLOAT16) {
override fun generateShader(): String = "o_$targetOutput.r = v_viewPosition.z;"
}
class ClipDepthFacet : ColorBufferFacetCombiner(setOf(FacetType.CLIP_POSITION), "clipDepth", ColorFormat.R, ColorType.FLOAT32) {
override fun generateShader(): String = "o_$targetOutput.r = gl_FragCoord.z;"
}
class ViewPositionFacet : ColorBufferFacetCombiner(setOf(FacetType.VIEW_POSITION), "viewPosition", ColorFormat.RGB, ColorType.FLOAT32) {
override fun generateShader(): String = "o_$targetOutput.rgb = v_viewPosition.rgb;"
}
class ViewNormalFacet : ColorBufferFacetCombiner(setOf(FacetType.VIEW_NORMAL), "viewNormal", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader(): String = "o_$targetOutput.rgb = normalize( (u_viewNormalMatrix * vec4(f_worldNormal,0.0)).xyz );"
}
class ClipPositionFacet : ColorBufferFacetCombiner(setOf(FacetType.CLIP_POSITION), "position", ColorFormat.RGB, ColorType.FLOAT16) {
override fun generateShader() = "o_$targetOutput.rgb = gl_FragCoord.xyz;"
}
class FragmentIDFacet: ColorBufferFacetCombiner(setOf(FacetType.FRAGMENT_ID), "fragmentID", ColorFormat.R, ColorType.UINT16_INT) {
override fun generateShader(): String {
return "o_$targetOutput = f_fragmentID;"
}
}
class LDRColorFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR, FacetType.EMISSIVE), "color", ColorFormat.RGBa, ColorType.UINT8) {
override fun generateShader() = """
vec3 finalColor = (max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0),f_specular.rgb) + max(vec3(0.0), f_emission.rgb) + max(vec3(0.0), f_ambient.rgb)) * (1.0 - f_fog.a) + f_fog.rgb * f_fog.a;
o_$targetOutput = pow(vec4(finalColor.rgb, 1.0), vec4(1.0/2.2));
o_$targetOutput *= m_color.a;
"""
}
class HDRColorFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR, FacetType.EMISSIVE), "color", ColorFormat.RGBa, ColorType.FLOAT16) {
override fun generateShader() = """
vec3 finalColor = (max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0),f_specular.rgb) + max(vec3(0.0), f_emission.rgb) + max(vec3(0.0), f_ambient.rgb)) * (1.0 - f_fog.a) + f_fog.rgb * f_fog.a;
o_$targetOutput = vec4(finalColor.rgb, 1.0);
o_$targetOutput *= m_color.a;
"""
}
class DiffuseIrradianceFacet : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR), "color", ColorFormat.RGBa, ColorType.UINT8) {
override fun generateShader() = """
vec3 finalColor = (max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0), f_emission.rgb));
o_$targetOutput = vec4(finalColor.rgb, 1.0);
"""
}


@@ -0,0 +1,13 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.Drawer
interface Feature {
fun <T : Feature> update(
drawer: Drawer,
sceneRenderer: SceneRenderer,
scene: Scene,
feature: T,
context: RenderContext
)
}


@@ -0,0 +1,82 @@
package org.openrndr.extra.dnk3
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.Cubemap
import org.openrndr.draw.RenderTarget
import org.openrndr.math.Matrix44
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.ortho
import org.openrndr.math.transforms.perspective
data class LightContext(val lights: List<NodeContent<Light>>,
val shadowMaps: Map<ShadowLight, RenderTarget>)
interface AttenuatedLight {
var constantAttenuation: Double
var linearAttenuation: Double
var quadraticAttenuation: Double
}
class DirectionalLight(var direction: Vector3 = -Vector3.UNIT_Z, override var shadows: Shadows = Shadows.None) : Light(), ShadowLight {
var projectionSize = 50.0
override fun projection(renderTarget: RenderTarget): Matrix44 {
return ortho(-projectionSize / 2.0, projectionSize / 2.0, -projectionSize / 2.0, projectionSize / 2.0, 1.0, 150.0)
}
override fun hashCode(): Int {
return color.hashCode()
}
}
class SpotLight(var direction: Vector3 = -Vector3.UNIT_Z, var innerAngle: Double = 45.0, var outerAngle: Double = 90.0) : Light(), ShadowLight, AttenuatedLight {
override var constantAttenuation = 1.0
override var linearAttenuation = 0.0
override var quadraticAttenuation = 0.0
override var shadows: Shadows = Shadows.None
override fun projection(renderTarget: RenderTarget): Matrix44 {
return perspective(outerAngle * 2.0, renderTarget.width * 1.0 / renderTarget.height, 1.0, 150.0)
}
override fun hashCode(): Int {
var result = direction.hashCode()
result = 31 * result + innerAngle.hashCode()
result = 31 * result + outerAngle.hashCode()
result = 31 * result + constantAttenuation.hashCode()
result = 31 * result + linearAttenuation.hashCode()
result = 31 * result + quadraticAttenuation.hashCode()
return result
}
}
class HemisphereLight(var direction: Vector3 = Vector3.UNIT_Y,
var upColor: ColorRGBa = ColorRGBa.WHITE,
var downColor: ColorRGBa = ColorRGBa.BLACK) : Light() {
var irradianceMap: Cubemap? = null
override fun hashCode(): Int {
var result = direction.hashCode()
result = 31 * result + upColor.hashCode()
result = 31 * result + downColor.hashCode()
return result
}
}
class PointLight(var constantAttenuation: Double = 1.0,
var linearAttenuation: Double = 0.0,
var quadraticAttenuation: Double = 1.0) : Light() {
override fun hashCode(): Int {
var result = constantAttenuation.hashCode()
result = 31 * result + linearAttenuation.hashCode()
result = 31 * result + quadraticAttenuation.hashCode()
result = 31 * result + color.hashCode()
return result
}
}
class AmbientLight : Light() {
override fun hashCode(): Int {
return color.hashCode()
}
}


@@ -0,0 +1,59 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.features.IrradianceSH
import org.openrndr.math.Vector3
interface Material {
val name: String?
var doubleSided: Boolean
var transparent: Boolean
val fragmentID: Int
fun generateShadeStyle(context: MaterialContext, primitiveContext: PrimitiveContext): ShadeStyle
fun applyToShadeStyle(context: MaterialContext, shadeStyle: ShadeStyle)
}
class DummyMaterial : Material {
override var name: String? = null
override var doubleSided: Boolean = true
override var transparent: Boolean = false
override var fragmentID = 0
override fun generateShadeStyle(context: MaterialContext, primitiveContext: PrimitiveContext): ShadeStyle {
return shadeStyle {
fragmentPreamble = """
int f_fragmentID = p_fragmentID;
""".trimIndent()
fragmentTransform = """
x_fill.rgb = vec3(normalize(v_viewNormal).z);
""".trimIndent()
parameter("fragmentID", fragmentID)
}
}
override fun applyToShadeStyle(context: MaterialContext, shadeStyle: ShadeStyle) {
}
}
data class MaterialContext(val pass: RenderPass,
val lights: List<NodeContent<Light>>,
val fogs: List<NodeContent<Fog>>,
val shadowMaps: Map<ShadowLight, RenderTarget>,
val meshCubemaps: Map<Mesh, Cubemap>,
val irradianceProbeCount: Int
) {
var irradianceSH: IrradianceSH? = null
}
data class PrimitiveContext(val hasNormalAttribute: Boolean, val hasSkinning: Boolean)
data class ContextKey(val materialContext: MaterialContext, val primitiveContext: PrimitiveContext)


@@ -0,0 +1,727 @@
package org.openrndr.extra.dnk3
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.cubemap.glslEvaluateSH
import org.openrndr.extra.dnk3.cubemap.glslFetchSH
import org.openrndr.extra.dnk3.cubemap.genGlslGatherSH
import org.openrndr.extra.shaderphrases.phrases.phraseTbnMatrix
import org.openrndr.math.Vector2
import org.openrndr.math.Vector3
import org.openrndr.math.Vector4
import org.openrndr.math.transforms.normalMatrix
import java.nio.ByteBuffer
import kotlin.math.cos
private val noise128 by lazy {
val cb = colorBuffer(128, 128)
val items = cb.width * cb.height * cb.format.componentCount
val buffer = ByteBuffer.allocateDirect(items)
for (y in 0 until cb.height) {
for (x in 0 until cb.width) {
for (i in 0 until 4)
buffer.put((Math.random() * 255).toInt().toByte())
}
}
buffer.rewind()
cb.write(buffer)
cb.generateMipmaps()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
cb
}
private fun PointLight.fs(index: Int, hasNormalAttribute: Boolean): String = """
|{
| vec3 Lr = p_lightPosition$index - v_worldPosition;
| float distance = length(Lr);
| float attenuation = 1.0 / (p_lightConstantAttenuation$index +
| p_lightLinearAttenuation$index * distance + p_lightQuadraticAttenuation$index * distance * distance);
| vec3 L = normalize(Lr);
|
| float side = ${if (hasNormalAttribute) "dot(L, N)" else "3.1415"};
| f_diffuse += attenuation * max(0, side / 3.1415) * p_lightColor$index.rgb * m_color.rgb;
| f_specular += attenuation * ggx(N, V, L, m_roughness, m_f0) * p_lightColor$index.rgb * m_color.rgb;
}
""".trimMargin()
private fun AmbientLight.fs(index: Int): String = "f_ambient += p_lightColor$index.rgb * ((1.0 - m_metalness) * m_color.rgb);"
private fun DirectionalLight.fs(index: Int, hasNormalAttribute: Boolean) = """
|{
| vec3 L = normalize(-p_lightDirection$index);
| float attenuation = 1.0;
| vec3 H = normalize(V + L);
| float NoL = ${if (hasNormalAttribute) "clamp(dot(N, L), 0.0, 1.0)" else "1"};
| float LoH = clamp(dot(L, H), 0.0, 1.0);
| float NoH = ${if (hasNormalAttribute) "clamp(dot(N, H), 0.0, 1.0)" else "1"};
| vec3 Lr = (p_lightPosition$index - v_worldPosition);
//| vec3 L = normalize(Lr);
| ${shadows.fs(index)}
|
| f_diffuse += NoL * attenuation * Fd_Burley(m_roughness * m_roughness, NoV, NoL, LoH) * p_lightColor$index.rgb * m_color.rgb * m_ambientOcclusion;
| float Dg = D_GGX(m_roughness * m_roughness, NoH, H);
| float Vs = V_SmithGGXCorrelated(m_roughness * m_roughness, NoV, NoL);
| vec3 F = F_Schlick(m_color.rgb * (m_metalness) + 0.04 * (1.0-m_metalness), LoH);
| vec3 Fr = (Dg * Vs) * F;
| f_specular += NoL * attenuation * Fr * p_lightColor$index.rgb * m_ambientOcclusion;
|}
""".trimMargin()
private fun HemisphereLight.fs(index: Int, hasNormalAttribute: Boolean): String = """
|{
| float f = ${if (hasNormalAttribute) "dot(N, p_lightDirection$index) * 0.5 + 0.5" else "1"};
| vec3 irr = ${irradianceMap?.let { "texture(p_lightIrradianceMap$index, N).rgb" } ?: "vec3(1.0)"};
| f_diffuse += mix(p_lightDownColor$index.rgb, p_lightUpColor$index.rgb, f) * irr * ((1.0 - m_metalness) * m_color.rgb) * m_ambientOcclusion;
|}
""".trimMargin()
private fun SpotLight.fs(index: Int, hasNormalAttribute: Boolean): String {
val shadows = shadows
return """
|{
| vec3 Lr = p_lightPosition$index - v_worldPosition;
| float distance = length(Lr);
| float attenuation = 1.0 / (p_lightConstantAttenuation$index +
| p_lightLinearAttenuation$index * distance + p_lightQuadraticAttenuation$index * distance * distance);
| attenuation = 1.0;
| vec3 L = normalize(Lr);
| float NoL = ${if (hasNormalAttribute) "clamp(dot(N, L), 0.0, 1.0)" else "1"};
| float side = dot(L, N);
| float hit = max(dot(-L, p_lightDirection$index), 0.0);
| float falloff = clamp((hit - p_lightOuterCos$index) / (p_lightInnerCos$index - p_lightOuterCos$index), 0.0, 1.0);
| attenuation *= falloff;
| ${shadows.fs(index)}
| {
| vec3 H = normalize(V + L);
| float LoH = clamp(dot(L, H), 0.0, 1.0);
| float NoH = ${if (hasNormalAttribute) "clamp(dot(N, H), 0.0, 1.0)" else 1.0};
| f_diffuse += NoL * (0.1+0.9*attenuation) * Fd_Burley(m_roughness * m_roughness, NoV, NoL, LoH) * p_lightColor$index.rgb * m_color.rgb ;
| float Dg = D_GGX(m_roughness * m_roughness, NoH, H);
| float Vs = V_SmithGGXCorrelated(m_roughness * m_roughness, NoV, NoL);
| vec3 F = F_Schlick(m_color.rgb * (m_metalness) + 0.04 * (1.0-m_metalness), LoH);
| vec3 Fr = (Dg * Vs) * F;
| f_specular += NoL * attenuation * Fr * p_lightColor$index.rgb;
| }
}
""".trimMargin()
}
private fun Fog.fs(index: Int): String = """
|{
| float dz = min(1.0, -v_viewPosition.z/p_fogEnd$index);
| f_fog = vec4(p_fogColor$index.rgb, dz);
|}
""".trimMargin()
sealed class TextureSource
object DummySource : TextureSource() {
override fun toString(): String {
return "DummySource()"
}
}
abstract class TextureFromColorBuffer(var texture: ColorBuffer, var textureFunction: TextureFunction) : TextureSource()
class TextureFromCode(val code: String) : TextureSource() {
override fun hashCode(): Int {
return code.hashCode()
}
}
private fun TextureFromCode.fs(index: Int, target: TextureTarget) = """
|vec4 tex$index = vec4(0.0, 0.0, 0.0, 1.0);
|{
|vec4 texOut;
|$code;
|tex$index = texOut;
|}
"""
enum class TextureFunction(val function: (String, String) -> String) {
TILING({ texture, uv -> "texture($texture, $uv)" }),
NOT_TILING({ texture, uv -> "textureNoTile(p_textureNoise, $texture, x_noTileOffset, $uv)" })
;
}
/**
* @param texture the texture to sample from
* @param input input coordinates, default is "va_texCoord0.xy"
* @param tangentInput optional tangent attribute input; when set, the sampled value is treated as a tangent-space normal and transformed by the TBN matrix
* @param textureFunction the texture function to use, default is TextureFunction.TILING
* @param pre the pre-fetch shader code to inject, can only adjust "x_texCoord"
* @param post the post-fetch shader code to inject, can only adjust "x_texture"
*/
class ModelCoordinates(texture: ColorBuffer,
var input: String = "va_texCoord0.xy",
var tangentInput: String? = null,
textureFunction: TextureFunction = TextureFunction.TILING,
var pre: String? = null,
var post: String? = null) : TextureFromColorBuffer(texture, textureFunction) {
override fun toString(): String {
return "ModelCoordinates(texture: $texture, input: $input, $tangentInput: $tangentInput, textureFunction: $textureFunction, pre: $pre, post: $post)"
}
override fun hashCode(): Int {
var result = input.hashCode()
result = 31 * result + (tangentInput?.hashCode() ?: 0)
result = 31 * result + (pre?.hashCode() ?: 0)
result = 31 * result + (post?.hashCode() ?: 0)
return result
}
}
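// Usage sketch (illustrative, not part of this file): attach a color map sampled with the default
// model UV coordinates and a tangent-space normal map to a PBRMaterial. The image paths, the
// loadImage calls and the "va_tangent" attribute name are assumptions for the example.
//
//   val material = PBRMaterial().apply {
//       textures.add(Texture(ModelCoordinates(loadImage("data/textures/albedo.png")), TextureTarget.COLOR))
//       textures.add(Texture(ModelCoordinates(loadImage("data/textures/normal.png"),
//               tangentInput = "va_tangent"), TextureTarget.NORMAL))
//   }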
class Triplanar(texture: ColorBuffer,
var scale: Double = 1.0,
var offset: Vector3 = Vector3.ZERO,
var sharpness: Double = 2.0,
textureFunction: TextureFunction = TextureFunction.TILING,
var pre: String? = null,
var post: String? = null) : TextureFromColorBuffer(texture, textureFunction) {
init {
texture.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
texture.wrapU = WrapMode.REPEAT
texture.wrapV = WrapMode.REPEAT
}
override fun hashCode(): Int {
var result = scale.hashCode()
result = 31 * result + offset.hashCode()
result = 31 * result + sharpness.hashCode()
result = 31 * result + (pre?.hashCode() ?: 0)
result = 31 * result + (post?.hashCode() ?: 0)
return result
}
}
private fun ModelCoordinates.fs(index: Int) = """
|vec4 tex$index = vec4(0.0, 0.0, 0.0, 1.0);
|{
| vec2 x_texCoord = $input;
| vec2 x_noTileOffset = vec2(0.0);
| vec4 x_texture;
| ${if (pre != null) "{ $pre } " else ""}
| x_texture = ${textureFunction.function("p_texture$index", "x_texCoord")};
| ${if (post != null) "{ $post } " else ""}
| ${if (tangentInput != null) {
"""
| vec3 normal = normalize(va_normal.xyz);
| vec3 tangent = normalize(${tangentInput}.xyz);
| vec3 bitangent = cross(normal, tangent) * ${tangentInput}.w;
| mat3 tbn = mat3(tangent, bitangent, normal);
| x_texture.rgb = tbn * normalize( (x_texture.rgb - vec3(0.5, 0.5, 0.0))*vec3(2.0, 2.0, 1.0)) ;
""".trimMargin()
} else ""}
| tex$index = x_texture;
|}
""".trimMargin()
private fun Triplanar.fs(index: Int, target: TextureTarget) = """
|vec4 tex$index = vec4(0.0, 0.0, 0.0, 1.0);
|{
| vec3 x_normal = va_normal;
| vec3 x_position = va_position;
| float x_scale = p_textureTriplanarScale$index;
| vec3 x_offset = p_textureTriplanarOffset$index;
| vec2 x_noTileOffset = vec2(0.0);
| ${if (pre != null) "{ $pre } " else ""}
| vec3 n = normalize(x_normal);
| vec3 an = abs(n);
| vec2 uvY = x_position.xz * x_scale + x_offset.x;
| vec2 uvX = x_position.zy * x_scale + x_offset.y;
| vec2 uvZ = x_position.xy * x_scale + x_offset.z;
| vec4 tY = ${textureFunction.function("p_texture$index", "uvY")};
| vec4 tX = ${textureFunction.function("p_texture$index", "uvX")};
| vec4 tZ = ${textureFunction.function("p_texture$index", "uvZ")};
| vec3 weights = pow(an, vec3(p_textureTriplanarSharpness$index));
| weights = weights / (weights.x + weights.y + weights.z);
| tex$index = tX * weights.x + tY * weights.y + weights.z * tZ;
| ${if (target == TextureTarget.NORMAL) """
| vec3 tnX = normalize( tX.xyz - vec3(0.5, 0.5, 0.0));
| vec3 tnY = normalize( tY.xyz - vec3(0.5, 0.5, 0.0)) * vec3(1.0, -1.0, 1.0);
| vec3 tnZ = normalize( tZ.xyz - vec3(0.5, 0.5, 0.0));
| vec3 nX = vec3(0.0, tnX.yx);
| vec3 nY = vec3(tnY.x, 0.0, tnY.y);
| vec3 nZ = vec3(tnZ.xy, 0.0);
| vec3 normal = normalize(nX * weights.x + nY * weights.y + nZ * weights.z + n);
| tex$index = vec4(normal, 0.0);
""".trimMargin() else ""}
|}
${if (post != null) """
vec4 x_texture = tex$index;
{
$post
}
tex$index = x_texture;
""".trimIndent() else ""}
""".trimMargin()
sealed class TextureTarget(val name: String) {
object NONE : TextureTarget("NONE")
object COLOR : TextureTarget("COLOR")
object ROUGHNESS : TextureTarget("ROUGHNESS")
object METALNESS : TextureTarget("METALNESS")
object METALNESS_ROUGHNESS : TextureTarget("METALNESS_ROUGHNESS")
object EMISSION : TextureTarget("EMISSION")
object NORMAL : TextureTarget("NORMAL")
object AMBIENT_OCCLUSION : TextureTarget("AMBIENT_OCCLUSION")
class Height(var scale: Double = 1.0) : TextureTarget("Height")
override fun toString(): String {
return "TextureTarget(name: $name)"
}
override fun hashCode(): Int {
return name.hashCode()
}
}
class Texture(var source: TextureSource,
var target: TextureTarget) {
fun copy(): Texture {
val copied = Texture(source, target)
return copied
}
override fun toString(): String {
return "Texture(source: $source, target: $target)"
}
override fun hashCode(): Int {
var result = source.hashCode()
result = 31 * result + target.hashCode()
return result
}
}
private var fragmentIDCounter = 1
data class SubsurfaceScatter(var enabled: Boolean) {
var color: ColorRGBa = ColorRGBa.WHITE
var shape = 1.0
fun fs(): String {
return if (enabled) """
f_diffuse.rgb += pow(smoothstep(1.0, 0.0, abs(dot(normalize(N),normalize(V)))), p_sssShape) * clamp(evaluateSH(-V, sh), vec3(0.0), vec3(1.0)) * p_sssColor.rgb;
""" else ""
}
fun applyToShadeStyle(shadeStyle: ShadeStyle) {
if (enabled) {
shadeStyle.parameter("sssColor", color)
shadeStyle.parameter("sssShape", shape)
}
}
}
data class CubemapReflection(var cubemap: Cubemap? = null) {
var color: ColorRGBa = ColorRGBa.WHITE
fun fs(): String {
return if (cubemap != null) {
"""
vec2 dfg = PrefilteredDFG_Karis(m_roughness, NoV);
vec3 sc = m_metalness * m_color.rgb + (1.0-m_metalness) * vec3(0.04);
f_specular.rgb += sc * (texture(p_radianceMap, reflect(-V, normalize(f_worldNormal)), m_roughness*7.0 ).rgb * dfg.x + dfg.y) * p_radianceColor.rgb;
"""
} else { "" }
}
fun applyToShadeStyle(shadeStyle: ShadeStyle) {
if (cubemap != null) {
shadeStyle.parameter("radianceMap", cubemap!!)
shadeStyle.parameter("radianceColor", color)
}
}
}
class PBRMaterial : Material {
override var name: String? = null
override fun toString(): String {
return "PBRMaterial(name: $name, fragmentID: $fragmentID, doubleSided: $doubleSided, textures: $textures, color: $color, metalness: $metalness, roughness: $roughness, emissive: $emission))"
}
override var fragmentID = fragmentIDCounter.apply {
fragmentIDCounter++
}
override var doubleSided: Boolean = false
override var transparent: Boolean = false
var environmentMap = false
var color = ColorRGBa.WHITE
var metalness = 0.5
var roughness = 1.0
var emission = ColorRGBa.BLACK
var subsurfaceScatter = SubsurfaceScatter(false)
var cubemapReflection = CubemapReflection(null)
var fragmentPreamble: String? = null
var vertexPreamble: String? = null
var vertexTransform: String? = null
var parameters = mutableMapOf<String, Any>()
var textures = mutableListOf<Texture>()
val shadeStyles = mutableMapOf<ContextKey, ShadeStyle>()
override fun generateShadeStyle(materialContext: MaterialContext, primitiveContext: PrimitiveContext): ShadeStyle {
val cached = shadeStyles.getOrPut(ContextKey(materialContext, primitiveContext)) {
val needLight = needLight(materialContext)
val preambleFS = """
vec4 m_color = p_color;
uint f_fragmentID = uint(p_fragmentID);
float m_f0 = 0.5;
float m_roughness = p_roughness;
float m_metalness = p_metalness;
float m_ambientOcclusion = 1.0;
vec3 m_emission = p_emission.rgb;
vec3 m_normal = vec3(0.0, 0.0, 1.0);
vec4 f_fog = vec4(0.0, 0.0, 0.0, 0.0);
vec3 f_worldNormal = v_worldNormal;
vec3 f_emission = m_emission;
""".trimIndent()
val textureFs = if (needLight) {
(textures.mapIndexed { index, it ->
when (val source = it.source) {
DummySource -> "vec4 tex$index = vec4(1.0);"
is ModelCoordinates -> source.fs(index)
is Triplanar -> source.fs(index, it.target)
is TextureFromCode -> source.fs(index, it.target)
else -> TODO()
}
} + textures.mapIndexed { index, texture ->
when (texture.target) {
TextureTarget.NONE -> ""
TextureTarget.COLOR -> "m_color.rgb *= pow(tex$index.rgb, vec3(2.2)); m_color.a *= tex$index.a;"
TextureTarget.METALNESS -> "m_metalness = tex$index.r;"
TextureTarget.ROUGHNESS -> "m_roughness = tex$index.r;"
TextureTarget.METALNESS_ROUGHNESS -> "m_metalness = tex$index.r; m_roughness = tex$index.g;"
TextureTarget.EMISSION -> "m_emission *= tex$index.rgb;"
TextureTarget.NORMAL -> "f_worldNormal = normalize((v_modelNormalMatrix * vec4(tex$index.xyz,0.0)).xyz);"
TextureTarget.AMBIENT_OCCLUSION -> "m_ambientOcclusion *= tex$index.r;"
is TextureTarget.Height -> ""
}
}).joinToString("\n")
} else ""
val displacers = textures.filter { it.target is TextureTarget.Height }
val skinVS = if (primitiveContext.hasSkinning) """
uvec4 j = a_joints;
mat4 skinTransform = p_jointTransforms[j.x] * a_weights.x
+ p_jointTransforms[j.y] * a_weights.y
+ p_jointTransforms[j.z] * a_weights.z
+ p_jointTransforms[j.w] * a_weights.w;
${if (primitiveContext.hasNormalAttribute) """
x_normal = normalize(mat3(skinTransform) * x_normal);
""".trimIndent() else ""}
x_position = (skinTransform * vec4(x_position,1)).xyz;
""".trimIndent() else ""
val textureVS = if (displacers.isNotEmpty()) textures.mapIndexed { index, it ->
if (it.target is TextureTarget.Height) {
when (val source = it.source) {
DummySource -> "vec4 tex$index = vec4(1.0);"
is ModelCoordinates -> source.fs(index)
is Triplanar -> source.fs(index, it.target)
is TextureFromCode -> source.fs(index, it.target)
else -> TODO()
} + """
x_position += x_normal * tex$index.r * p_textureHeightScale$index;
""".trimIndent()
} else ""
}.joinToString("\n") else ""
val lights = materialContext.lights
val doubleSidedFS = if (doubleSided) {
"""
if (dot(V, N) <0) {
N *= -1.0;
}
""".trimIndent()
} else ""
val lightFS = if (needLight) """
vec3 f_diffuse = vec3(0.0);
vec3 f_specular = vec3(0.0);
vec3 f_ambient = vec3(0.0);
float f_occlusion = 1.0;
vec3 N = normalize(f_worldNormal);
vec3 ep = (p_viewMatrixInverse * vec4(0.0, 0.0, 0.0, 1.0)).xyz;
vec3 Vr = ep - v_worldPosition;
vec3 V = normalize(Vr);
float NoV = ${if (primitiveContext.hasNormalAttribute) "abs(dot(N, V)) + 1e-5" else "1"};
${if (environmentMap && materialContext.meshCubemaps.isNotEmpty() && primitiveContext.hasNormalAttribute) """
{
vec2 dfg = PrefilteredDFG_Karis(m_roughness, NoV);
vec3 sc = m_metalness * m_color.rgb + (1.0-m_metalness) * vec3(0.04);
f_specular.rgb += sc * (texture(p_environmentMap, reflect(-V, normalize(f_worldNormal))).rgb * dfg.x + dfg.y) * m_ambientOcclusion;
}
""".trimIndent() else ""}
${lights.mapIndexed { index, (node, light) ->
when (light) {
is AmbientLight -> light.fs(index)
is PointLight -> light.fs(index, primitiveContext.hasNormalAttribute)
is SpotLight -> light.fs(index, primitiveContext.hasNormalAttribute)
is DirectionalLight -> light.fs(index, primitiveContext.hasNormalAttribute)
is HemisphereLight -> light.fs(index, primitiveContext.hasNormalAttribute)
else -> TODO()
}
}.joinToString("\n")}
${if (materialContext.irradianceSH?.shMap != null) """
vec3[9] sh;
gatherSH(p_shMap, v_worldPosition, sh);
vec3 irradiance = clamp(evaluateSH(normalize(N), sh), vec3(0.0), vec3(1.0)) * m_color.rgb;
vec3 ks = F_SchlickRoughness(m_color.rgb * (m_metalness) + 0.04 * (1.0-m_metalness), m_roughness+0.1, min(NoV, 1.0-1.0e-6));
f_diffuse.rgb = irradiance * ks;
f_ambient.rgb = (1.0-ks) * irradiance;
${subsurfaceScatter.fs()}
${cubemapReflection.fs()}
""".trimIndent() else ""
}
${materialContext.fogs.mapIndexed { index, (node, fog) ->
fog.fs(index)
}.joinToString("\n")}
""".trimIndent() else ""
val rt = RenderTarget.active
val combinerFS = materialContext.pass.combiners.map {
it.generateShader()
}.joinToString("\n")
val fs = preambleFS + textureFs + lightFS + combinerFS
val vs = (this@PBRMaterial.vertexTransform ?: "") + textureVS + skinVS
shadeStyle {
fragmentPreamble = this@PBRMaterial.fragmentPreamble ?: ""
vertexPreamble = """
$shaderNoRepetitionVert
${(this@PBRMaterial.vertexPreamble) ?: ""}
""".trimIndent()
fragmentPreamble += """
${if (materialContext.irradianceSH?.shMap != null) {
"""
$glslEvaluateSH
$glslFetchSH
${genGlslGatherSH(materialContext.irradianceSH!!.xCount, materialContext.irradianceSH!!.yCount,
materialContext.irradianceSH!!.zCount, materialContext.irradianceSH!!.spacing, materialContext.irradianceSH!!.offset)}
"""
} else {
""
}
}
|$shaderLinePlaneIntersect
|$shaderProjectOnPlane
|$shaderSideOfPlane
|$shaderGGX
|$shaderVSM
|$shaderNoRepetition
|$phraseTbnMatrix
""".trimMargin()
this.suppressDefaultOutput = true
this.vertexTransform = vs
fragmentTransform = fs
materialContext.pass.combiners.map {
if (rt is ProgramRenderTarget || materialContext.pass === DefaultPass || materialContext.pass === DefaultOpaquePass || materialContext.pass == DefaultTransparentPass || materialContext.pass == IrradianceProbePass || materialContext.pass.skipTarget ) {
this.output(it.targetOutput, ShadeStyleOutput(0))
} else {
val index = rt.colorAttachmentIndexByName(it.targetOutput)?:error("attachment ${it.targetOutput} not found")
val type = rt.colorBuffer(index).type
val format = rt.colorBuffer(index).format
this.output(it.targetOutput, ShadeStyleOutput(index, format, type))
}
}
}
}
return cached
}
private fun needLight(context: MaterialContext): Boolean {
val needSpecular = context.pass.combiners.any { FacetType.SPECULAR in it.facets }
val needDiffuse = context.pass.combiners.any { FacetType.DIFFUSE in it.facets }
val needLight = needSpecular || needDiffuse
return needLight
}
override fun applyToShadeStyle(context: MaterialContext, shadeStyle: ShadeStyle) {
shadeStyle.parameter("emission", emission)
shadeStyle.parameter("color", color)
shadeStyle.parameter("metalness", metalness)
shadeStyle.parameter("roughness", roughness)
shadeStyle.parameter("fragmentID", fragmentID)
if (context.irradianceProbeCount > 0) {
shadeStyle.parameter("shMap", context.irradianceSH?.shMap!!)
}
parameters.forEach { (k, v) ->
when (v) {
is Double -> shadeStyle.parameter(k, v)
is Int -> shadeStyle.parameter(k, v)
is Vector2 -> shadeStyle.parameter(k, v)
is Vector3 -> shadeStyle.parameter(k, v)
is Vector4 -> shadeStyle.parameter(k, v)
is BufferTexture -> shadeStyle.parameter(k, v)
is ColorBuffer -> shadeStyle.parameter(k, v)
else -> TODO("support ${v::class.java}")
}
}
if (needLight(context)) {
subsurfaceScatter.applyToShadeStyle(shadeStyle)
cubemapReflection.applyToShadeStyle(shadeStyle)
textures.forEachIndexed { index, texture ->
when (val source = texture.source) {
is TextureFromColorBuffer -> {
shadeStyle.parameter("texture$index", source.texture)
if (source.textureFunction == TextureFunction.NOT_TILING) {
shadeStyle.parameter("textureNoise", noise128)
}
}
}
when (val source = texture.source) {
is Triplanar -> {
shadeStyle.parameter("textureTriplanarSharpness$index", source.sharpness)
shadeStyle.parameter("textureTriplanarScale$index", source.scale)
shadeStyle.parameter("textureTriplanarOffset$index", source.offset)
}
}
if (texture.target is TextureTarget.Height) {
val target = texture.target as TextureTarget.Height
shadeStyle.parameter("textureHeightScale$index", target.scale)
}
}
val lights = context.lights
lights.forEachIndexed { index, (node, light) ->
shadeStyle.parameter("lightColor$index", light.color)
when (light) {
is AmbientLight -> {
}
is PointLight -> {
shadeStyle.parameter("lightPosition$index", (node.worldTransform * Vector4.UNIT_W).xyz)
shadeStyle.parameter("lightConstantAttenuation$index", light.constantAttenuation)
shadeStyle.parameter("lightLinearAttenuation$index", light.linearAttenuation)
shadeStyle.parameter("lightQuadraticAttenuation$index", light.quadraticAttenuation)
}
is SpotLight -> {
shadeStyle.parameter("lightPosition$index", (node.worldTransform * Vector4.UNIT_W).xyz)
shadeStyle.parameter("lightDirection$index", ((normalMatrix(node.worldTransform)) * light.direction.xyz0).normalized.xyz)
shadeStyle.parameter("lightConstantAttenuation$index", light.constantAttenuation)
shadeStyle.parameter("lightLinearAttenuation$index", light.linearAttenuation)
shadeStyle.parameter("lightQuadraticAttenuation$index", light.quadraticAttenuation)
shadeStyle.parameter("lightInnerCos$index", cos(Math.toRadians(light.innerAngle)))
shadeStyle.parameter("lightOuterCos$index", cos(Math.toRadians(light.outerAngle)))
if (light.shadows is Shadows.MappedShadows) {
context.shadowMaps[light]?.let {
val look = light.view(node)
shadeStyle.parameter("lightTransform$index",
light.projection(it) * look)
if (light.shadows is Shadows.DepthMappedShadows) {
shadeStyle.parameter("lightShadowMap$index", it.depthBuffer ?: TODO())
}
if (light.shadows is Shadows.ColorMappedShadows) {
shadeStyle.parameter("lightShadowMap$index", it.colorBuffer(0))
}
}
}
}
is DirectionalLight -> {
shadeStyle.parameter("lightPosition$index", (node.worldTransform * Vector4.UNIT_W).xyz)
shadeStyle.parameter("lightDirection$index", ((normalMatrix(node.worldTransform)) * light.direction.xyz0).normalized.xyz)
if (light.shadows is Shadows.MappedShadows) {
context.shadowMaps[light]?.let {
val look = light.view(node)
shadeStyle.parameter("lightTransform$index",
light.projection(it) * look)
if (light.shadows is Shadows.DepthMappedShadows) {
shadeStyle.parameter("lightShadowMap$index", it.depthBuffer ?: TODO())
}
if (light.shadows is Shadows.ColorMappedShadows) {
shadeStyle.parameter("lightShadowMap$index", it.colorBuffer(0))
}
}
}
}
is HemisphereLight -> {
shadeStyle.parameter("lightDirection$index", ((normalMatrix(node.worldTransform)) * light.direction.xyz0).normalized.xyz)
shadeStyle.parameter("lightUpColor$index", light.upColor)
shadeStyle.parameter("lightDownColor$index", light.downColor)
light.irradianceMap?.let {
shadeStyle.parameter("lightIrradianceMap$index", it)
}
}
}
}
context.fogs.forEachIndexed { index, (node, fog) ->
shadeStyle.parameter("fogColor$index", fog.color)
shadeStyle.parameter("fogEnd$index", fog.end)
}
} else {
textures.forEachIndexed { index, texture ->
if (texture.target is TextureTarget.Height) {
when (val source = texture.source) {
is TextureFromColorBuffer -> shadeStyle.parameter("texture$index", source.texture)
}
when (val source = texture.source) {
is Triplanar -> {
shadeStyle.parameter("textureTriplanarSharpness$index", source.sharpness)
shadeStyle.parameter("textureTriplanarScale$index", source.scale)
shadeStyle.parameter("textureTriplanarOffset$index", source.offset)
}
}
val target = texture.target as TextureTarget.Height
shadeStyle.parameter("textureHeightScale$index", target.scale)
}
}
}
}
override fun hashCode(): Int {
var result = fragmentID.hashCode()
result = 31 * result + doubleSided.hashCode()
result = 31 * result + transparent.hashCode()
// result = 31 * result + environmentMap.hashCode()
result = 31 * result + color.hashCode()
result = 31 * result + metalness.hashCode()
result = 31 * result + roughness.hashCode()
result = 31 * result + emission.hashCode()
result = 31 * result + (fragmentPreamble?.hashCode() ?: 0)
result = 31 * result + (vertexPreamble?.hashCode() ?: 0)
result = 31 * result + (vertexTransform?.hashCode() ?: 0)
// result = 31 * result + parameters.hashCode()
// result = 31 * result + textures.hashCode()
// result = 31 * result + shadeStyles.hashCode()
return result
}
}
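// --- usage sketch (added for illustration, not part of the original sources; values are arbitrary) ---
// Configures a PBRMaterial together with the two optional shading features declared above.
// Note that the subsurface-scatter term samples the irradiance SH probes, so it only
// contributes when the scene has an IrradianceSH feature attached.
private fun examplePbrFeatureSetup(radianceMap: Cubemap): PBRMaterial {
    val material = PBRMaterial()
    material.metalness = 0.1
    material.roughness = 0.4
    material.color = ColorRGBa(1.0, 0.82, 0.75)
    material.subsurfaceScatter = SubsurfaceScatter(true).apply {
        color = ColorRGBa(1.0, 0.6, 0.5)
        shape = 2.0
    }
    // adds a prefiltered-environment specular term on top of the analytic lights
    material.cubemapReflection = CubemapReflection(radianceMap)
    return material
}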

View File

@@ -0,0 +1,61 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.*
import org.openrndr.math.Matrix44
data class PostContext(val lightContext: LightContext, val inverseViewMatrix: Matrix44)
interface PostStep {
fun apply(buffers: MutableMap<String, ColorBuffer>, postContext: PostContext)
}
class FilterPostStep<T:Filter>(val outputScale: Double,
val filter: T,
val inputs: List<String>,
val output: String,
val outputFormat: ColorFormat,
val outputType: ColorType,
val update: (T.(PostContext) -> Unit)? = null) : PostStep {
override fun apply(buffers: MutableMap<String, ColorBuffer>, postContext: PostContext) {
val inputBuffers = inputs.map { buffers[it]?: error("buffer not found: $it") }
val outputBuffer = buffers.getOrPut(output) {
colorBuffer((inputBuffers[0].width * outputScale).toInt(),
(inputBuffers[0].height * outputScale).toInt(),
format = outputFormat,
type = outputType)
}
update?.invoke(filter, postContext)
filter.apply(inputBuffers.toTypedArray(), outputBuffer)
}
}
class FunctionPostStep(val function:(MutableMap<String, ColorBuffer>)->Unit) : PostStep {
override fun apply(buffers: MutableMap<String, ColorBuffer>, postContext: PostContext) {
function(buffers)
}
}
class FilterPostStepBuilder<T : Filter>(val filter: T) {
var outputScale = 1.0
val inputs = mutableListOf<String>()
var output = "untitled"
var outputFormat = ColorFormat.RGBa
var outputType = ColorType.UINT8
var update: (T.(PostContext) -> Unit)? = null
internal fun build(): PostStep {
@Suppress("UNCHECKED_CAST", "PackageDirectoryMismatch")
return FilterPostStep(outputScale, filter, inputs, output, outputFormat, outputType, update as (Filter.(PostContext) -> Unit)?)
}
}
fun <T : Filter> postStep(filter: T, configure: FilterPostStepBuilder<T>.() -> Unit) : PostStep {
val psb = FilterPostStepBuilder(filter)
psb.configure()
return psb.build()
}
fun postStep(function: (MutableMap<String, ColorBuffer>)->Unit) : PostStep {
return FunctionPostStep(function)
}
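// --- usage sketch (added for illustration) ---
// Wires a filter-based post step into a SceneRenderer. ApproximateGaussianBlur comes from
// orx-fx (fully qualified here to avoid touching the imports above); the "color" input is
// assumed to be the renderer's default LDR color buffer, which is also its fallback output name.
fun exampleBlurPostStep(renderer: SceneRenderer) {
    renderer.postSteps += postStep(org.openrndr.extra.fx.blur.ApproximateGaussianBlur()) {
        inputs += "color"
        output = "color-blurred"
        outputScale = 1.0
        update = {
            // receiver is the filter instance, so its parameters can be tuned per frame
            sigma = 4.0
            window = 9
        }
    }
}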

View File

@@ -0,0 +1,35 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.BufferMultisample
import org.openrndr.draw.DepthFormat
import org.openrndr.draw.RenderTarget
import org.openrndr.draw.renderTarget
data class RenderPass(val combiners: List<FacetCombiner>,
val renderOpaque: Boolean = true,
val renderTransparent: Boolean = false,
val depthWrite: Boolean = true,
val multisample: BufferMultisample = BufferMultisample.Disabled,
val skipTarget: Boolean = false
)
val DefaultPass = RenderPass(listOf(LDRColorFacet()))
val IrradianceProbePass = RenderPass(listOf(DiffuseIrradianceFacet()))
val DefaultOpaquePass = RenderPass(listOf(LDRColorFacet()), renderOpaque = true, renderTransparent = false)
val DefaultTransparentPass = RenderPass(listOf(LDRColorFacet()), renderOpaque = false, renderTransparent = true, depthWrite = false)
val LightPass = RenderPass(emptyList())
val VSMLightPass = RenderPass(listOf(MomentsFacet()))
fun RenderPass.createPassTarget(width: Int, height: Int, depthFormat: DepthFormat = DepthFormat.DEPTH24, multisample: BufferMultisample = this.multisample): RenderTarget {
return renderTarget(width, height, multisample = multisample) {
for (combiner in combiners) {
when (combiner) {
is ColorBufferFacetCombiner ->
colorBuffer(combiner.targetOutput, combiner.format, combiner.type)
}
}
depthBuffer(depthFormat)
}
}
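// --- usage sketch (added for illustration; the resolution is arbitrary) ---
// Creates an off-screen target for the default opaque pass: one color attachment per
// ColorBufferFacetCombiner in the pass (here a single LDR color facet) plus a depth buffer.
fun exampleOpaquePassTarget(): RenderTarget =
    DefaultOpaquePass.createPassTarget(1280, 720, depthFormat = DepthFormat.DEPTH24)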

View File

@@ -0,0 +1,95 @@
package org.openrndr.extra.dnk3
import org.openrndr.Dispatcher
import org.openrndr.math.Matrix44
import org.openrndr.math.Vector3
import org.openrndr.math.Vector4
import java.util.*
class Scene(val root: SceneNode = SceneNode(), val dispatcher: Dispatcher = Dispatcher()) {
val features = mutableListOf<Feature>()
override fun hashCode(): Int {
var result = root.hashCode()
result = result * 31 + features.hashCode()
return result
}
fun hash(): String = Base64.getEncoder().encodeToString(hashCode().toString().toByteArray())
}
open class SceneNode {
var name: String = ""
var entities: MutableList<Entity> = mutableListOf()
var parent: SceneNode? = null
open var transform = Matrix44.IDENTITY
var worldTransform = Matrix44.IDENTITY
val children = mutableListOf<SceneNode>()
var disposed = false
override fun hashCode(): Int {
var result = name.hashCode()
result = 31 * result + entities.hashCode()
// result = 31 * result + (parent?.hashCode() ?: 0)
result = 31 * result + transform.hashCode()
result = 31 * result + worldTransform.hashCode()
result = 31 * result + children.hashCode()
result = 31 * result + disposed.hashCode()
return result
}
}
val SceneNode.worldPosition: Vector3
get() {
return (worldTransform * Vector4.UNIT_W).xyz
}
class NodeContent<T>(val node: SceneNode, val content: T) {
operator fun component1() = node
operator fun component2() = content
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as NodeContent<*>
if (node != other.node) return false
if (content != other.content) return false
return true
}
override fun hashCode(): Int {
var result = node.hashCode()
result = 31 * result + content.hashCode()
return result
}
}
fun SceneNode.visit(visitor: SceneNode.() -> Unit) {
visitor()
children.forEach { it.visit(visitor) }
}
fun <P> SceneNode.scan(initial: P, scanner: SceneNode.(P) -> P) {
val p = scanner(initial)
children.forEach { it.scan(p, scanner) }
}
fun SceneNode.findNodes(selector: SceneNode.() -> Boolean): List<SceneNode> {
val result = mutableListOf<SceneNode>()
visit {
if (selector()) result.add(this)
}
return result
}
fun <P : Entity> SceneNode.findContent(selector: Entity.() -> P?): List<NodeContent<P>> {
val result = mutableListOf<NodeContent<P>>()
visit {
entities.forEach {
val s = it.selector()
if (s != null) {
result.add(NodeContent(this, s))
}
}
}
return result
}
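// --- usage sketch (added for illustration; Mesh is defined elsewhere in this module) ---
// Builds a two-node hierarchy by hand and queries it with the helpers above.
fun exampleSceneQuery(): List<NodeContent<Mesh>> {
    val scene = Scene()
    val child = SceneNode().apply { name = "child" }
    child.parent = scene.root
    scene.root.children.add(child)
    // findContent pairs every node with its matching entities; with no meshes added yet
    // the result is simply an empty list.
    return scene.root.findContent { this as? Mesh }
}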

View File

@@ -0,0 +1,342 @@
package org.openrndr.extra.dnk3
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.features.IrradianceSH
import org.openrndr.extra.fx.blur.ApproximateGaussianBlur
import org.openrndr.math.Matrix44
import org.openrndr.math.Vector3
import java.nio.ByteBuffer
class RenderContext(
val lights: List<NodeContent<Light>>,
val meshes: List<NodeContent<Mesh>>,
val skinnedMeshes: List<NodeContent<SkinnedMesh>>,
val instancedMeshes: List<NodeContent<InstancedMesh>>,
val pathMeshes: List<NodeContent<PathMesh>>,
val fogs: List<NodeContent<Fog>>
)
class SceneRenderer {
class Configuration {
var multisampleLines = false
}
val configuration = Configuration()
val blur = ApproximateGaussianBlur()
var shadowLightTargets = mutableMapOf<ShadowLight, RenderTarget>()
var meshCubemaps = mutableMapOf<Mesh, Cubemap>()
var outputPasses = mutableListOf(DefaultOpaquePass, DefaultTransparentPass)
var outputPassTarget: RenderTarget? = null
var outputPassTargetMS: RenderTarget? = null
val postSteps = mutableListOf<PostStep>()
val buffers = mutableMapOf<String, ColorBuffer>()
var drawFinalBuffer = true
var first = true
fun draw(drawer: Drawer, scene: Scene) {
drawer.pushStyle()
drawer.depthWrite = true
drawer.depthTestPass = DepthTestPass.LESS_OR_EQUAL
drawer.cullTestPass = CullTestPass.FRONT
scene.dispatcher.execute()
// update all the transforms
scene.root.scan(Matrix44.IDENTITY) { p ->
if (p !== Matrix44.IDENTITY) {
worldTransform = p * transform
} else {
worldTransform = transform
}
worldTransform
}
val context = RenderContext(
lights = scene.root.findContent { this as? Light },
meshes = scene.root.findContent { this as? Mesh },
skinnedMeshes = scene.root.findContent { this as? SkinnedMesh },
fogs = scene.root.findContent { this as? Fog },
instancedMeshes = scene.root.findContent { this as? InstancedMesh },
pathMeshes = scene.root.findContent { this as? PathMesh}
)
// shadow passes
run {
context.lights.filter { it.content is ShadowLight && (it.content as ShadowLight).shadows is Shadows.MappedShadows }.forEach {
val shadowLight = it.content as ShadowLight
val pass: RenderPass = when (shadowLight.shadows) {
is Shadows.PCF, is Shadows.Simple -> {
LightPass
}
is Shadows.VSM -> {
VSMLightPass
}
else -> TODO()
}
val target = shadowLightTargets.getOrPut(shadowLight) {
val mapSize = (shadowLight.shadows as Shadows.MappedShadows).mapSize
pass.createPassTarget(mapSize, mapSize, DepthFormat.DEPTH16)
}
target.clearDepth(depth = 1.0)
val look = shadowLight.view(it.node)
val materialContext = MaterialContext(pass, context.lights, context.fogs, shadowLightTargets, emptyMap(), 0)
drawer.isolatedWithTarget(target) {
drawer.projection = shadowLight.projection(target)
drawer.view = look
drawer.model = Matrix44.IDENTITY
drawer.clear(ColorRGBa.BLACK)
drawer.cullTestPass = CullTestPass.FRONT
drawPass(drawer, pass, materialContext, context)
}
when (shadowLight.shadows) {
is Shadows.VSM -> {
blur.gain = 1.0
blur.sigma = 3.0
blur.window = 9
blur.spread = 1.0
blur.apply(target.colorBuffer(0), target.colorBuffer(0))
}
}
}
}
// -- feature passes
for (feature in scene.features) {
feature.update(drawer, this, scene, feature, context)
}
// -- output passes
run {
val irradianceSH = scene.features.find { it is IrradianceSH } as? IrradianceSH
for (pass in outputPasses) {
val materialContext = MaterialContext(pass, context.lights, context.fogs, shadowLightTargets, meshCubemaps, irradianceSH?.probeCount
?: 0)
materialContext.irradianceSH = irradianceSH
val defaultPasses = setOf(DefaultTransparentPass, DefaultOpaquePass)
if ((pass !in defaultPasses || postSteps.isNotEmpty()) && outputPassTarget == null) {
outputPassTarget = pass.createPassTarget(RenderTarget.active.width, RenderTarget.active.height)
}
if (pass == outputPasses[0]) {
outputPassTarget?.let {
drawer.withTarget(it) {
clear(ColorRGBa.TRANSPARENT)
}
}
}
outputPassTarget?.let { target ->
pass.combiners.forEach {
if (it is ColorBufferFacetCombiner) {
val index = target.colorAttachmentIndexByName(it.targetOutput)
?: error("attachment not found ${it.targetOutput}")
target.blendMode(index, it.blendMode)
}
}
}
outputPassTarget?.bind()
drawPass(drawer, pass, materialContext, context)
outputPassTarget?.unbind()
outputPassTarget?.let { output ->
for (combiner in pass.combiners) {
buffers[combiner.targetOutput] = (output.colorAttachmentByName(combiner.targetOutput) as? ColorBufferAttachment)?.colorBuffer
?: error("attachment not found ${combiner.targetOutput}")
}
}
}
val lightContext = LightContext(context.lights, shadowLightTargets)
val postContext = PostContext(lightContext, drawer.view.inversed)
for (postStep in postSteps) {
postStep.apply(buffers, postContext)
}
}
drawer.popStyle()
if (drawFinalBuffer) {
outputPassTarget?.let { output ->
drawer.isolated {
drawer.defaults()
drawer.ortho()
val outputName = (postSteps.lastOrNull() as? FilterPostStep<*>)?.output ?: "color"
val outputBuffer = buffers[outputName]
?: throw IllegalArgumentException("can't find $outputName buffer")
drawer.image(outputBuffer)
}
}
}
}
internal fun drawPass(drawer: Drawer, pass: RenderPass, materialContext: MaterialContext,
context: RenderContext, shadeStyleTransformer: ((ShadeStyle)->Unit)? = null
) {
drawer.depthWrite = pass.depthWrite
val primitives = context.meshes.flatMap { mesh ->
mesh.content.primitives.map { primitive ->
NodeContent(mesh.node, primitive)
}
}
// -- draw all meshes
primitives
.filter { (it.content.material.transparent && pass.renderTransparent) || (!it.content.material.transparent && pass.renderOpaque) }
.forEach {
val primitive = it.content
drawer.isolated {
if (primitive.material.doubleSided) {
drawer.drawStyle.cullTestPass = CullTestPass.ALWAYS
}
val hasNormalAttribute = primitive.geometry.vertexBuffers.any { it.vertexFormat.hasAttribute("normal") }
val primitiveContext = PrimitiveContext(hasNormalAttribute, false)
val shadeStyle = primitive.material.generateShadeStyle(materialContext, primitiveContext)
shadeStyle.parameter("viewMatrixInverse", drawer.view.inversed)
primitive.material.applyToShadeStyle(materialContext, shadeStyle)
shadeStyleTransformer?.invoke(shadeStyle)
drawer.shadeStyle = shadeStyle
drawer.model = it.node.worldTransform
if (primitive.geometry.indexBuffer == null) {
drawer.vertexBuffer(primitive.geometry.vertexBuffers,
primitive.geometry.primitive,
primitive.geometry.offset,
primitive.geometry.vertexCount)
} else {
drawer.vertexBuffer(primitive.geometry.indexBuffer!!,
primitive.geometry.vertexBuffers,
primitive.geometry.primitive,
primitive.geometry.offset,
primitive.geometry.vertexCount)
}
}
}
val skinnedPrimitives = context.skinnedMeshes.flatMap { mesh ->
mesh.content.primitives.map { primitive ->
NodeContent(mesh.node, Pair(primitive, mesh))
}
}
skinnedPrimitives
.filter {
(it.content.first.material.transparent && pass.renderTransparent) ||
(!it.content.first.material.transparent && pass.renderOpaque)
}
.forEach {
val primitive = it.content.first
val skinnedMesh = it.content.second.content
drawer.isolated {
if (primitive.material.doubleSided) {
drawer.drawStyle.cullTestPass = CullTestPass.ALWAYS
}
val hasNormalAttribute = primitive.geometry.vertexBuffers.any { it.vertexFormat.hasAttribute("normal") }
val primitiveContext = PrimitiveContext(hasNormalAttribute, true)
val nodeInverse = it.node.worldTransform.inversed
val jointTransforms = (skinnedMesh.joints zip skinnedMesh.inverseBindMatrices)
.map { (nodeInverse * it.first.worldTransform * it.second) }
val shadeStyle = primitive.material.generateShadeStyle(materialContext, primitiveContext)
shadeStyle.parameter("jointTransforms", jointTransforms.toTypedArray())
shadeStyle.parameter("viewMatrixInverse", drawer.view.inversed)
primitive.material.applyToShadeStyle(materialContext, shadeStyle)
drawer.shadeStyle = shadeStyle
drawer.model = it.node.worldTransform
if (primitive.geometry.indexBuffer == null) {
drawer.vertexBuffer(primitive.geometry.vertexBuffers,
primitive.geometry.primitive,
primitive.geometry.offset,
primitive.geometry.vertexCount)
} else {
drawer.vertexBuffer(primitive.geometry.indexBuffer!!,
primitive.geometry.vertexBuffers,
primitive.geometry.primitive,
primitive.geometry.offset,
primitive.geometry.vertexCount)
}
}
}
val instancedPrimitives = context.instancedMeshes.flatMap { mesh ->
mesh.content.primitives.map { primitive ->
NodeContent(mesh.node, MeshPrimitiveInstance(primitive, mesh.content.instances, mesh.content.attributes))
}
}
// -- draw all instanced meshes
instancedPrimitives
.filter { (it.content.primitive.material.transparent && pass.renderTransparent) || (!it.content.primitive.material.transparent && pass.renderOpaque) }
.forEach {
val primitive = it.content
drawer.isolated {
val primitiveContext = PrimitiveContext(true, false)
val shadeStyle = primitive.primitive.material.generateShadeStyle(materialContext, primitiveContext)
shadeStyle.parameter("viewMatrixInverse", drawer.view.inversed)
primitive.primitive.material.applyToShadeStyle(materialContext, shadeStyle)
if (primitive.primitive.material.doubleSided) {
drawer.drawStyle.cullTestPass = CullTestPass.ALWAYS
}
drawer.shadeStyle = shadeStyle
drawer.model = it.node.worldTransform
drawer.vertexBufferInstances(primitive.primitive.geometry.vertexBuffers,
primitive.attributes,
DrawPrimitive.TRIANGLES,
primitive.instances,
primitive.primitive.geometry.offset,
primitive.primitive.geometry.vertexCount)
}
}
context.pathMeshes.filter { (it.content.material.transparent && pass.renderTransparent) || (!it.content.material.transparent && pass.renderOpaque) }
.forEach {
drawer.isolated {
val primitiveContext = PrimitiveContext(true, false)
val shadeStyle = it.content.material.generateShadeStyle(materialContext, primitiveContext)
shadeStyle.parameter("viewMatrixInverse", drawer.view.inversed)
it.content.material.applyToShadeStyle(materialContext, shadeStyle)
drawer.drawStyle.cullTestPass = CullTestPass.ALWAYS
drawer.shadeStyle = shadeStyle
drawer.model = it.node.worldTransform
drawer.strokeWeight = it.content.weight
for (path in it.content.paths) {
drawer.path(path.sampleLinear(0.0005))
}
}
}
drawer.depthWrite = true
}
}
fun sceneRenderer(builder: SceneRenderer.() -> Unit): SceneRenderer {
val sceneRenderer = SceneRenderer()
sceneRenderer.builder()
return sceneRenderer
}
internal fun ByteBuffer.putVector3(v: Vector3) {
putFloat(v.x.toFloat())
putFloat(v.y.toFloat())
putFloat(v.z.toFloat())
}
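// --- usage sketch (added for illustration; assumes the standard OPENRNDR application/extend entry points) ---
// Builds a renderer once and draws the scene every frame.
fun exampleRenderLoop(scene: Scene) = org.openrndr.application {
    program {
        val renderer = sceneRenderer {
            outputPasses = mutableListOf(DefaultOpaquePass, DefaultTransparentPass)
        }
        extend {
            renderer.draw(drawer, scene)
        }
    }
}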

View File

@@ -0,0 +1,237 @@
package org.openrndr.extra.dnk3
val shaderNoRepetition = """
// -- shaderNoRepetition
float sum( vec3 v ) { return v.x+v.y+v.z; }
// based on https://www.shadertoy.com/view/Xtl3zf
vec4 textureNoTile(in sampler2D noiseTex, in sampler2D tex, in vec2 noiseOffset, in vec2 x)
{
float v = 1.0;
float k = texture(noiseTex, noiseOffset + x*0.01 ).x; // cheap (cache friendly) lookup
vec2 duvdx = dFdx( x );
vec2 duvdy = dFdy( x );
float l = k*8.0;
float f = fract(l);
#if 0
float ia = floor(l); // my method
float ib = ia + 1.0;
#else
float ia = floor(l+0.5); // suslik's method (see comments)
float ib = floor(l);
f = min(f, 1.0-f)*2.0;
#endif
vec2 offa = sin(vec2(3.0,7.0)*ia); // can replace with any other hash
vec2 offb = sin(vec2(3.0,7.0)*ib); // can replace with any other hash
vec3 cola = textureGrad( tex, x + v*offa, duvdx, duvdy ).xyz;
vec3 colb = textureGrad( tex, x + v*offb, duvdx, duvdy ).xyz;
return vec4(mix( cola, colb, smoothstep(0.2,0.8,f-0.1*sum(cola-colb)) ), 1.0);
}
"""
val shaderNoRepetitionVert = """
// -- shaderNoRepetitionVert
float sum( vec3 v ) { return v.x+v.y+v.z; }
// based on https://www.shadertoy.com/view/Xtl3zf
vec4 textureNoTile(in sampler2D tex, in vec2 noiseOffset, in vec2 x)
{
float v = 1.0;
float k = texture(tex, noiseOffset + 0.005*x ).x; // cheap (cache friendly) lookup
float l = k*8.0;
float f = fract(l);
#if 0
float ia = floor(l); // my method
float ib = ia + 1.0;
#else
float ia = floor(l+0.5); // suslik's method (see comments)
float ib = floor(l);
f = min(f, 1.0-f)*2.0;
#endif
vec2 offa = sin(vec2(3.0,7.0)*ia); // can replace with any other hash
vec2 offb = sin(vec2(3.0,7.0)*ib); // can replace with any other hash
vec3 cola = texture( tex, x + v*offa).xyz;
vec3 colb = texture( tex, x + v*offb).xyz;
return vec4(mix( cola, colb, smoothstep(0.2,0.8,f-0.1*sum(cola-colb)) ), 1.0);
}
"""
val shaderProjectOnPlane = """
// -- shaderProjectOnPlane
vec3 projectOnPlane(vec3 p, vec3 pc, vec3 pn) {
float distance = dot(pn, p-pc);
return p - distance * pn;
}
""".trimIndent()
val shaderSideOfPlane = """
// -- shaderSideOfPlane
int sideOfPlane(in vec3 p, in vec3 pc, in vec3 pn){
if (dot(p-pc,pn) >= 0.0) return 1; else return 0;
}
""".trimIndent()
val shaderLinePlaneIntersect = """
// -- shaderLinePlaneIntersect
vec3 linePlaneIntersect(in vec3 lp, in vec3 lv, in vec3 pc, in vec3 pn){
return lp+lv*(dot(pn,pc-lp)/dot(pn,lv));
}
""".trimIndent()
val shaderVSM = """
|// -- shaderVSM
|float linstep(float min, float max, float v)
|{
| return clamp((v - min) / (max - min), 0, 1);
|}
|// https://developer.nvidia.com/gpugems/GPUGems3/gpugems3_ch08.html
|float chebyshevUpperBound(vec2 moments, float t, float minVariance) {
| // One-tailed inequality valid if t > Moments.x
| float p = (t <= moments.x) ? 1.0 : 0.0;
| // Compute variance.
| float variance = moments.y - (moments.x * moments.x);
| variance = max(variance, minVariance);
| // Compute probabilistic upper bound.
| float d = t - moments.x;
| float p_max = variance / (variance + d*d);
| p_max = smoothstep(0.6, 1.0, p_max);
| return max(p, p_max);
|}
""".trimMargin()
/*
N - world space normal
V - eye - world vertex position
L - world light pos - world vertex position
*/
val shaderGGX = """
// -- shaderGGX
#define bias 0.125
#define HASHSCALE 443.8975
vec2 hash22(vec2 p) {
vec3 p3 = fract(vec3(p.xyx) * HASHSCALE);
p3 += dot(p3, p3.yzx+19.19);
return fract(vec2((p3.x + p3.y)*p3.z, (p3.x+p3.z)*p3.y));
}
#define PI 3.1415926535
float pow5(float x) {
float x2 = x * x;
return x2 * x2 * x;
}
float D_GGX(float linearRoughness, float NoH, const vec3 h) {
// Walter et al. 2007, "Microfacet Models for Refraction through Rough Surfaces"
float oneMinusNoHSquared = 1.0 - NoH * NoH;
float a = NoH * linearRoughness;
float k = linearRoughness / (oneMinusNoHSquared + a * a);
float d = k * k * (1.0 / PI);
return d;
}
float D_GGXm(float linearRoughness, float NoH, const vec3 h, const vec3 n) {
vec3 NxH = cross(n, h);
float oneMinusNoHSquared = dot(NxH, NxH);
// Walter et al. 2007, "Microfacet Models for Refraction through Rough Surfaces"
//float oneMinusNoHSquared = 1.0 - NoH * NoH;
float a = NoH * linearRoughness;
float k = linearRoughness / (oneMinusNoHSquared + a * a);
float d = k * k * (1.0 / PI);
return d;
}
float V_SmithGGXCorrelated(float linearRoughness, float NoV, float NoL) {
// Heitz 2014, "Understanding the Masking-Shadowing Function in Microfacet-Based BRDFs"
float a2 = linearRoughness * linearRoughness;
float GGXV = NoL * sqrt((NoV - a2 * NoV) * NoV + a2);
float GGXL = NoV * sqrt((NoL - a2 * NoL) * NoL + a2);
return 0.5 / (GGXV + GGXL);
}
vec3 F_Schlick(const vec3 f0, float VoH) {
// Schlick 1994, "An Inexpensive BRDF Model for Physically-Based Rendering"
return f0 + (vec3(1.0) - f0) * pow5(1.0 - VoH);
}
vec3 F_SchlickRoughness(vec3 F0, float roughness, float VoH)
{
return F0 + (max(vec3(1.0 - roughness), F0) - F0) * pow(1.0 - VoH, 5.0);
}
float F_Schlick(float f0, float f90, float VoH) {
return f0 + (f90 - f0) * pow5(1.0 - VoH);
}
float Fd_Burley(float linearRoughness, float NoV, float NoL, float LoH) {
// Burley 2012, "Physically-Based Shading at Disney"
float f90 = 0.5 + 2.0 * linearRoughness * LoH * LoH;
float lightScatter = F_Schlick(1.0, f90, NoL);
float viewScatter = F_Schlick(1.0, f90, NoV);
return lightScatter * viewScatter * (1.0 / PI);
}
vec2 PrefilteredDFG_Karis(float roughness, float NoV) {
//https://www.shadertoy.com/view/XlKSDR
// Karis 2014, "Physically Based Material on Mobile"
const vec4 c0 = vec4(-1.0, -0.0275, -0.572, 0.022);
const vec4 c1 = vec4( 1.0, 0.0425, 1.040, -0.040);
vec4 r = roughness * c0 + c1;
float a004 = min(r.x * r.x, exp2(-9.28 * NoV)) * r.x + r.y;
return vec2(-1.04, 1.04) * a004 + r.zw;
}
float saturate(float x) {
return clamp(x, 0.0, 1.0);
}
float G1V(float dotNV, float k)
{
return 1.0f/(dotNV*(1.0f-k)+k);
}
float ggx(vec3 N, vec3 V, vec3 L, float roughness, float F0)
{
float alpha = roughness*roughness;
vec3 H = normalize(V+L);
float dotNL = saturate(dot(N,L));
float dotNV = saturate(dot(N,V));
float dotNH = saturate(dot(N,H));
float dotLH = saturate(dot(L,H));
float F, D, vis;
// D
float alphaSqr = alpha*alpha;
float pi = 3.14159f;
float denom = dotNH * dotNH *(alphaSqr-1.0) + 1.0f;
D = alphaSqr/(pi * denom * denom);
// F
float dotLH5 = pow(1.0f-dotLH,5);
F = F0 + (1.0-F0)*(dotLH5);
// V
float k = alpha/2.0f;
vis = G1V(dotNL,k)*G1V(dotNV,k);
float specular = dotNL * D * F * vis;
return specular;
}
""".trimIndent()

View File

@@ -0,0 +1,99 @@
package org.openrndr.extra.dnk3
import org.openrndr.draw.RenderTarget
import org.openrndr.math.Matrix44
sealed class Shadows {
object None : Shadows()
abstract class MappedShadows(val mapSize: Int) : Shadows()
abstract class DepthMappedShadows(mapSize: Int) : MappedShadows(mapSize)
abstract class ColorMappedShadows(mapSize: Int) : MappedShadows(mapSize)
class Simple(mapSize: Int = 1024) : DepthMappedShadows(mapSize)
class PCF(mapSize: Int = 1024, val sampleCount: Int = 12) : DepthMappedShadows(mapSize)
class VSM(mapSize: Int = 1024) : ColorMappedShadows(mapSize)
}
interface ShadowLight {
var shadows: Shadows
fun projection(renderTarget: RenderTarget): Matrix44
fun view(node: SceneNode): Matrix44 {
return node.worldTransform.inversed
}
}
// shaders
fun Shadows.VSM.fs(index: Int) : String = """
|{
| vec4 smc = (p_lightTransform$index * vec4(v_worldPosition,1.0));
| vec3 lightProj = (smc.xyz/smc.w) * 0.5 + 0.5;
| if (lightProj.x > 0.0 && lightProj.x < 1.0 && lightProj.y > 0 && lightProj.y < 1) {
| vec2 moments = texture(p_lightShadowMap$index, lightProj.xy).xy;
| attenuation *= (chebyshevUpperBound(moments, length(Lr), 50.0));
| }
|}
""".trimMargin()
fun Shadows.Simple.fs(index: Int): String = """
|{
| vec4 smc = (p_lightTransform$index * vec4(v_worldPosition,1.0));
| vec3 lightProj = (smc.xyz/smc.w) * 0.5 + 0.5;
| if (lightProj.x > 0.0 && lightProj.x < 1.0 && lightProj.y > 0 && lightProj.y < 1) {
| vec3 smz = texture(p_lightShadowMap$index, lightProj.xy).rgb;
| float compToZ = (lightProj.z - 0.0020 * tan(acos(NoL))) - 0.0003;
| float closestDepth = smz.x;
| float shadow = compToZ >= closestDepth ? 0.0 : 1.0;
| attenuation *= shadow;
| }
|}
""".trimMargin()
fun Shadows.PCF.fs(index: Int): String = """
|{
| float lrl = length(Lr)/100.0;
| vec2 fTaps_Poisson[12];
| fTaps_Poisson[0] = vec2(-.326,-.406);
| fTaps_Poisson[1] = vec2(-.840,-.074);
| fTaps_Poisson[2] = vec2(-.696, .457);
| fTaps_Poisson[3] = vec2(-.203, .621);
| fTaps_Poisson[4] = vec2( .962,-.195);
| fTaps_Poisson[5] = vec2( .473,-.480);
| fTaps_Poisson[6] = vec2( .519, .767);
| fTaps_Poisson[7] = vec2( .185,-.893);
| fTaps_Poisson[8] = vec2( .507, .064);
| fTaps_Poisson[9] = vec2( .896, .412);
| fTaps_Poisson[10] = vec2(-.322,-.933);
| fTaps_Poisson[11] = vec2(-.792,-.598);
| vec4 smc = (p_lightTransform$index * vec4(v_worldPosition,1.0));
| vec3 lightProj = (smc.xyz/smc.w) * 0.5 + 0.5;
| if (lightProj.x > 0.0 && lightProj.x < 1.0 && lightProj.y > 0 && lightProj.y < 1) {
| vec3 smz = texture(p_lightShadowMap$index, lightProj.xy).rgb;
| vec2 stepSize = 1.0 / textureSize(p_lightShadowMap$index,0);
| float result = 0.0;
| float compToZ = (lightProj.z- 0.0020 * tan(acos(NoL))) - 0.0003;
| float noise = hash22(lightProj.xy*10.0).x;
| float r = noise * 3.1415926535 * 2.0;
| mat2 rot = mat2( vec2(cos(r), -sin(r)), vec2(sin(r),cos(r)));
| for (int i = 0; i < 12; ++i) {
| float depth = texture(p_lightShadowMap$index, lightProj.xy + rot*fTaps_Poisson[i]*i*lrl*stepSize ).r;
| result += step(compToZ, depth);
| }
| result /= 12;
| float currentDepth = lightProj.z;
| float closestDepth = smz.x;
| float shadow = result;// (currentDepth - 0.0020 * tan(acos(NoL))) - 0.0003 >= closestDepth ? 0.0 : 1.0;
| attenuation *= shadow;
| }
|}
""".trimMargin()
fun Shadows.fs(index: Int): String = when (this) {
is Shadows.PCF -> this.fs(index)
is Shadows.Simple -> this.fs(index)
is Shadows.VSM -> this.fs(index)
is Shadows.None -> ""
else -> TODO()
}
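// --- usage sketch (added for illustration; SpotLight is defined elsewhere in this module) ---
// Depth-mapped variants (Simple, PCF) make the renderer bind the shadow pass depth buffer,
// while VSM is color-mapped and binds the blurred moments buffer instead.
fun exampleEnableShadows(light: SpotLight) {
    light.shadows = Shadows.PCF(mapSize = 2048, sampleCount = 12)
}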

View File

@@ -0,0 +1,187 @@
package org.openrndr.extra.dnk3.cubemap
import org.openrndr.draw.*
import org.openrndr.color.ColorRGBa
import org.openrndr.internal.Driver
import org.openrndr.math.*
import org.openrndr.math.transforms.ortho
private val filterDrawStyle = DrawStyle().apply {
blendMode = BlendMode.REPLACE
depthWrite = false
depthTestPass = DepthTestPass.ALWAYS
stencil.stencilTest = StencilTest.DISABLED
}
private var filterQuad: VertexBuffer? = null
private var filterQuadFormat = vertexFormat {
position(2)
textureCoordinate(2)
}
/**
* Cubemap filter base class. Renders a full-screen quad into each side of the target cubemap.
*/
open class CubemapFilter(private val shader: Shader? = null, private val watcher: ShaderWatcher? = null) {
/**
* parameter map
*/
val parameters = mutableMapOf<String, Any>()
var padding = 0
var depthBufferOut: DepthBuffer? = null
companion object {
val filterVertexCode: String get() = Driver.instance.internalShaderResource("filter.vert")
}
open fun apply(source: Array<Cubemap>, target: Array<Cubemap>) {
if (target.isEmpty()) {
return
}
for (side in CubemapSide.values()) {
val renderTarget = renderTarget(target[0].width, target[0].width, 1.0) {}
shader?.begin()
shader?.uniform("sideNormal", side.forward)
shader?.uniform("sideUp", side.up)
shader?.uniform("sideRight", (side.forward cross side.up))
shader?.end()
target.forEach {
renderTarget.attach(it, side, 0)
}
for (i in 1 until target.size) {
renderTarget.blendMode(i, BlendMode.REPLACE)
}
apply(source, renderTarget)
depthBufferOut?.let {
renderTarget.attach(it)
}
if (depthBufferOut != null) {
renderTarget.detachDepthBuffer()
}
renderTarget.detachColorAttachments()
renderTarget.destroy()
}
}
fun apply(source: Array<Cubemap>, target: RenderTarget) {
val shader = if (this.watcher != null) watcher.shader!! else this.shader!!
target.bind()
if (filterQuad == null) {
val fq = VertexBuffer.createDynamic(filterQuadFormat, 6, Session.root)
fq.shadow.writer().apply {
write(Vector2(0.0, 1.0)); write(Vector2(0.0, 0.0))
write(Vector2(0.0, 0.0)); write(Vector2(0.0, 1.0))
write(Vector2(1.0, 0.0)); write(Vector2(1.0, 1.0))
write(Vector2(0.0, 1.0)); write(Vector2(0.0, 0.0))
write(Vector2(1.0, 1.0)); write(Vector2(1.0, 0.0))
write(Vector2(1.0, 0.0)); write(Vector2(1.0, 1.0))
}
fq.shadow.upload()
fq.shadow.destroy()
filterQuad = fq
}
shader.begin()
source.forEachIndexed { index, cubemap ->
cubemap.bind(index)
cubemap.filter(MinifyingFilter.LINEAR, MagnifyingFilter.LINEAR)
shader.uniform("tex$index", index)
}
Driver.instance.setState(filterDrawStyle)
shader.uniform("projectionMatrix", ortho(0.0, target.width.toDouble(), target.height.toDouble(), 0.0, -1.0, 1.0))
shader.uniform("targetSize", Vector2(target.width.toDouble(), target.height.toDouble()))
shader.uniform("padding", Vector2(padding.toDouble(), padding.toDouble()))
var textureIndex = source.size + 0
parameters.forEach { (uniform, value) ->
@Suppress("UNCHECKED_CAST")
when (value) {
is Boolean -> shader.uniform(uniform, value)
is Float -> shader.uniform(uniform, value)
is Double -> shader.uniform(uniform, value.toFloat())
is Matrix44 -> shader.uniform(uniform, value)
is Vector2 -> shader.uniform(uniform, value)
is Vector3 -> shader.uniform(uniform, value)
is Vector4 -> shader.uniform(uniform, value)
is ColorRGBa -> shader.uniform(uniform, value)
is Int -> shader.uniform(uniform, value)
is Matrix55 -> shader.uniform(uniform, value.floatArray)
is FloatArray -> shader.uniform(uniform, value)
// EJ: this is not so nice but I have no other ideas for this
is Array<*> -> if (value.size > 0) when (value[0]) {
is Vector2 -> shader.uniform(uniform, value as Array<Vector2>)
is Vector3 -> shader.uniform(uniform, value as Array<Vector3>)
is Vector4 -> shader.uniform(uniform, value as Array<Vector4>)
else -> throw IllegalArgumentException("unsupported array value: ${value[0]!!::class.java}")
//is ColorRGBa -> shader.uniform(uniform, value as Array<ColorRGBa>)
}
is DepthBuffer -> {
shader.uniform("$uniform", textureIndex)
value.bind(textureIndex)
textureIndex++
}
is ColorBuffer -> {
shader.uniform("$uniform", textureIndex)
value.bind(textureIndex)
textureIndex++
}
is Cubemap -> {
shader.uniform("$uniform", textureIndex)
value.bind(textureIndex)
textureIndex++
}
is ArrayTexture -> {
shader.uniform("$uniform", textureIndex)
value.bind(textureIndex)
textureIndex++
}
is BufferTexture -> {
shader.uniform("$uniform", textureIndex)
value.bind(textureIndex)
textureIndex++
}
}
}
Driver.instance.drawVertexBuffer(shader, listOf(filterQuad!!), DrawPrimitive.TRIANGLES, 0, 6)
shader.end()
target.unbind()
}
fun apply(source: Cubemap, target: Cubemap) = apply(arrayOf(source), arrayOf(target))
fun apply(source: Cubemap, target: Array<Cubemap>) = apply(arrayOf(source), target)
fun apply(source: Array<Cubemap>, target: Cubemap) = apply(source, arrayOf(target))
fun untrack() {
shader?.let { Session.active.untrack(shader) }
}
protected val format get() = filterQuadFormat
}

View File

@@ -0,0 +1,6 @@
package org.openrndr.extra.dnk3.cubemap
import org.openrndr.draw.filterShaderFromUrl
import org.openrndr.resourceUrl
class CubemapPassthrough : CubemapFilter(filterShaderFromUrl(resourceUrl("/shaders/cubemap-filters/cubemap-passthrough.frag")))

View File

@@ -0,0 +1,6 @@
package org.openrndr.extra.dnk3.cubemap
import org.openrndr.draw.filterShaderFromUrl
import org.openrndr.resourceUrl
class IrradianceConvolution : CubemapFilter(filterShaderFromUrl(resourceUrl("/shaders/cubemap-filters/irradiance-convolution.frag")))
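// --- usage sketch (added for illustration) ---
// Convolves an environment cubemap into an irradiance cubemap; both maps are created by the
// caller (for example with the cubemap() builder used elsewhere in this module, with a small
// size such as 32 for the irradiance target).
fun exampleConvolve(environment: org.openrndr.draw.Cubemap, irradiance: org.openrndr.draw.Cubemap) {
    // CubemapFilter.apply renders the convolution shader once per cubemap side
    IrradianceConvolution().apply(environment, irradiance)
}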

View File

@@ -0,0 +1,187 @@
@file:ShaderPhrases([])
package org.openrndr.extra.dnk3.cubemap
import org.openrndr.draw.*
import org.openrndr.extra.shaderphrases.annotations.ShaderPhrases
import org.openrndr.extra.shaderphrases.phraseResource
import org.openrndr.math.Vector3
import org.openrndr.math.max
import org.openrndr.resourceUrl
import java.nio.ByteBuffer
import java.nio.ByteOrder
import kotlin.math.sqrt
class SphericalHarmonics : Filter(filterShaderFromUrl(resourceUrl("/shaders/cubemap-filters/spherical-harmonics.frag"))) {
var input: Cubemap by parameters
}
/** based on https://andrew-pham.blog/2019/08/26/spherical-harmonics/ */
fun Cubemap.irradianceCoefficients(): Array<Vector3> {
val cubemap = this
require(cubemap.format == ColorFormat.RGB)
require(cubemap.type == ColorType.FLOAT32)
val result = Array(9) { Vector3.ZERO }
var buffer = ByteBuffer.allocateDirect(cubemap.width * cubemap.width * cubemap.format.componentCount * cubemap.type.componentSize)
buffer.order(ByteOrder.nativeOrder())
var weightSum = 0.0
for (side in CubemapSide.values()) {
//cubemap.side(side).read(buffer)
buffer.rewind()
cubemap.read(side, buffer)
buffer.rewind()
for (y in 0 until cubemap.width) {
for (x in 0 until cubemap.width) {
val rf = buffer.float.toDouble()
val gf = buffer.float.toDouble()
val bf = buffer.float.toDouble()
val L = Vector3(rf, gf, bf)
var u = (x + 0.5) / cubemap.width
var v = (y + 0.5) / cubemap.width
u = u * 2.0 - 1.0
v = v * 2.0 - 1.0
val temp = 1.0 + u * u + v * v
val weight = 4.0 / (sqrt(temp) * temp)
val N = cubemap.mapUVSToN(u, v, side)
val coefficients = genLightingCoefficientsForNormal(N, L)
for (i in 0 until 9) {
result[i] += coefficients[i] * weight
}
weightSum += weight
}
}
}
for (i in 0 until 9) {
result[i] = result[i] * (4.0 * Math.PI) / weightSum
}
return result;
}
fun genSHCoefficients(N: Vector3): DoubleArray {
val result = DoubleArray(9)
// Band 0
result[0] = 0.282095;
// Band 1
result[1] = 0.488603 * N.y
result[2] = 0.488603 * N.z
result[3] = 0.488603 * N.x
// Band 2
result[4] = 1.092548 * N.x * N.y
result[5] = 1.092548 * N.y * N.z
result[6] = 0.315392 * (3.0 * N.z * N.z - 1.0)
result[7] = 1.092548 * N.x * N.z
result[8] = 0.546274 * (N.x * N.x - N.y * N.y)
return result;
}
fun genLightingCoefficientsForNormal(N: Vector3, L: Vector3): Array<Vector3> {
val coefficients = genSHCoefficients(N)
val result = Array(9) { Vector3.ZERO }
for (i in 0 until 9) {
result[i] = L * coefficients[i]
}
return result
}
fun Cubemap.mapUVSToN(u: Double, v: Double, side: CubemapSide): Vector3 {
return (side.right * u + side.up * v + side.forward).normalized
}
// Evaluates the irradiance perceived in the provided direction
// Analytic method from http://www1.cs.columbia.edu/~ravir/papers/envmap/envmap.pdf eq. 13
//
fun evaluateSHIrradiance(direction: Vector3, _SH: Array<Vector3>): Vector3 {
val c1 = 0.42904276540489171563379376569857; // 4 * Â2.Y22 = 1/4 * sqrt(15.PI)
val c2 = 0.51166335397324424423977581244463; // 0.5 * Â1.Y10 = 1/2 * sqrt(PI/3)
val c3 = 0.24770795610037568833406429782001; // Â2.Y20 = 1/16 * sqrt(5.PI)
val c4 = 0.88622692545275801364908374167057; // Â0.Y00 = 1/2 * sqrt(PI)
val x = direction.x;
val y = direction.y;
val z = direction.z;
return max(Vector3.ZERO,
_SH[8] * (c1 * (x * x - y * y)) // c1.L22.(x²-y²)
+ _SH[6] * (c3 * (3.0 * z * z - 1)) // c3.L20.(3.z² - 1)
+ _SH[0] * c4 // c4.L00
+ (_SH[4] * x * y + _SH[7] * x * z + _SH[5] * y * z) * 2.0 * c1 // 2.c1.(L2-2.xy + L21.xz + L2-1.yz)
+ (_SH[3] * x + _SH[1] * y + _SH[2] * z) * c2 * 2.0); // 2.c2.(L11.x + L1-1.y + L10.z)
}
val glslEvaluateSH: String by phraseResource("/phrases/irradiance-sh/evaluate-sh.frag")
val glslFetchSH: String by phraseResource("/phrases/irradiance-sh/fetch-sh.frag")
val glslFetchSH0: String by phraseResource("/phrases/irradiance-sh/fetch-sh0.frag")
fun genGlslGatherSH(xProbes: Int, yProbes: Int, zProbes: Int, spacing: Double = 1.0, offset: Vector3) = """
ivec3 gridCoordinates(vec3 p, out vec3 f) {
float x = (p.x - ${offset.x}) / $spacing;
float y = (p.y - ${offset.y})/ $spacing;
float z = (p.z - ${offset.z}) / $spacing;
int ix = int(floor(x)) + $xProbes / 2;
int iy = int(floor(y)) + $yProbes / 2;
int iz = int(floor(z)) + $zProbes / 2;
f.x = fract((x));
f.y = fract((y));
f.z = fract((z));
return ivec3(ix, iy, iz);
}
int gridIndex(ivec3 p) {
ivec3 c = clamp(p, ivec3(0), ivec3(${xProbes - 1}, ${yProbes - 1}, ${zProbes - 1}));
return c.x + c.y * $xProbes + c.z * ${xProbes * yProbes};
}
void gatherSH(samplerBuffer btex, vec3 p, out vec3[9] blend) {
vec3[9] c000;
vec3[9] c001;
vec3[9] c010;
vec3[9] c011;
vec3[9] c100;
vec3[9] c101;
vec3[9] c110;
vec3[9] c111;
vec3 f;
ivec3 io = gridCoordinates(p, f);
fetchSH(btex, gridIndex(io + ivec3(0,0,0)), c000);
fetchSH(btex, gridIndex(io + ivec3(0,0,1)), c001);
fetchSH(btex, gridIndex(io + ivec3(0,1,0)), c010);
fetchSH(btex, gridIndex(io + ivec3(0,1,1)), c011);
fetchSH(btex, gridIndex(io + ivec3(1,0,0)), c100);
fetchSH(btex, gridIndex(io + ivec3(1,0,1)), c101);
fetchSH(btex, gridIndex(io + ivec3(1,1,0)), c110);
fetchSH(btex, gridIndex(io + ivec3(1,1,1)), c111);
for (int i = 0; i < 9; ++i) {
blend[i] = mix( mix( mix(c000[i], c001[i], f.z), mix(c010[i], c011[i], f.z), f.y), mix( mix(c100[i], c101[i], f.z), mix(c110[i], c111[i], f.z), f.y), f.x);
}
}
""".trimIndent()
val glslGridCoordinates: String by phraseResource("/phrases/irradiance-sh/grid-coordinates.frag")
val glslGridIndex: String by phraseResource("/phrases/irradiance-sh/grid-index.frag")
val glslGatherSH: String by phraseResource("/phrases/irradiance-sh/gather-sh.frag")
val glslGatherSH0: String by phraseResource("/phrases/irradiance-sh/gather-sh0.frag")
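// --- usage sketch (added for illustration) ---
// Projects a cubemap onto 9 spherical-harmonic coefficients on the CPU and evaluates the
// resulting irradiance for a single direction; the cubemap must be RGB/FLOAT32 as required above.
fun exampleIrradianceSample(environment: Cubemap): Vector3 {
    val sh = environment.irradianceCoefficients() // 9 Vector3 coefficients
    return evaluateSHIrradiance(Vector3.UNIT_Y, sh) // irradiance seen looking straight up
}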

View File

@@ -0,0 +1,12 @@
package org.openrndr.extra.dnk3.dsl
import org.openrndr.extra.dnk3.PBRMaterial
fun pbrMaterial(builder: PBRMaterial.() -> Unit): PBRMaterial {
return PBRMaterial().apply { builder() }
}
fun test() {
pbrMaterial {
}
}
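// --- usage sketch (added for illustration; values are arbitrary) ---
fun exampleMaterial(): PBRMaterial = pbrMaterial {
    color = org.openrndr.color.ColorRGBa.PINK
    metalness = 0.0
    roughness = 0.6
}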

View File

@@ -0,0 +1,88 @@
package org.openrndr.extra.dnk3.dsl
import kotlinx.coroutines.yield
import org.openrndr.draw.DrawPrimitive
import org.openrndr.draw.VertexBuffer
import org.openrndr.extra.dnk3.*
import org.openrndr.launch
fun scene(builder: Scene.() -> Unit): Scene {
val scene = Scene()
scene.builder()
return scene
}
fun SceneNode.node(builder: SceneNode.() -> Unit): SceneNode {
val node = SceneNode()
node.builder()
children.add(node)
return node
}
fun SceneNode.hemisphereLight(builder: HemisphereLight.() -> Unit): HemisphereLight {
val hemisphereLight = HemisphereLight()
hemisphereLight.builder()
entities.add(hemisphereLight)
return hemisphereLight
}
fun SceneNode.directionalLight(builder: DirectionalLight.() -> Unit): DirectionalLight {
val directionalLight = DirectionalLight()
directionalLight.builder()
this.entities.add(directionalLight)
return directionalLight
}
fun SceneNode.pointLight(builder: PointLight.() -> Unit): PointLight {
val pointLight = PointLight()
pointLight.builder()
this.entities.add(pointLight)
return pointLight
}
fun SceneNode.spotLight(builder: SpotLight.() -> Unit): SpotLight {
val spotLight = SpotLight()
spotLight.builder()
this.entities.add(spotLight)
return spotLight
}
class SimpleMeshBuilder {
var vertexBuffer: VertexBuffer? = null
var primitive = DrawPrimitive.TRIANGLES
var material: Material? = null
fun build(): Mesh {
val geometry = Geometry(
listOf(vertexBuffer ?: error("no vertex buffer")),
null,
primitive,
0,
vertexBuffer?.vertexCount ?: error("no vertex buffer")
)
val primitive = MeshPrimitive(geometry, material ?: error("no material"))
return Mesh(listOf(primitive))
}
}
fun SceneNode.simpleMesh(builder: SimpleMeshBuilder.() -> Unit): Mesh {
val mesh = SimpleMeshBuilder().apply { builder() }.build()
entities.add(mesh)
return mesh
}
fun SceneNode.pathMesh(builder: PathMesh.() -> Unit): PathMesh {
val pathMesh = PathMesh(mutableListOf(), DummyMaterial(), 1.0)
pathMesh.builder()
entities.add(pathMesh)
return pathMesh
}
fun Scene.update(function: () -> Unit) {
dispatcher.launch {
while (true) {
function()
yield()
}
}
}
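// --- usage sketch (added for illustration) ---
// Assembles a small scene with the builders above; the cube geometry is supplied by the
// caller as any triangle VertexBuffer (for instance one produced by orx-mesh-generators).
fun exampleScene(cube: VertexBuffer): Scene = scene {
    root.node {
        hemisphereLight { }
        pointLight { }
    }
    root.node {
        simpleMesh {
            vertexBuffer = cube
            material = PBRMaterial().apply { roughness = 0.8 }
        }
    }
    update {
        // runs once per frame on the scene dispatcher; rotate nodes, animate lights, etc.
    }
}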

View File

@@ -0,0 +1,109 @@
package org.openrndr.extra.dnk3.features
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.cubemap.irradianceCoefficients
import org.openrndr.math.Matrix44
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import java.io.File
import java.nio.ByteBuffer
import java.nio.ByteOrder
data class IrradianceSH(val xCount: Int, val yCount: Int, val zCount: Int, val spacing: Double, val offset: Vector3, val cubemapSize: Int) : Feature {
override fun <T : Feature> update(drawer: Drawer, sceneRenderer: SceneRenderer, scene: Scene, feature: T, context: RenderContext) {
sceneRenderer.processIrradiance(drawer, scene, feature as IrradianceSH, context)
}
var shMap: BufferTexture? = null
val probeCount
get() = xCount * yCount * zCount
}
fun Scene.addIrradianceSH(xCount: Int,
yCount: Int,
zCount: Int,
spacing: Double,
offset: Vector3 = Vector3.ZERO,
cubemapSize: Int = 256
) {
features.add(IrradianceSH(xCount * 2 + 1, yCount * 2 + 1, zCount * 2 + 1, spacing, offset, cubemapSize))
var probeID = 0
for (k in -zCount..zCount) {
for (j in -yCount..yCount) {
for (i in -xCount..xCount) {
val probeNode = SceneNode()
probeNode.transform = transform {
translate(offset)
translate(i * spacing, j * spacing, k * spacing)
}
probeNode.entities.add(IrradianceProbe())
probeID++
root.children.add(probeNode)
}
}
}
}
private fun SceneRenderer.processIrradiance(drawer: Drawer, scene: Scene, feature: IrradianceSH, context: RenderContext) {
val irradianceProbes = scene.root.findContent { this as? IrradianceProbe }
val irradianceProbePositions = irradianceProbes.map { it.node.worldPosition }
if (feature.shMap == null && irradianceProbes.isNotEmpty()) {
val hash = scene.hash()
val cached = File("data/scene-cache/sh-$hash.orb")
if (cached.exists()) {
feature.shMap = loadBufferTexture(cached)
} else {
var probeID = 0
val tempCubemap = cubemap(feature.cubemapSize, format = ColorFormat.RGB, type = ColorType.FLOAT32)
var cubemapDepthBuffer = depthBuffer(feature.cubemapSize, feature.cubemapSize, DepthFormat.DEPTH16, BufferMultisample.Disabled)
feature.shMap = bufferTexture(irradianceProbes.size * 9, format = ColorFormat.RGB, type = ColorType.FLOAT32)
val buffer = ByteBuffer.allocateDirect(irradianceProbePositions.size * 9 * 3 * 4)
buffer.order(ByteOrder.nativeOrder())
for ((node, probe) in irradianceProbes) {
if (probe.dirty) {
val pass = IrradianceProbePass
val materialContext = MaterialContext(pass, context.lights, emptyList(), shadowLightTargets, emptyMap(), 0)
val position = node.worldPosition
for (side in CubemapSide.values()) {
val target = renderTarget(feature.cubemapSize, feature.cubemapSize) {
//this.colorBuffer(tempCubemap.side(side))
this.cubemap(tempCubemap, side)
this.depthBuffer(cubemapDepthBuffer)
}
drawer.isolatedWithTarget(target) {
drawer.clear(ColorRGBa.BLACK)
drawer.projection = probe.projectionMatrix
drawer.view = Matrix44.IDENTITY
drawer.model = Matrix44.IDENTITY
drawer.lookAt(position, position + side.forward, side.up)
drawPass(drawer, pass, materialContext, context)
}
target.detachDepthBuffer()
target.detachColorAttachments()
target.destroy()
}
val coefficients = tempCubemap.irradianceCoefficients()
for (coef in coefficients) {
buffer.putVector3((coef))
}
probeID++
println("$probeID / ${irradianceProbePositions.size}")
probe.dirty = false
}
}
feature.shMap?.let {
buffer.rewind()
it.write(buffer)
it.saveToFile(File("data/scene-cache/sh-$hash.orb"))
}
}
}
}
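// --- usage sketch (added for illustration; counts and spacing are arbitrary) ---
// Adds a grid of irradiance probes centred on the origin; counts are per half-axis, so this
// creates (2*2+1) x (2*1+1) x (2*2+1) probes. The SH map is baked on the first rendered
// frame and cached under data/scene-cache/.
fun exampleAddIrradianceProbes(scene: Scene) {
    scene.addIrradianceSH(xCount = 2, yCount = 1, zCount = 2, spacing = 2.0,
        offset = Vector3.ZERO, cubemapSize = 128)
}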

View File

@@ -0,0 +1,71 @@
package org.openrndr.extra.dnk3.features
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.*
import org.openrndr.math.Matrix44
import org.openrndr.math.Vector3
data class VoxelConeTracing(val xCount: Int, val yCount: Int, val zCount: Int, val spacing: Double, val offset: Vector3) : Feature {
var voxelMap: VolumeTexture? = null
var voxelRenderTarget: RenderTarget? = null
override fun <T : Feature> update(drawer: Drawer, sceneRenderer: SceneRenderer, scene: Scene, feature: T, context: RenderContext) {
sceneRenderer.processVoxelConeTracing(drawer, scene, this, context)
}
var initialized = false
val voxelPass = RenderPass(listOf(VoxelFacet(this)), renderOpaque = true, renderTransparent = false, depthWrite = false, skipTarget = true)
}
fun Scene.addVoxelConeTracing(xCount: Int, yCount: Int, zCount: Int, spacing: Double, offset: Vector3 = Vector3.ZERO) : VoxelConeTracing {
val feature = VoxelConeTracing(xCount, yCount, zCount, spacing, offset)
features.add(feature)
return feature
}
class VoxelFacet(val voxelConeTracing: VoxelConeTracing) : ColorBufferFacetCombiner(setOf(FacetType.DIFFUSE, FacetType.SPECULAR, FacetType.EMISSIVE), "color", ColorFormat.RGBa, ColorType.FLOAT16) {
override fun generateShader() = """
vec3 finalColor = (max(vec3(0.0), f_diffuse.rgb) + max(vec3(0.0), f_emission.rgb) + max(vec3(0.0), f_ambient.rgb));
vec3 p = v_worldPosition;
{
float x = (p.x - ${voxelConeTracing.offset.x}) / ${voxelConeTracing.spacing};
float y = (p.y - ${voxelConeTracing.offset.y}) / ${voxelConeTracing.spacing};
float z = (p.z - ${voxelConeTracing.offset.z}) / ${voxelConeTracing.spacing};
int ix = int(floor(x+0.5)) + ${voxelConeTracing.xCount} / 2;
int iy = int(floor(y+0.5)) + ${voxelConeTracing.yCount} / 2;
int iz = int(floor(z+0.5)) + ${voxelConeTracing.zCount} / 2;
imageStore(p_voxelMap, ivec3(ix, iy, iz), vec4(finalColor, 1.0));
}
"""
}
private fun SceneRenderer.processVoxelConeTracing(drawer: Drawer, scene: Scene, feature: VoxelConeTracing, context: RenderContext) {
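// lazily create the volume texture and a throwaway render target, then rasterize the scene once
// from the six cubemap-side directions with an orthographic projection; the VoxelFacet combiner
// scatters the shaded colors into the volume texture through imageStore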
if (feature.voxelMap == null) {
feature.voxelMap = volumeTexture(feature.xCount * 2 + 1, feature.yCount * 2 + 1, feature.zCount * 2 + 1, format = ColorFormat.RGBa, type = ColorType.FLOAT16)
}
if (feature.voxelRenderTarget == null) {
feature.voxelRenderTarget = renderTarget(2048, 2048, 1.0, BufferMultisample.SampleCount(8)) {
colorBuffer()
}
}
if (!feature.initialized) {
println("drawing voxelmap")
for (side in CubemapSide.values()) {
drawer.isolatedWithTarget(feature.voxelRenderTarget ?: error("no render target")) {
val pass = feature.voxelPass
val materialContext = MaterialContext(pass, context.lights, emptyList(), shadowLightTargets, emptyMap(), 0)
drawer.clear(ColorRGBa.BLACK)
drawer.ortho(-10.0, 10.0, -10.0, 10.0, -40.0, 40.0)
drawer.view = Matrix44.IDENTITY
drawer.model = Matrix44.IDENTITY
val position = Vector3.ZERO
drawer.lookAt(position + side.forward*40.0, position , side.up)
drawPass(drawer, pass, materialContext, context) {
it.parameter("voxelMap", feature.voxelMap!!.imageBinding(0, ImageAccess.WRITE))
}
}
}
feature.initialized = true
}
}

View File

@@ -0,0 +1,47 @@
package org.openrndr.extra.dnk3.gltf
import com.google.gson.Gson
import java.io.File
import java.io.RandomAccessFile
import java.nio.ByteBuffer
import java.nio.ByteOrder
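// GLB container layout: a 12-byte little-endian header (magic "glTF", version, total length)
// followed by length/type-prefixed chunks; the first chunk carries the JSON document, an
// optional chunk of type 0x004E4942 ("BIN") carries the binary buffer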
fun loadGltfFromGlbFile(file: File): GltfFile {
val channel = RandomAccessFile(file, "r").channel
val headerBuffer = ByteBuffer.allocate(12).order(ByteOrder.nativeOrder())
headerBuffer.rewind()
channel.read(headerBuffer)
headerBuffer.rewind()
val magic = headerBuffer.int
val version = headerBuffer.int
val length = headerBuffer.int
fun readChunk(): ByteBuffer {
val chunkHeader = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder())
channel.read(chunkHeader)
chunkHeader.rewind()
val chunkLength = chunkHeader.int
val chunkType = chunkHeader.int
val chunkBuffer =
if (chunkType == 0x004E4942) ByteBuffer.allocateDirect(chunkLength) else ByteBuffer.allocate(chunkLength)
channel.read(chunkBuffer)
chunkBuffer.order(ByteOrder.nativeOrder())
return chunkBuffer
}
val jsonBuffer = readChunk()
jsonBuffer.rewind()
val jsonByteArray = ByteArray(jsonBuffer.capacity())
jsonBuffer.get(jsonByteArray)
val json = String(jsonByteArray)
val gson = Gson()
val bufferBuffer = if (channel.position() < length) readChunk() else null
return gson.fromJson(json, GltfFile::class.java).apply {
this.file = file
this.bufferBuffer = bufferBuffer
}
}

View File

@@ -0,0 +1,307 @@
@file:Suppress("MemberVisibilityCanBePrivate", "unused")
package org.openrndr.extra.dnk3.gltf
import com.google.gson.Gson
import org.openrndr.draw.*
import java.io.File
import java.io.RandomAccessFile
import java.nio.Buffer
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.util.*
import kotlin.collections.LinkedHashMap
import kotlin.math.max
const val GLTF_FLOAT = 5126
const val GLTF_UNSIGNED_INT = 5125
const val GLTF_INT = 5124
const val GLTF_UNSIGNED_SHORT = 5123
const val GLTF_SHORT = 5122
const val GLTF_UNSIGNED_BYTE = 5121
const val GLTF_BYTE = 5120
const val GLTF_ARRAY_BUFFER = 34962
const val GLTF_ELEMENT_ARRAY_BUFFER = 34963
data class GltfAsset(val generator: String?, val version: String?)
data class GltfScene(val nodes: IntArray)
data class GltfNode(val name: String?,
val children: IntArray?,
val matrix: DoubleArray?,
val scale: DoubleArray?,
val rotation: DoubleArray?,
val translation: DoubleArray?,
val mesh: Int?,
val skin: Int?,
val camera: Int?,
val extensions: GltfNodeExtensions?)
data class KHRLightsPunctualIndex(val light: Int)
data class GltfNodeExtensions(val KHR_lights_punctual: KHRLightsPunctualIndex?) {
}
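/**
 * A glTF mesh primitive; [createDrawCommand] resolves its accessors into an interleaved
 * vertex buffer plus an optional index buffer and a matching [DrawPrimitive].
 */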
data class GltfPrimitive(val attributes: LinkedHashMap<String, Int>, val indices: Int?, val mode: Int?, val material: Int) {
fun createDrawCommand(gltfFile: GltfFile): GltfDrawCommand {
val indexBuffer = indices?.let { indices ->
val accessor = gltfFile.accessors[indices]
val indexType = when (accessor.componentType) {
GLTF_UNSIGNED_SHORT -> IndexType.INT16
GLTF_UNSIGNED_INT -> IndexType.INT32
else -> error("unsupported index type: ${accessor.componentType}")
}
val bufferView = gltfFile.bufferViews[accessor.bufferView]
val buffer = gltfFile.buffers[bufferView.buffer]
val contents = buffer.contents(gltfFile)
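// the casts to java.nio.Buffer are presumably there for JDK 8 compatibility: ByteBuffer only
// gained covariant position/limit overrides in JDK 9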
(contents as Buffer).limit(contents.capacity())
(contents as Buffer).position((bufferView.byteOffset ?: 0) + (accessor.byteOffset))
(contents as Buffer).limit((bufferView.byteOffset ?: 0) + (accessor.byteOffset)
+ accessor.count * indexType.sizeInBytes)
val ib = indexBuffer(accessor.count, indexType)
ib.write(contents)
ib
}
var maxCount = 0
val accessors = mutableListOf<GltfAccessor>()
val format = vertexFormat {
for ((name, index) in attributes.toSortedMap()) {
val accessor = gltfFile.accessors[index]
maxCount = max(accessor.count, maxCount)
when (name) {
"NORMAL" -> {
normal(3)
accessors.add(accessor)
}
"POSITION" -> {
position(3)
accessors.add(accessor)
}
"TANGENT" -> {
attribute("tangent", VertexElementType.VECTOR4_FLOAT32)
accessors.add(accessor)
}
"TEXCOORD_0" -> {
val dimensions = when (accessor.type) {
"SCALAR" -> 1
"VEC2" -> 2
"VEC3" -> 3
else -> error("unsupported texture coordinate type ${accessor.type}")
}
textureCoordinate(dimensions, 0)
accessors.add(accessor)
}
"JOINTS_0" -> {
val type = when (Pair(accessor.type, accessor.componentType)) {
Pair("VEC4", GLTF_UNSIGNED_BYTE) -> VertexElementType.VECTOR4_UINT8
Pair("VEC4", GLTF_UNSIGNED_SHORT) -> VertexElementType.VECTOR4_UINT16
else -> error("not supported ${accessor.type} / ${accessor.componentType}")
}
attribute("joints", type)
accessors.add(accessor)
}
"WEIGHTS_0" -> {
val type = when (Pair(accessor.type, accessor.componentType)) {
Pair("VEC4", GLTF_FLOAT) -> VertexElementType.VECTOR4_FLOAT32
else -> error("not supported ${accessor.type} / ${accessor.componentType}")
}
attribute("weights", type)
accessors.add(accessor)
}
}
}
}
val buffers =
accessors.map { it.bufferView }
.distinct()
.associate {
Pair(gltfFile.bufferViews[it].buffer,
gltfFile.buffers[gltfFile.bufferViews[it].buffer].contents(gltfFile))
}
val vb = vertexBuffer(format, maxCount)
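// interleave the vertex data: for every vertex, copy the raw bytes of each attribute from its
// (possibly strided) buffer view into the single destination vertex buffer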
vb.put {
for (i in 0 until maxCount) {
for (a in accessors) {
val bufferView = gltfFile.bufferViews[a.bufferView]
val buffer = buffers[bufferView.buffer] ?: error("no buffer ${bufferView.buffer}")
val componentSize = when (a.componentType) {
GLTF_BYTE, GLTF_UNSIGNED_BYTE -> 1
GLTF_SHORT, GLTF_UNSIGNED_SHORT -> 2
GLTF_FLOAT, GLTF_UNSIGNED_INT, GLTF_INT -> 4
else -> error("unsupported type")
}
val componentCount = when (a.type) {
"SCALAR" -> 1
"VEC2" -> 2
"VEC3" -> 3
"VEC4" -> 4
"MAT2" -> 4
"MAT3" -> 9
"MAT4" -> 16
else -> error("unsupported type")
}
val size = componentCount * componentSize
val offset = (bufferView.byteOffset ?: 0) + a.byteOffset + i * (bufferView.byteStride ?: size)
copyBuffer(buffer, offset, size)
}
}
}
val drawPrimitive = when (mode) {
null, 4 -> DrawPrimitive.TRIANGLES
5 -> DrawPrimitive.TRIANGLE_STRIP
else -> error("unsupported mode $mode")
}
return GltfDrawCommand(vb, indexBuffer, drawPrimitive, indexBuffer?.indexCount ?: maxCount)
}
}
data class GltfMesh(val primitives: List<GltfPrimitive>, val name: String) {
fun createDrawCommands(gltfFile: GltfFile): List<GltfDrawCommand> {
return primitives.map { it.createDrawCommand(gltfFile) }
}
}
data class GltfPbrMetallicRoughness(val baseColorFactor: DoubleArray?,
val baseColorTexture: GltfMaterialTexture?,
var metallicRoughnessTexture: GltfMaterialTexture?,
val roughnessFactor: Double?,
val metallicFactor: Double?)
data class GltfMaterialTexture(val index: Int, val scale: Double?, val texCoord: Int?)
data class GltfImage(val uri: String?, val bufferView: Int?)
data class GltfSampler(val magFilter: Int, val minFilter: Int, val wrapS: Int, val wrapT: Int)
data class GltfTexture(val sampler: Int, val source: Int)
data class GltfMaterial(val name: String,
val alphaMode: String?,
val doubleSided: Boolean?,
val normalTexture: GltfMaterialTexture?,
val occlusionTexture: GltfMaterialTexture?,
val emissiveTexture: GltfMaterialTexture?,
val emissiveFactor: DoubleArray?,
val pbrMetallicRoughness: GltfPbrMetallicRoughness?,
val extensions: GltfMaterialExtensions?
)
data class GltfMaterialExtensions(
val KHR_materials_pbrSpecularGlossiness: KhrMaterialsPbrSpecularGlossiness?
)
class KhrMaterialsPbrSpecularGlossiness(val diffuseFactor: DoubleArray?, val diffuseTexture: GltfMaterialTexture?)
data class GltfBufferView(val buffer: Int,
val byteOffset: Int?,
val byteLength: Int,
val byteStride: Int?,
val target: Int)
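// a glTF buffer is backed by either a base64 data URI, an external binary file next to the
// .gltf, or the BIN chunk embedded in a .glb container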
data class GltfBuffer(val byteLength: Int, val uri: String?) {
fun contents(gltfFile: GltfFile): ByteBuffer = if (uri != null) {
if (uri.startsWith("data:")) {
val base64 = uri.substring(uri.indexOf(",") + 1)
val decoded = Base64.getDecoder().decode(base64)
val buffer = ByteBuffer.allocateDirect(decoded.size)
buffer.order(ByteOrder.nativeOrder())
buffer.put(decoded)
buffer.rewind()
buffer
} else {
val raf = RandomAccessFile(File(gltfFile.file.parentFile, uri), "r")
val buffer = ByteBuffer.allocateDirect(byteLength)
buffer.order(ByteOrder.nativeOrder())
buffer.rewind()
raf.channel.read(buffer)
buffer.rewind()
buffer
}
} else {
gltfFile.bufferBuffer ?: error("no embedded buffer from glb")
}
}
data class GltfDrawCommand(val vertexBuffer: VertexBuffer, val indexBuffer: IndexBuffer?, val primitive: DrawPrimitive, var vertexCount: Int)
data class GltfAccessor(
val bufferView: Int,
val byteOffset: Int,
val componentType: Int,
val count: Int,
val max: DoubleArray,
val min: DoubleArray,
val type: String
)
data class GltfAnimation(val name: String?, val channels: List<GltfChannel>, val samplers: List<GltfAnimationSampler>)
data class GltfAnimationSampler(val input: Int, val interpolation: String, val output: Int)
data class GltfChannelTarget(val node: Int?, val path: String?)
data class GltfChannel(val sampler: Int, val target: GltfChannelTarget)
data class GltfSkin(val inverseBindMatrices: Int, val joints: IntArray, val skeleton: Int)
data class KHRLightsPunctualLight(val color: DoubleArray?, val type: String, val intensity: Double?, val range: Double, val spot: KHRLightsPunctualLightSpot?)
data class KHRLightsPunctualLightSpot(val innerConeAngle: Double?, val outerConeAngle: Double?)
data class KHRLightsPunctual(val lights: List<KHRLightsPunctualLight>)
data class GltfExtensions(val KHR_lights_punctual: KHRLightsPunctual?)
data class GltfCameraPerspective(val aspectRatio: Double?, val yfov: Double, val zfar: Double?, val znear: Double)
data class GltfCameraOrthographic(val xmag: Double, val ymag: Double, val zfar: Double, val znear: Double)
data class GltfCamera(val name: String?, val type: String, val perspective: GltfCameraPerspective?, val orthographic: GltfCameraOrthographic?)
class GltfFile(
val asset: GltfAsset?,
val scene: Int?,
val scenes: List<GltfScene>,
val nodes: List<GltfNode>,
val meshes: List<GltfMesh>,
val accessors: List<GltfAccessor>,
val materials: List<GltfMaterial>,
val bufferViews: List<GltfBufferView>,
val buffers: List<GltfBuffer>,
val images: List<GltfImage>?,
val textures: List<GltfTexture>?,
val samplers: List<GltfSampler>?,
val animations: List<GltfAnimation>?,
val skins: List<GltfSkin>?,
val extensions: GltfExtensions?,
val cameras: List<GltfCamera>?
) {
@Transient
lateinit var file: File
@Transient
var bufferBuffer: ByteBuffer? = null
}
fun loadGltfFromFile(file: File): GltfFile = when (file.extension) {
"gltf" -> {
val gson = Gson()
val json = file.readText()
gson.fromJson(json, GltfFile::class.java).apply {
this.file = file
}
}
"glb" -> {
loadGltfFromGlbFile(file)
}
else -> error("extension ${file.extension} not supported in ${file}")
}

View File

@@ -0,0 +1,439 @@
package org.openrndr.extra.dnk3.gltf
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.keyframer.KeyframerChannelQuaternion
import org.openrndr.extra.keyframer.KeyframerChannelVector3
import org.openrndr.math.Matrix44
import org.openrndr.math.Quaternion
import org.openrndr.math.Vector3
import org.openrndr.math.transforms.transform
import java.io.File
import java.nio.Buffer
import java.nio.ByteOrder
import kotlin.reflect.KMutableProperty0
class SceneAnimation(var channels: List<AnimationChannel>) {
val duration: Double
get() {
return channels.maxByOrNull { it.duration }?.duration ?: 0.0
}
fun applyToTargets(input: Double) {
for (channel in channels) {
channel.applyToTarget(input)
}
}
}
sealed class AnimationChannel {
abstract val duration: Double
abstract fun applyToTarget(input: Double)
}
class QuaternionChannel(
val target: KMutableProperty0<Quaternion>,
val keyframer: KeyframerChannelQuaternion
) : AnimationChannel() {
override fun applyToTarget(input: Double) {
target.set(keyframer.value(input) ?: Quaternion.IDENTITY)
}
override val duration: Double
get() = keyframer.duration()
}
class Vector3Channel(
val target: KMutableProperty0<Vector3>,
val keyframer: KeyframerChannelVector3, val default: Vector3
) : AnimationChannel() {
override fun applyToTarget(input: Double) {
target.set(keyframer.value(input) ?: default)
}
override val duration: Double
get() = keyframer.duration()
}
class GltfSceneNode : SceneNode() {
var translation = Vector3.ZERO
var scale = Vector3.ONE
var rotation = Quaternion.IDENTITY
override fun toString(): String {
return "translation: $translation, scale: $scale, rotation: $rotation, children: ${children.size}, entities: ${entities} "
}
override var transform: Matrix44 = Matrix44.IDENTITY
get() = transform {
translate(translation)
multiply(rotation.matrix.matrix44)
scale(scale)
} * field
}
class GltfSceneData(val scenes: List<List<SceneNode>>, val animations: List<SceneAnimation>)
/** Tools to convert GltfFile into a DNK3 scene */
fun GltfFile.buildSceneNodes(): GltfSceneData {
val sceneImages = mutableMapOf<GltfImage, ColorBuffer>()
fun GltfImage.createSceneImage(): ColorBuffer {
return sceneImages.getOrPut(this) {
if (uri == null) {
bufferView?.let { bv ->
val localBufferView = bufferViews[bv]
val localBuffer = buffers[localBufferView.buffer].contents(this@buildSceneNodes)
require(localBufferView.byteOffset != null)
localBuffer.position(localBufferView.byteOffset)
localBuffer.limit(localBufferView.byteOffset + localBufferView.byteLength)
val cb = ColorBuffer.fromBuffer(localBuffer, null)
cb.generateMipmaps()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.anisotropy = 100.0
localBuffer.limit(localBuffer.capacity())
cb
} ?: error("no uri and no bufferview")
} else {
if (uri.startsWith("data:")) {
loadImage(uri)
} else {
loadImage(File(file.parent, uri))
}
}
}
}
val sceneMaterials = mutableMapOf<GltfMaterial, Material>()
fun GltfMaterial.createSceneMaterial(): Material = sceneMaterials.getOrPut(this) {
val material = PBRMaterial()
material.name = this.name
material.doubleSided = this.doubleSided ?: false
material.transparent = this.alphaMode != null
pbrMetallicRoughness?.let { pbr ->
material.roughness = pbr.roughnessFactor ?: 1.0
material.metalness = pbr.metallicFactor ?: 1.0
material.color = ColorRGBa.WHITE
pbr.baseColorFactor?.let {
material.color = ColorRGBa(it[0], it[1], it[2], it[3])
}
pbr.baseColorTexture?.let { texture ->
val cb = images!![textures!![texture.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.COLOR
)
material.textures.add(sceneTexture)
}
pbr.metallicRoughnessTexture?.let { texture ->
val cb = images!![textures!![texture.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.METALNESS_ROUGHNESS
)
material.textures.add(sceneTexture)
}
}
occlusionTexture?.let { texture ->
val cb = images!![textures!![texture.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.AMBIENT_OCCLUSION
)
material.textures.add(sceneTexture)
}
normalTexture?.let { texture ->
val cb = images!![textures!![texture.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(
texture = cb,
tangentInput = "va_tangent",
pre = "x_texCoord.y = 1.0-x_texCoord.y;"
), TextureTarget.NORMAL
)
material.textures.add(sceneTexture)
}
emissiveFactor?.let {
material.emission = ColorRGBa(it[0], it[1], it[2])
}
emissiveTexture?.let {
val cb = images!![textures!![it.index].source].createSceneImage()
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.EMISSION
)
material.textures.add(sceneTexture)
}
extensions?.let { ext ->
ext.KHR_materials_pbrSpecularGlossiness?.let { sg ->
sg.diffuseFactor?.let {
material.color = ColorRGBa(it[0], it[1], it[2], it[3])
}
sg.diffuseTexture?.let {
val cb = images!![textures!![it.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.COLOR
)
material.textures.add(sceneTexture)
}
occlusionTexture?.let { texture ->
val cb = images!![textures!![texture.index].source].createSceneImage()
cb.filter(MinifyingFilter.LINEAR_MIPMAP_LINEAR, MagnifyingFilter.LINEAR)
cb.wrapU = WrapMode.REPEAT
cb.wrapV = WrapMode.REPEAT
val sceneTexture = Texture(
ModelCoordinates(texture = cb, pre = "x_texCoord.y = 1.0-x_texCoord.y;"),
TextureTarget.AMBIENT_OCCLUSION
)
material.textures.add(sceneTexture)
}
}
}
emissiveFactor?.let {
material.emission = ColorRGBa(it[0], it[1], it[2], 1.0)
}
material
}
fun GltfPrimitive.createScenePrimitive(): MeshPrimitive {
val drawCommand = createDrawCommand(this@buildSceneNodes)
val geometry = Geometry(
listOf(drawCommand.vertexBuffer),
drawCommand.indexBuffer,
drawCommand.primitive,
0,
drawCommand.vertexCount
)
val material = materials.getOrNull(material)?.createSceneMaterial() ?: PBRMaterial()
return MeshPrimitive(geometry, material)
}
val sceneNodes = mutableMapOf<GltfNode, SceneNode>()
fun GltfNode.createSceneNode(): SceneNode = sceneNodes.getOrPut(this) {
val node = GltfSceneNode()
node.name = name ?: "no name"
node.translation = translation?.let { Vector3(it[0], it[1], it[2]) } ?: Vector3.ZERO
node.scale = scale?.let { Vector3(it[0], it[1], it[2]) } ?: Vector3.ONE
node.rotation = rotation?.let { Quaternion(it[0], it[1], it[2], it[3]) } ?: Quaternion.IDENTITY
matrix?.let {
node.transform = Matrix44.fromDoubleArray(it).transposed
}
for (child in children.orEmpty) {
val childNode = nodes.getOrNull(child) ?: error("child node not found: $child")
node.children.add(childNode.createSceneNode())
}
node
}
val sceneMeshes = mutableMapOf<GltfMesh, MeshBase>()
fun GltfMesh.createSceneMesh(skin: GltfSkin?): MeshBase = sceneMeshes.getOrPut(this) {
if (skin == null) {
Mesh(primitives.map {
it.createScenePrimitive()
})
} else {
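// skinned mesh: resolve joint and skeleton nodes, then read the MAT4 inverse bind matrices
// (stored column-major in glTF, hence the transpose) from the skin's accessor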
val joints = skin.joints.map { nodes[it].createSceneNode() }
val skeleton = nodes[skin.skeleton].createSceneNode()
val ibmAccessor = accessors[skin.inverseBindMatrices]
val ibmBufferView = bufferViews[ibmAccessor.bufferView]
val ibmBuffer = buffers[ibmBufferView.buffer]
val ibmData = ibmBuffer.contents(this@buildSceneNodes)
ibmData.order(ByteOrder.nativeOrder())
(ibmData as Buffer).position(ibmAccessor.byteOffset + (ibmBufferView.byteOffset ?: 0))
require(ibmAccessor.type == "MAT4")
require(ibmAccessor.componentType == GLTF_FLOAT)
require(ibmAccessor.count == joints.size)
val ibms = (0 until ibmAccessor.count).map {
val array = DoubleArray(16)
for (i in 0 until 16) {
array[i] = ibmData.float.toDouble()
}
Matrix44.fromDoubleArray(array).transposed
}
SkinnedMesh(primitives.map {
it.createScenePrimitive()
}, joints, skeleton, ibms)
}
}
fun GltfCamera.createSceneCamera(sceneNode: SceneNode): Camera {
return when (type) {
"perspective" -> {
PerspectiveCamera(sceneNode).apply {
aspectRatio = perspective?.aspectRatio ?: aspectRatio
far = perspective?.zfar ?: far
near = perspective?.znear ?: near
fov = perspective?.yfov?.let { Math.toDegrees(it) } ?: fov
}
}
"orthographic" -> {
OrthographicCamera(sceneNode).apply {
xMag = orthographic?.xmag ?: xMag
yMag = orthographic?.ymag ?: yMag
near = orthographic?.znear ?: near
far = orthographic?.zfar ?: far
}
}
else -> error("unsupported camera type: $type")
}
}
val scenes = scenes.map { scene ->
scene.nodes.map { node ->
val gltfNode = nodes.getOrNull(node) ?: error("node not found: $node")
gltfNode.createSceneNode()
}
}
for ((gltfNode, sceneNode) in sceneNodes) {
gltfNode.mesh?.let {
val skin = gltfNode.skin?.let { (skins!!)[it] }
sceneNode.entities.add(meshes[it].createSceneMesh(skin))
}
gltfNode.camera?.let {
sceneNode.entities.add(cameras!![it].createSceneCamera(sceneNode))
}
gltfNode.extensions?.let { exts ->
exts.KHR_lights_punctual?.let { lightIndex ->
extensions?.KHR_lights_punctual?.lights?.get(lightIndex.light)?.let { light ->
val sceneLight = when (light.type) {
"point" -> {
PointLight()
}
"directional" -> {
DirectionalLight().apply {
shadows = Shadows.PCF()
}
}
"spot" -> {
SpotLight().apply {
innerAngle = Math.toDegrees(light.spot!!.innerConeAngle ?: 0.0)
outerAngle = Math.toDegrees(light.spot.outerConeAngle ?: Math.PI / 4.0)
shadows = Shadows.PCF()
}
}
else -> error("unsupported light type ${light.type}")
}
sceneLight.apply {
val lightColor = (light.color ?: doubleArrayOf(1.0, 1.0, 1.0))
color = ColorRGBa(lightColor[0], lightColor[1], lightColor[2])
}
sceneNode.entities.add(sceneLight)
}
}
}
}
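// turn glTF animation channels into keyframer channels: the sampler's input accessor provides
// key times, the output accessor provides VEC3 translation/scale or VEC4 rotation values that
// are written back to the target GltfSceneNode's properties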
val sceneAnimations = animations?.map { animation ->
val animationChannels = animation.channels.mapNotNull { channel ->
val candidate = channel.target.node?.let { nodes[it] }?.createSceneNode() as? GltfSceneNode
candidate?.let { sceneNode ->
val sampler = animation.samplers[channel.sampler]
val inputAccessor = accessors[sampler.input]
val inputBufferView = bufferViews[inputAccessor.bufferView]
val inputData = buffers[inputBufferView.buffer].contents(this)
val outputAccessor = accessors[sampler.output]
val outputBufferView = bufferViews[outputAccessor.bufferView]
val outputData = buffers[outputBufferView.buffer].contents(this)
inputData.order(ByteOrder.nativeOrder())
outputData.order(ByteOrder.nativeOrder())
require(inputAccessor.count == outputAccessor.count)
when (channel.target.path) {
"scale", "translation" -> {
require(inputAccessor.type == "SCALAR")
require(outputAccessor.type == "VEC3")
val keyframer = KeyframerChannelVector3()
val inputOffset = (inputBufferView.byteOffset ?: 0) + (inputAccessor.byteOffset ?: 0)
val outputOffset = (outputBufferView.byteOffset ?: 0) + (outputAccessor.byteOffset ?: 0)
val inputStride = (inputBufferView.byteStride ?: 4)
val outputStride = (outputBufferView.byteStride ?: 12)
inputData.limit(inputData.capacity())
for (i in 0 until outputAccessor.count) {
val input = inputData.getFloat(inputOffset + i * inputStride).toDouble()
val outputX = outputData.getFloat(outputOffset + i * outputStride).toDouble()
val outputY = outputData.getFloat(outputOffset + i * outputStride + 4).toDouble()
val outputZ = outputData.getFloat(outputOffset + i * outputStride + 8).toDouble()
keyframer.add(input, Vector3(outputX, outputY, outputZ))
}
val target =
if (channel.target.path == "translation") sceneNode::translation else sceneNode::scale
val default = if (channel.target.path == "translation") Vector3.ZERO else Vector3.ONE
Vector3Channel(target, keyframer, default)
}
"rotation" -> {
require(inputAccessor.type == "SCALAR")
require(outputAccessor.type == "VEC4") {
"${outputAccessor.type}"
}
val keyframer = KeyframerChannelQuaternion()
val inputOffset = (inputBufferView.byteOffset ?: 0) + (inputAccessor.byteOffset ?: 0)
val outputOffset = (outputBufferView.byteOffset ?: 0) + (outputAccessor.byteOffset ?: 0)
val inputStride = (inputBufferView.byteStride ?: 4)
val outputStride = (outputBufferView.byteStride ?: 16)
for (i in 0 until outputAccessor.count) {
val input = inputData.getFloat(inputOffset + i * inputStride).toDouble()
val outputX = outputData.getFloat(outputOffset + i * outputStride).toDouble()
val outputY = outputData.getFloat(outputOffset + i * outputStride + 4).toDouble()
val outputZ = outputData.getFloat(outputOffset + i * outputStride + 8).toDouble()
val outputW = outputData.getFloat(outputOffset + i * outputStride + 12).toDouble()
keyframer.add(input, Quaternion(outputX, outputY, outputZ, outputW))
}
QuaternionChannel(sceneNode::rotation, keyframer)
}
else -> error("unsupported path ${channel.target.path}")
}
}
}
SceneAnimation(animationChannels)
}
return GltfSceneData(scenes, sceneAnimations.orEmpty())
}
private val IntArray?.orEmpty: IntArray get() = this ?: IntArray(0)

View File

@@ -0,0 +1,71 @@
package org.openrndr.extra.dnk3.materials
import org.openrndr.draw.ShadeStyle
import org.openrndr.draw.shadeStyle
import org.openrndr.extra.dnk3.Material
import org.openrndr.extra.dnk3.MaterialContext
import org.openrndr.extra.dnk3.PrimitiveContext
import org.openrndr.extra.dnk3.cubemap.glslEvaluateSH
import org.openrndr.extra.dnk3.cubemap.glslFetchSH
import org.openrndr.extra.dnk3.cubemap.genGlslGatherSH
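/**
 * Debug material that ignores regular shading: it gathers interpolated SH coefficients at the
 * fragment's world position and evaluates them along the world normal, or discards the
 * fragment when no irradiance SH data is available.
 */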
class IrradianceDebugMaterial : Material {
override val name: String? = null
override var doubleSided: Boolean = false
override var transparent: Boolean = false
override val fragmentID: Int = 0
override fun generateShadeStyle(context: MaterialContext, primitiveContext: PrimitiveContext): ShadeStyle {
return shadeStyle {
fragmentPreamble = """
$glslEvaluateSH
$glslFetchSH
${genGlslGatherSH(context.irradianceSH!!.xCount, context.irradianceSH!!.yCount, context.irradianceSH!!.zCount, context.irradianceSH!!.spacing, context.irradianceSH!!.offset)}
vec3 f_emission = vec3(0.0);
"""
if (context.irradianceSH != null) {
fragmentTransform = """
vec3[9] sh;
gatherSH(p_shMap, v_worldPosition, sh);
x_fill.rgb = evaluateSH(normalize(v_worldNormal), sh);
""".trimIndent()
} else {
fragmentTransform = """
discard;
"""
}
}
}
override fun applyToShadeStyle(context: MaterialContext, shadeStyle: ShadeStyle) {
context.irradianceSH?.shMap?.let {
shadeStyle.parameter("shMap", it)
}
}
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (other !is IrradianceDebugMaterial) return false
if (name != other.name) return false
if (doubleSided != other.doubleSided) return false
if (transparent != other.transparent) return false
if (fragmentID != other.fragmentID) return false
return true
}
override fun hashCode(): Int {
var result = name?.hashCode() ?: 0
result = 31 * result + doubleSided.hashCode()
result = 31 * result + transparent.hashCode()
result = 31 * result + fragmentID
return result
}
}

View File

@@ -0,0 +1,38 @@
package org.openrndr.extra.dnk3.post
import org.openrndr.draw.Filter
import org.openrndr.draw.Shader
import org.openrndr.draw.filterShaderFromUrl
import org.openrndr.math.Matrix44
import org.openrndr.resourceUrl
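// Screen-space reflection filter driven by /shaders/screenspace-reflections.frag; colors,
// projDepth and normals select which input textures hold the scene color, projection-space
// depth and view-space normals (defaults 0, 1, 2).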
class ScreenspaceReflections : Filter(preprocessedFilterShaderFromUrl(resourceUrl("/shaders/screenspace-reflections.frag"))) {
var projection: Matrix44 by parameters
var projectionMatrixInverse: Matrix44 by parameters
var colors: Int by parameters
var projDepth: Int by parameters
var normals: Int by parameters
var jitterOriginGain: Double by parameters
var iterationLimit: Int by parameters
var distanceLimit: Double by parameters
var gain: Double by parameters
var borderWidth: Double by parameters
init {
colors = 0
projDepth = 1
normals = 2
projection = Matrix44.IDENTITY
projectionMatrixInverse = Matrix44.IDENTITY
distanceLimit = 100.0
iterationLimit = 128
jitterOriginGain = 0.0
gain = 1.0
borderWidth = 130.0
}
}

View File

@@ -0,0 +1,8 @@
package org.openrndr.extra.dnk3.post
import org.openrndr.draw.Filter
import org.openrndr.draw.filterShaderFromUrl
import org.openrndr.resourceUrl
class SegmentContoursMSAA8 : Filter(filterShaderFromUrl(resourceUrl("/shaders/segment-contours-msaa-8.frag")))
class SegmentContours : Filter(filterShaderFromUrl(resourceUrl("/shaders/segment-contours.frag")))

View File

@@ -0,0 +1,45 @@
package org.openrndr.extra.dnk3.post
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.features.IrradianceSH
import org.openrndr.extra.shaderphrases.preprocessShader
import org.openrndr.math.IntVector3
import org.openrndr.math.Matrix44
import org.openrndr.resourceUrl
import java.net.URL
fun preprocessedFilterShaderFromUrl(url: String): Shader {
return filterShaderFromCode( preprocessShader(URL(url).readText()), "filter-shader: $url")
}
fun preprocessedFilterShaderFromCode(fragmentShaderCode: String, name: String): Shader {
return Shader.createFromCode(vsCode = Filter.filterVertexCode, fsCode = fragmentShaderCode, name = name)
}
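// Ray-marches from each visible surface towards the camera and accumulates in-scattered
// light sampled from the irradiance SH probe grid (see /shaders/volumetric-irradiance.frag).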
class VolumetricIrradiance : Filter(preprocessedFilterShaderFromUrl(resourceUrl("/shaders/volumetric-irradiance.frag"))) {
var stepLength: Double by parameters
var irradianceSH: IrradianceSH? = null
var viewMatrixInverse: Matrix44 by parameters
var projectionMatrixInverse: Matrix44 by parameters
init {
stepLength = 0.1
viewMatrixInverse = Matrix44.IDENTITY
projectionMatrixInverse = Matrix44.IDENTITY
}
override fun apply(source: Array<ColorBuffer>, target: Array<ColorBuffer>) {
irradianceSH?.shMap?.let {
parameters["shMap"] = it
}
irradianceSH?.let {
parameters["shMapDimensions"] = IntVector3(it.xCount, it.yCount, it.zCount)
parameters["shMapOffset"] = it.offset
parameters["shMapSpacing"] = it.spacing
}
super.apply(source, target)
}
}

View File

@@ -0,0 +1,56 @@
package org.openrndr.extra.dnk3.query
import org.openrndr.extra.dnk3.Material
import org.openrndr.extra.dnk3.Mesh
import org.openrndr.extra.dnk3.Scene
import org.openrndr.extra.dnk3.SceneNode
fun Scene.findNodeByName(name: String): SceneNode? {
return root.findNodeByName(name)
}
fun SceneNode.findNodeByName(name: String): SceneNode? {
if (this.name == name) {
return this
} else {
for (child in children) {
val candidate = child.findNodeByName(name)
if (candidate != null) {
return candidate
}
}
}
return null
}
fun SceneNode.findMaterialByName(name: String): Material? {
return allMaterials().find { it.name == name }
}
fun Scene.allMaterials(): Set<Material> {
return root.allMaterials()
}
fun SceneNode.allMaterials(): Set<Material> {
val materials = mutableSetOf<Material>()
fun processNode(node: SceneNode) {
for (entity in node.entities) {
when (entity) {
is Mesh -> {
materials.addAll(entity.primitives.map { it.material })
}
else -> {
}
}
}
for (child in node.children) {
processNode(child)
}
}
processNode(this)
return materials
}

View File

@@ -0,0 +1,8 @@
package org.openrndr.extra.dnk3.renderers
import org.openrndr.extra.dnk3.SceneRenderer
fun dryRenderer() : SceneRenderer {
val sr = SceneRenderer()
return sr
}

View File

@@ -0,0 +1,22 @@
package org.openrndr.extra.dnk3.renderers
import org.openrndr.draw.BufferMultisample
import org.openrndr.draw.ColorFormat
import org.openrndr.draw.ColorType
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.post.SegmentContours
import org.openrndr.extra.dnk3.post.SegmentContoursMSAA8
fun postRenderer(multisample: BufferMultisample = BufferMultisample.Disabled): SceneRenderer {
val sr = SceneRenderer()
sr.outputPasses.clear()
sr.outputPasses.add(
RenderPass(
listOf(HDRColorFacet(),FragmentIDFacet(), ClipDepthFacet(), ViewNormalFacet()),
multisample = multisample
)
)
sr.drawFinalBuffer = true
return sr
}

View File

@@ -0,0 +1,34 @@
package org.openrndr.extra.dnk3.renderers
import org.openrndr.draw.BufferMultisample
import org.openrndr.draw.ColorFormat
import org.openrndr.draw.ColorType
import org.openrndr.extra.dnk3.*
import org.openrndr.extra.dnk3.post.SegmentContours
import org.openrndr.extra.dnk3.post.SegmentContoursMSAA8
fun segmentContourRenderer(multisample: BufferMultisample = BufferMultisample.Disabled): SceneRenderer {
val sr = SceneRenderer()
sr.outputPasses.clear()
sr.outputPasses.add(
RenderPass(
listOf(LDRColorFacet(),FragmentIDFacet()),
multisample = multisample
)
)
sr.postSteps.add(
FilterPostStep(1.0,
when (multisample) {
BufferMultisample.Disabled -> SegmentContours()
BufferMultisample.SampleCount(8) -> SegmentContoursMSAA8()
else -> error("unsupported multisampling mode $multisample")
},
listOf("fragmentID"),
"segments",
ColorFormat.RGB,
ColorType.UINT8
)
)
sr.drawFinalBuffer = true
return sr
}

View File

@@ -0,0 +1,98 @@
package org.openrndr.extra.dnk3.tools
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.Geometry
import org.openrndr.extra.dnk3.Mesh
import org.openrndr.extra.dnk3.MeshPrimitive
import org.openrndr.extra.dnk3.PBRMaterial
import java.nio.ByteBuffer
import java.nio.ByteOrder
private data class CollapseItem(val vertexFormats: List<VertexFormat>,
val drawPrimitive: DrawPrimitive,
val hasIndexBuffer: Boolean)
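/**
 * Collapses primitives that share the same vertex formats, draw primitive and indexing into a
 * single geometry, concatenating their vertex/index data and adding a per-vertex "fragmentID"
 * attribute taken from each source primitive's material.
 */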
fun Mesh.collapse() {
val grouped = primitives.groupBy {
CollapseItem(it.geometry.vertexBuffers.map { it.vertexFormat }, it.geometry.primitive, it.geometry.indexBuffer != null)
}
grouped.map {
val vertexCount = it.value.sumBy { primitive ->
primitive.geometry.vertexCount
}
val indexCount = if (it.key.hasIndexBuffer)
it.value.sumBy { primitive ->
primitive.geometry.indexBuffer?.indexCount ?: 0
}
else 0
val collapsedVertices = it.key.vertexFormats.map {
vertexBuffer(it, vertexCount)
} + vertexBuffer(vertexFormat { attribute("fragmentID", VertexElementType.INT16) }, vertexCount)
val fragmentBuffer = ByteBuffer.allocateDirect(vertexCount * 2)
fragmentBuffer.order(ByteOrder.nativeOrder())
// copy each original attribute stream into the corresponding collapsed vertex buffer
for (i in it.key.vertexFormats.indices) {
var offset = 0
for (fromPrimitive in it.value) {
val fromBuffer = fromPrimitive.geometry.vertexBuffers[i]
val copy = ByteBuffer.allocateDirect(fromBuffer.vertexCount * fromBuffer.vertexFormat.size)
copy.order(ByteOrder.nativeOrder())
fromBuffer.read(copy)
copy.rewind()
collapsedVertices[i].write(copy, offset)
offset += copy.capacity()
}
}
// fill the fragmentID stream once per vertex and upload it to the extra vertex buffer
for (fromPrimitive in it.value) {
for (v in 0 until fromPrimitive.geometry.vertexCount) {
fragmentBuffer.putShort(fromPrimitive.material.fragmentID.toShort())
}
}
fragmentBuffer.rewind()
collapsedVertices.last().write(fragmentBuffer, 0)
val collapsedIndices = if (it.key.hasIndexBuffer) indexBuffer(indexCount, IndexType.INT32) else null
if (it.key.hasIndexBuffer) {
var offset = 0
val result = ByteBuffer.allocateDirect(4 * indexCount)
result.order(ByteOrder.nativeOrder())
for (fromPrimitive in it.value) {
val fromBuffer = fromPrimitive.geometry.indexBuffer!!
when (fromBuffer.type) {
IndexType.INT16 -> {
val copy = ByteBuffer.allocateDirect(fromBuffer.indexCount * 2)
fromBuffer.read(copy)
copy.rewind()
for (i in 0 until fromBuffer.indexCount) {
val index = (copy.getShort().toInt() and 0xffff) + offset
result.putInt(index)
}
}
IndexType.INT32 -> {
val copy = ByteBuffer.allocateDirect(fromBuffer.indexCount * 4)
fromBuffer.read(copy)
copy.rewind()
for (i in 0 until fromBuffer.indexCount) {
val index = copy.getInt() + offset
result.putInt(index)
}
}
}
offset += fromPrimitive.geometry.vertexCount
}
// upload the merged indices
result.rewind()
collapsedIndices?.write(result)
}
val collapsedGeometry = Geometry(collapsedVertices, collapsedIndices, it.key.drawPrimitive, 0, if (collapsedIndices == null)
vertexCount else indexCount
)
MeshPrimitive(collapsedGeometry, PBRMaterial())
}
}

View File

@@ -0,0 +1,84 @@
package org.openrndr.extra.dnk3.tools
import org.openrndr.draw.*
import org.openrndr.extra.dnk3.*
import org.openrndr.extras.meshgenerators.boxMesh
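/**
 * Skybox material: samples [cubemap] using the box vertex position as direction, scaled by
 * [intensity]; the vertex transform strips the view translation so the box stays centered on
 * the camera.
 */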
data class SkyboxMaterial(val cubemap: Cubemap, val intensity: Double = 0.0) : Material {
override val name: String = "skybox"
override var doubleSided: Boolean = false
override var transparent: Boolean = false
override val fragmentID: Int = 0
override fun generateShadeStyle(materialContext: MaterialContext, primitiveContext: PrimitiveContext): ShadeStyle {
return shadeStyle {
vertexTransform = """
vec2 i = vec2(1.0, 0.0);
x_viewMatrix = x_viewNormalMatrix;
""".trimIndent()
val combinerFS = materialContext.pass.combiners.map {
it.generateShader()
}.joinToString("\n")
fragmentPreamble = """
vec4 f_diffuse = vec4(0.0, 0.0, 0.0, 1.0);
vec3 f_specular = vec3(0.0);
vec3 f_ambient = vec3(0.0);
vec3 f_emission = vec3(0.0);
int f_fragmentID = 0;
vec4 m_color = vec4(1.0);
vec4 f_fog = vec4(0.0);
""".trimIndent()
fragmentTransform = """
f_diffuse = texture(p_skybox, va_position);
f_diffuse.rgb *= p_intensity;
""" + combinerFS
suppressDefaultOutput = true
val rt = RenderTarget.active
materialContext.pass.combiners.map {
if (rt is ProgramRenderTarget || materialContext.pass === DefaultPass || materialContext.pass === DefaultOpaquePass || materialContext.pass == DefaultTransparentPass || materialContext.pass == IrradianceProbePass) {
this.output(it.targetOutput, ShadeStyleOutput(0))
} else {
val index = rt.colorAttachmentIndexByName(it.targetOutput)
?: error("attachment ${it.targetOutput} not found")
val type = rt.colorBuffer(index).type
val format = rt.colorBuffer(index).format
this.output(it.targetOutput, ShadeStyleOutput(index, format, type))
}
}
}
}
override fun applyToShadeStyle(context: MaterialContext, shadeStyle: ShadeStyle) {
shadeStyle.parameter("skybox", cubemap)
shadeStyle.parameter("intensity", intensity)
}
override fun hashCode(): Int {
var result = intensity.hashCode()
result = 31 * result + name.hashCode()
result = 31 * result + doubleSided.hashCode()
result = 31 * result + transparent.hashCode()
result = 31 * result + fragmentID
return result
}
}
fun Scene.addSkybox(cubemapUrl: String, size: Double = 100.0, intensity: Double = 1.0) {
val cubemap = Cubemap.fromUrl(cubemapUrl, null, Session.active).apply { generateMipmaps() }
val box = boxMesh(size, size, size, 1, 1, 1, true)
val node = SceneNode()
val material = SkyboxMaterial(cubemap, intensity)
val geometry = Geometry(listOf(box), null, DrawPrimitive.TRIANGLES, 0, box.vertexCount)
val primitive = MeshPrimitive(geometry, material)
val mesh = Mesh(listOf(primitive))
node.entities.add(mesh)
root.children.add(node)
}

View File

@@ -0,0 +1,17 @@
vec3 evaluateSH(vec3 direction, vec3[9] _SH) {
const float c1 = 0.42904276540489171563379376569857; // 4 * Â2.Y22 = 1/4 * sqrt(15.PI)
const float c2 = 0.51166335397324424423977581244463; // 0.5 * Â1.Y10 = 1/2 * sqrt(PI/3)
const float c3 = 0.24770795610037568833406429782001; // Â2.Y20 = 1/16 * sqrt(5.PI)
const float c4 = 0.88622692545275801364908374167057; // Â0.Y00 = 1/2 * sqrt(PI)
float x = direction.x;
float y = direction.y;
float z = direction.z;
return max(vec3(0.0),
_SH[8] * (c1 * (x * x - y * y)) // c1.L22.(x²-y²)
+ _SH[6] * (c3 * (3.0 * z * z - 1)) // c3.L20.(3.z² - 1)
+ _SH[0] * c4 // c4.L00
+ (_SH[4] * x * y + _SH[7] * x * z + _SH[5] * y * z) * 2.0 * c1 // 2.c1.(L2-2.xy + L21.xz + L2-1.yz)
+ (_SH[3] * x + _SH[1] * y + _SH[2] * z) * c2 * 2.0); // 2.c2.(L11.x + L1-1.y + L10.z)
}

View File

@@ -0,0 +1,12 @@
void fetchSH(samplerBuffer btex, int probeID, out vec3[9] _SH) {
int offset = probeID * 9;
_SH[0] = texelFetch(btex, offset).rgb;
_SH[1] = texelFetch(btex, offset+1).rgb;
_SH[2] = texelFetch(btex, offset+2).rgb;
_SH[3] = texelFetch(btex, offset+3).rgb;
_SH[4] = texelFetch(btex, offset+4).rgb;
_SH[5] = texelFetch(btex, offset+5).rgb;
_SH[6] = texelFetch(btex, offset+6).rgb;
_SH[7] = texelFetch(btex, offset+7).rgb;
_SH[8] = texelFetch(btex, offset+8).rgb;
}

View File

@@ -0,0 +1,4 @@
void fetchSH0(samplerBuffer btex, int probeID, out vec3 _SH) {
int offset = probeID * 9;
_SH = texelFetch(btex, offset).rgb;
}

View File

@@ -0,0 +1,26 @@
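// gathers SH coefficients at world position p by trilinearly blending the nine coefficients of
// the eight probes surrounding the containing grid cell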
void gatherSH(samplerBuffer btex, vec3 p, ivec3 probeCounts, vec3 offset, float spacing, out vec3[9] blend) {
vec3[9] c000;
vec3[9] c001;
vec3[9] c010;
vec3[9] c011;
vec3[9] c100;
vec3[9] c101;
vec3[9] c110;
vec3[9] c111;
vec3 f;
ivec3 io = gridCoordinates(p, f, probeCounts, offset, spacing);
fetchSH(btex, gridIndex(io + ivec3(0,0,0), probeCounts), c000);
fetchSH(btex, gridIndex(io + ivec3(0,0,1), probeCounts), c001);
fetchSH(btex, gridIndex(io + ivec3(0,1,0), probeCounts), c010);
fetchSH(btex, gridIndex(io + ivec3(0,1,1), probeCounts), c011);
fetchSH(btex, gridIndex(io + ivec3(1,0,0), probeCounts), c100);
fetchSH(btex, gridIndex(io + ivec3(1,0,1), probeCounts), c101);
fetchSH(btex, gridIndex(io + ivec3(1,1,0), probeCounts), c110);
fetchSH(btex, gridIndex(io + ivec3(1,1,1), probeCounts), c111);
for (int i = 0; i < 9; ++i) {
blend[i] = mix( mix( mix(c000[i], c001[i], f.z), mix(c010[i], c011[i], f.z), f.y), mix( mix(c100[i], c101[i], f.z), mix(c110[i], c111[i], f.z), f.y), f.x);
}
}

View File

@@ -0,0 +1,25 @@
void gatherSH0(samplerBuffer btex, vec3 p, ivec3 probeCounts, vec3 offset, float spacing, out vec3 blend) {
vec3 c000;
vec3 c001;
vec3 c010;
vec3 c011;
vec3 c100;
vec3 c101;
vec3 c110;
vec3 c111;
vec3 f;
ivec3 io = gridCoordinates(p, f, probeCounts, offset, spacing);
fetchSH0(btex, gridIndex(io + ivec3(0,0,0), probeCounts), c000);
fetchSH0(btex, gridIndex(io + ivec3(0,0,1), probeCounts), c001);
fetchSH0(btex, gridIndex(io + ivec3(0,1,0), probeCounts), c010);
fetchSH0(btex, gridIndex(io + ivec3(0,1,1), probeCounts), c011);
fetchSH0(btex, gridIndex(io + ivec3(1,0,0), probeCounts), c100);
fetchSH0(btex, gridIndex(io + ivec3(1,0,1), probeCounts), c101);
fetchSH0(btex, gridIndex(io + ivec3(1,1,0), probeCounts), c110);
fetchSH0(btex, gridIndex(io + ivec3(1,1,1), probeCounts), c111);
blend = mix( mix( mix(c000, c001, f.z), mix(c010, c011, f.z), f.y), mix( mix(c100, c101, f.z), mix(c110, c111, f.z), f.y), f.x);
}

View File

@@ -0,0 +1,15 @@
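// maps world position p to probe-grid cell coordinates (grid centered around offset) and writes
// the fractional position within the cell to f for trilinear interpolation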
ivec3 gridCoordinates(vec3 p, out vec3 f, ivec3 probeCounts, vec3 offset, float spacing) {
float x = (p.x - offset.x) / spacing;
float y = (p.y - offset.y) / spacing;
float z = (p.z - offset.z) / spacing;
int ix = int(floor(x)) + probeCounts.x / 2;
int iy = int(floor(y)) + probeCounts.y / 2;
int iz = int(floor(z)) + probeCounts.z / 2;
f.x = fract((x));
f.y = fract((y));
f.z = fract((z));
return ivec3(ix, iy, iz);
}

View File

@@ -0,0 +1,4 @@
int gridIndex(ivec3 p, ivec3 probeCounts) {
ivec3 c = clamp(p, ivec3(0), probeCounts - ivec3(1));
return c.x + c.y * probeCounts.x + c.z * probeCounts.x * probeCounts.y;
}

View File

@@ -0,0 +1,22 @@
#version 330
uniform samplerCube tex0;
uniform vec3 sideUp;
uniform vec3 sideRight;
uniform vec3 sideNormal;
in vec2 v_texCoord0;
out vec4 o_output;
#define PI 3.1415926536
void main() {
vec3 irradiance = vec3(0.0);
vec2 uv = (v_texCoord0 - vec2(0.5))*2.0;
vec3 normal = normalize(uv.x * sideRight + uv.y * sideUp + sideNormal);
o_output.rgb = texture(tex0, normal).rgb;
o_output.a = 1.0;
}

View File

@@ -0,0 +1,40 @@
#version 330
uniform samplerCube tex0;
uniform vec3 sideUp;
uniform vec3 sideRight;
uniform vec3 sideNormal;
in vec2 v_texCoord0;
out vec4 o_output;
#define PI 3.1415926536
void main() {
vec3 irradiance = vec3(0.0);
vec2 uv = (v_texCoord0 - vec2(0.5))*2.0;
vec3 normal = normalize(uv.x * sideRight + uv.y * sideUp + sideNormal);
vec3 up = vec3(0.0, 1.0, 0.0);
vec3 right = cross(up, normal);
up = cross(normal, right);
float sampleDelta = 0.025;
int nrSamples = 0;
for(float phi = 0.0; phi < 2.0 * PI; phi += sampleDelta) {
for(float theta = 0.0; theta < 0.5 * PI; theta += sampleDelta) {
// spherical to cartesian (in tangent space)
vec3 tangentSample = vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta));
// tangent space to world
vec3 sampleVec = tangentSample.x * right + tangentSample.y * up + tangentSample.z * normal;
irradiance += texture(tex0, sampleVec).rgb * cos(theta) * sin(theta);
nrSamples++;
}
}
irradiance = PI * irradiance * (1.0 / float(nrSamples));
o_output.rgb = irradiance.rgb;
o_output.a = 1.0;
}

View File

@@ -0,0 +1,11 @@
uniform samplerCube tex0;
in vec2 v_texCoord0;
uniform vec2 targetSize;
// vec4 assumed for the (currently unused) SH term outputs of this stub shader
out vec4 o_term0;
out vec4 o_term1;
out vec4 o_term2;
void main() {
}

View File

@@ -0,0 +1,349 @@
#version 330
// --- varyings ---
in vec2 v_texCoord0;
// --- G buffer ---
uniform sampler2D colors;
uniform sampler2D projDepth;
uniform sampler2D normals;
// --- transforms ---
uniform mat4 projection;
uniform mat4 projectionMatrixInverse;
// --- output ---
layout(location = 0) out vec4 o_color;
// --- parameters ---
uniform float jitterOriginGain;
uniform int iterationLimit;
uniform float distanceLimit;
uniform float gain;
uniform float borderWidth;
float distanceSquared(vec2 a, vec2 b) {
vec2 d = b-a;
return dot(d,d);
}
#pragma import org.openrndr.extra.shaderphrases.phrases.Depth.projectionToViewCoordinate;
#pragma import org.openrndr.extra.shaderphrases.phrases.Depth.projectionToViewDepth;
#pragma import org.openrndr.extra.noise.phrases.NoisePhrasesKt.phraseHash22;
// this is from http://casual-effects.blogspot.nl/2014/08/screen-space-ray-tracing.html
void swap(inout float a, inout float b) {
float temp = a;
a = b;
b = temp;
}
bool traceScreenSpaceRay1
(vec3 csOrigin,
vec3 csDirection,
mat4x4 projectToPixelMatrix,
sampler2D csZBuffer,
vec2 csZBufferSize,
float csZThickness,
float nearPlaneZ,
float stride,
float jitterFraction,
float maxSteps,
in float maxRayTraceDistance,
out vec2 hitPixel,
out vec3 csHitPoint,
out vec3 csHitNormal
// ,out vec3 debugColor
) {
vec3 debugColor = vec3(0);
// Clip ray to a near plane in 3D (doesn't have to be *the* near plane, although that would be a good idea)
float rayLength = ((csOrigin.z + csDirection.z * maxRayTraceDistance) > nearPlaneZ) ?
(nearPlaneZ - csOrigin.z) / csDirection.z :
maxRayTraceDistance;
vec3 csEndPoint = csDirection * rayLength + csOrigin;
// Project into screen space
vec4 H0 = projectToPixelMatrix * vec4(csOrigin, 1.0);
vec4 H1 = projectToPixelMatrix * vec4(csEndPoint, 1.0);
// There are a lot of divisions by w that can be turned into multiplications
// at some minor precision loss...and we need to interpolate these 1/w values
// anyway.
//
// Because the caller was required to clip to the near plane,
// this homogeneous division (projecting from 4D to 2D) is guaranteed
// to succeed.
float k0 = 1.0 / H0.w;
float k1 = 1.0 / H1.w;
// Switch the original points to values that interpolate linearly in 2D
vec3 Q0 = csOrigin * k0;
vec3 Q1 = csEndPoint * k1;
// Screen-space endpoints
vec2 P0 = H0.xy * k0;
vec2 P1 = H1.xy * k1;
// [Optional clipping to frustum sides here]
// Initialize to off screen
hitPixel = vec2(-1.0, -1.0);
// If the line is degenerate, make it cover at least one pixel
// to avoid handling zero-pixel extent as a special case later
P1 += vec2((distanceSquared(P0, P1) < 0.0001) ? 0.01 : 0.0);
vec2 delta = P1 - P0;
// Permute so that the primary iteration is in x to reduce
// large branches later
bool permute = (abs(delta.x) < abs(delta.y));
if (permute) {
// More-vertical line. Create a permutation that swaps x and y in the output
// by directly swizzling the inputs.
delta = delta.yx;
P1 = P1.yx;
P0 = P0.yx;
}
// From now on, "x" is the primary iteration direction and "y" is the secondary one
float stepDirection = sign(delta.x);
float invdx = stepDirection / delta.x;
vec2 dP = vec2(stepDirection, invdx * delta.y);
// Track the derivatives of Q and k
vec3 dQ = (Q1 - Q0) * invdx;
float dk = (k1 - k0) * invdx;
// Because we test 1/2 a texel forward along the ray, on the very last iteration
// the interpolation can go past the end of the ray. Use these bounds to clamp it.
float zMin = min(csEndPoint.z, csOrigin.z);
float zMax = max(csEndPoint.z, csOrigin.z);
// Scale derivatives by the desired pixel stride
dP *= stride; dQ *= stride; dk *= stride;
// Offset the starting values by the jitter fraction
P0 += dP * jitterFraction; Q0 += dQ * jitterFraction; k0 += dk * jitterFraction;
// Slide P from P0 to P1, (now-homogeneous) Q from Q0 to Q1, and k from k0 to k1
vec3 Q = Q0;
float k = k0;
// We track the ray depth at +/- 1/2 pixel to treat pixels as clip-space solid
// voxels. Because the depth at -1/2 for a given pixel will be the same as at
// +1/2 for the previous iteration, we actually only have to compute one value
// per iteration.
float prevZMaxEstimate = csOrigin.z;
float stepCount = 0.0;
float rayZMax = prevZMaxEstimate, rayZMin = prevZMaxEstimate;
float sceneZMax = rayZMax + 1e4;
// P1.x is never modified after this point, so pre-scale it by
// the step direction for a signed comparison
float end = P1.x * stepDirection;
// We only advance the z field of Q in the inner loop, since
// Q.xy is never used until after the loop terminates.
vec2 P;
for (P = P0;
((P.x * stepDirection) <= end) &&
(stepCount < maxSteps) &&
((rayZMax < sceneZMax - csZThickness) ||
(rayZMin > sceneZMax)) &&
(sceneZMax != 0.0);
P += dP, Q.z += dQ.z, k += dk, stepCount += 1.0) {
// The depth range that the ray covers within this loop
// iteration. Assume that the ray is moving in increasing z
// and swap if backwards. Because one end of the interval is
// shared between adjacent iterations, we track the previous
// value and then swap as needed to ensure correct ordering
rayZMin = prevZMaxEstimate;
// Compute the value at 1/2 step into the future
rayZMax = (dQ.z * 0.5 + Q.z) / (dk * 0.5 + k);
// -- this is not in the other implementation
rayZMax = clamp(rayZMax, zMin, zMax);
prevZMaxEstimate = rayZMax;
// Since we don't know if the ray is stepping forward or backward in depth,
// maybe swap. Note that we preserve our original z "max" estimate first.
if (rayZMin > rayZMax) { swap(rayZMin, rayZMax); }
// Camera-space z of the background
hitPixel = permute ? P.yx : P;
vec4 depthData = texelFetch(csZBuffer, ivec2(hitPixel), 0);
sceneZMax = projectionToViewCoordinate(v_texCoord0, depthData.x, projectionMatrixInverse).z;
} // pixel on ray
// Undo the last increment, which ran after the test variables
// were set up.
P -= dP; Q.z -= dQ.z; k -= dk; stepCount -= 1.0;
bool hit = (rayZMax >= sceneZMax - csZThickness) && (rayZMin <= sceneZMax);
// If using non-unit stride and we hit a depth surface...
if ((stride > 1) && hit) {
// Refine the hit point within the last large-stride step
// Retreat one whole stride step from the previous loop so that
// we can re-run that iteration at finer scale
P -= dP; Q.z -= dQ.z; k -= dk; stepCount -= 1.0;
// Take the derivatives back to single-pixel stride
float invStride = 1.0 / stride;
dP *= invStride; dQ.z *= invStride; dk *= invStride;
// For this test, we don't bother checking thickness or passing the end, since we KNOW there will
// be a hit point. As soon as
// the ray passes behind an object, call it a hit. Advance (stride + 1) steps to fully check this
// interval (we could skip the very first iteration, but then we'd need identical code to prime the loop)
float refinementStepCount = 0;
// This is the current sample point's z-value, taken back to camera space
prevZMaxEstimate = Q.z / k;
rayZMin = prevZMaxEstimate;
// Ensure that the FOR-loop test passes on the first iteration since we
// won't have a valid value of sceneZMax to test.
sceneZMax = rayZMin - 1e7;
for (;
(refinementStepCount <= stride*1.4) &&
(rayZMin > sceneZMax) && (sceneZMax != 0.0);
P += dP, Q.z += dQ.z, k += dk, refinementStepCount += 1.0) {
rayZMin = prevZMaxEstimate;
// Compute the ray camera-space Z value at 1/2 fine step (pixel) into the future
rayZMax = (dQ.z * 0.5 + Q.z) / (dk * 0.5 + k);
rayZMax = clamp(rayZMax, zMin, zMax);
prevZMaxEstimate = rayZMax;
rayZMin = min(rayZMax, rayZMin);
hitPixel = permute ? P.yx : P;
vec4 depthData = texelFetch(csZBuffer, ivec2(hitPixel), 0);
sceneZMax = projectionToViewCoordinate(v_texCoord0, depthData.x, projectionMatrixInverse).z;
csHitNormal = texelFetch(normals, ivec2(hitPixel), 0).xyz;
// sceneZMax = texelFetch(csZBuffer, ivec2(hitPixel), 0).r;
}
// Undo the last increment, which happened after the test variables were set up
Q.z -= dQ.z; refinementStepCount -= 1;
// Count the refinement steps as fractions of the original stride. Save a register
// by not retaining invStride until here
stepCount += refinementStepCount / stride;
// debugColor = vec3(refinementStepCount / stride);
} // refinement
Q.xy += dQ.xy * stepCount;
csHitPoint = Q * (1.0 / k);
// Support debugging. This will compile away if debugColor is unused
if ((P.x * stepDirection) > end) {
// Hit the max ray distance -> blue
debugColor = vec3(0,0,1);
} else if (stepCount >= maxSteps) {
// Ran out of steps -> red
debugColor = vec3(1,0,0);
} else if (sceneZMax == 0.0) {
// Went off screen -> yellow
debugColor = vec3(1,1,0);
} else {
// Encountered a valid hit -> green
// ((rayZMax >= sceneZMax - csZThickness) && (rayZMin <= sceneZMax))
debugColor = vec3(0,1,0);
}
// Does the last point discovered represent a valid hit?
return hit;
}
void main() {
vec2 hitPixel = vec2(0.0, 0.0);
vec3 hitPoint = vec3(0.0, 0.0, 0.0);
vec3 hitNormal = vec3(0.0, 0.0, 0.0);
vec2 jitter = abs(hash22(v_texCoord0));
vec2 ts = vec2(textureSize(projDepth, 0).xy);
vec3 viewNormal = normalize(texture(normals, v_texCoord0).xyz);// + (texture(noise, v_texCoord0*0.1).xyz - 0.5) * 0.0;
float depth = texture(projDepth, v_texCoord0).r;
vec3 viewPos = projectionToViewCoordinate(v_texCoord0, depth, projectionMatrixInverse);
vec3 reflected = normalize(reflect(normalize(viewPos), normalize(-viewNormal)));
float angle = abs(dot(reflected, viewNormal));
float frontalFade = clamp(-reflected.z, 0.0, 1.0);
if ( true ) {
bool hit = traceScreenSpaceRay1(
viewPos,
reflected,
projection,
projDepth,
ts,
0.1,
0.0, // near plane z
1.0,// + projPos.z*2.0, // stride
10.0, // jitterfraction
iterationLimit*8,// + int((1.0-projPos.z)*iterationLimit),
100.0, // max distance
hitPixel,
hitPoint, hitNormal);
float distanceFade = 1.0;//max( 0.0, (distanceLimit -length(hitPoint-viewPos))/ distanceLimit);
vec4 p = projection * vec4(hitPoint, 1.0);
float k = 1.0 / p.w;
vec2 pos = vec2(p.xy*k);
vec2 ad = vec2(ts/2- abs(pos - ts/2));
float borderFade = 1.0; //smoothstep(0, borderWidth, min(ad.x, ad.y));
float l = 0.0;
int l0 = int(l);
int l1 = l0 + 1;
float lf = l - l0;
vec4 reflectedColor0 = texelFetch(colors, ivec2(p.xy*k)/(1<<l0), l0);
vec4 reflectedColor1 = texelFetch(colors, ivec2(p.xy*k)/(1<<l1), l1);
vec4 reflectedColor = reflectedColor0 * (1.0-lf) + reflectedColor1 * lf;
// vec2 uv = vec2(p.xy*k) / textureSize(colors, 0);
//reflectedColor = textureLod(colors, uv, l);
float hitFade = hit? 1.0: 0.0;
float angleFade = 1.0;/// smoothstep(0.0, 0.3, angle);;//angle < 0.5? 0.0 : 1.0;
float faceFade = 1.0; //step(0.00001, dot(-normalize(hitNormal), reflected));
o_color.rgb = (1.0 * reflectedColor.rgb * hitFade * frontalFade * distanceFade * borderFade * angleFade * faceFade) + texture(colors, v_texCoord0).rgb;
o_color.a = 1.0;
} else {
o_color = texture(colors, v_texCoord0).rgba;
o_color.a = 1.0;
}
}

View File

@@ -0,0 +1,34 @@
#version 330
uniform usampler2DMS tex0;
in vec2 v_texCoord0;
out vec4 o_output;
void main() {
ivec2 ts = textureSize(tex0);
ivec2 pixel = ivec2(v_texCoord0 * ts);
ivec2 c = pixel;
ivec2 n = c + ivec2(0, -1);
ivec2 s = c + ivec2(0, 1);
ivec2 w = c + ivec2(-1, 0);
ivec2 e = c + ivec2(1, 0);
float sf = 0.0;
for (int i = 0; i < 8; ++i) {
float f = 1.0;
uint sc = texelFetch(tex0, c, i).r;
uint sn = texelFetch(tex0, n, i).r;
uint ss = texelFetch(tex0, s, i).r;
uint se = texelFetch(tex0, e, i).r;
uint sw = texelFetch(tex0, w, i).r;
if (sc == se) f -= 0.25;
if (sc == sw) f -= 0.25;
if (sc == sn) f -= 0.25;
if (sc == ss) f -= 0.25;
sf+= f;
}
o_output = vec4(vec3(sf/4.0), 1.0);
}

View File

@@ -0,0 +1,33 @@
#version 330
uniform usampler2D tex0;
in vec2 v_texCoord0;
out vec4 o_output;
void main() {
ivec2 ts = textureSize(tex0, 0);
ivec2 pixel = ivec2(v_texCoord0 * ts);
ivec2 c = pixel;
ivec2 n = c + ivec2(0, -1);
ivec2 s = c + ivec2(0, 1);
ivec2 w = c + ivec2(-1, 0);
ivec2 e = c + ivec2(1, 0);
float sf = 0.0;
for (int i = 0; i < 1; ++i) {
float f = 1.0;
uint sc = texelFetch(tex0, c, i).r;
uint sn = texelFetch(tex0, n, i).r;
uint ss = texelFetch(tex0, s, i).r;
uint se = texelFetch(tex0, e, i).r;
uint sw = texelFetch(tex0, w, i).r;
if (sc == se) f -= 0.25;
if (sc == sw) f -= 0.25;
if (sc == sn) f -= 0.25;
if (sc == ss) f -= 0.25;
sf+= f;
}
o_output = vec4(vec3(sf/0.5), 1.0);
}

View File

@@ -0,0 +1,57 @@
#version 330 core
#pragma import org.openrndr.extra.shaderphrases.phrases.Depth.projectionToViewCoordinate;
#pragma import org.openrndr.extra.dnk3.cubemap.SphericalHarmonicsKt.glslFetchSH0;
#pragma import org.openrndr.extra.dnk3.cubemap.SphericalHarmonicsKt.glslGridCoordinates;
#pragma import org.openrndr.extra.dnk3.cubemap.SphericalHarmonicsKt.glslGridIndex;
#pragma import org.openrndr.extra.dnk3.cubemap.SphericalHarmonicsKt.glslGatherSH0;
#pragma import org.openrndr.extra.noise.phrases.NoisePhrasesKt.phraseHash22;
#pragma import org.openrndr.extra.noise.phrases.SimplexKt.phraseSimplex3;
in vec2 v_texCoord0;
uniform sampler2D tex0; // image
uniform sampler2D tex1; // projDepth
uniform samplerBuffer shMap;
uniform ivec3 shMapDimensions;
uniform vec3 shMapOffset;
uniform float shMapSpacing;
uniform mat4 projectionMatrixInverse;
uniform mat4 viewMatrixInverse;
uniform float stepLength;
out vec4 o_output;
void main() {
vec3 inputColor = texture(tex0, v_texCoord0).rgb;
float projDepth = texture(tex1, v_texCoord0).r;
vec3 viewCoordinate = projectionToViewCoordinate(v_texCoord0, projDepth, projectionMatrixInverse);
vec3 worldCoordinate = (viewMatrixInverse * vec4(viewCoordinate, 1.0)).xyz;
vec3 cameraPosition = (viewMatrixInverse * vec4(vec3(0.0), 1.0)).xyz;
// trace in world space
vec3 traverse = cameraPosition - worldCoordinate;
vec3 direction = normalize(traverse);
if (length(traverse) > 10.0) {
traverse = direction*10.0;
worldCoordinate = cameraPosition - traverse;
}
int steps = min(100, int(length(traverse) / 0.1));
vec3 step = traverse / steps;
vec3 marchPosition = worldCoordinate;
vec3 accumulated = inputColor;
float jitter = hash22(v_texCoord0).x;
marchPosition += jitter * step*0.5;
for (int stepIndex = 0; stepIndex < steps; ++stepIndex) {
float density = pow(abs(simplex31(marchPosition*0.25)), 4.0) * 0.1;
vec3 sh0;
gatherSH0(shMap, marchPosition, shMapDimensions, shMapOffset, shMapSpacing, sh0);
accumulated = accumulated * (1.0-density) + sh0 * density;
marchPosition += step;
}
o_output = vec4(accumulated, 1.0);
}