Add orx-jumpflood/README.md
@@ -12,7 +12,7 @@ A growing library of assorted data structures, algorithms and utilities.
 - [`orx-filter-extension`](orx-filter-extension/README.md), `Program` extension method that provides Filter based `extend()`
 - [`orx-integral-image`](orx-integral-image/README.md), CPU-based and GPU-based implementation for integral images (summed area tables)
 - [`orx-interval-tree`](orx-interval-tree/README.md), data structure for accelerating point-in-interval queries.
-- `orx-jumpflood`, a filter/shader based implementation of the jump flood algorithm for finding fast approximate (directional) distance fields
+- [`orx-jumpflood`](orx-jumpflood/README.md), a filter/shader based implementation of the jump flood algorithm for finding fast approximate (directional) distance fields
 - `orx-kdtree`, a kd-tree implementation for fast nearest point searches
 - [`orx-kinect-v1`](orx-kinect-v1/README.md), utilities to use Kinect V1 RGB-D sensors in OPENRNDR programs.
 - [`orx-mesh-generators`](orx-mesh-generators/README.md), triangular mesh generators
orx-jumpflood/README.md (new file, 222 lines)
@@ -0,0 +1,222 @@
# orx-jumpflood

An OPENRNDR extra that provides GPU-accelerated jump flooding functionality.

[Original jump flooding algorithm](https://www.comp.nus.edu.sg/~tants/jfa.html)

`orx-jumpflood` focuses on finding 2D distance and directional distance fields.

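The jump flood algorithm approximates a distance transform in a logarithmic number of passes: every pixel starts out knowing only whether it is a seed, and in each pass it looks at eight neighbours at a decreasing jump distance (n/2, n/4, …, 1) and adopts the closest seed coordinate it sees. The sketch below is a conceptual CPU version of that step sequence in plain Kotlin, intended only to illustrate the idea; it is not the filter/shader implementation used by `orx-jumpflood`, and the function and parameter names are made up for this example.

```kotlin
import kotlin.math.hypot

// Conceptual CPU sketch of the jump flood algorithm (illustrative only;
// orx-jumpflood runs these passes on the GPU using filters/shaders).
// Each cell stores the coordinates of the nearest seed found so far.
typealias Seed = Pair<Int, Int>

fun jumpFlood(seeds: List<Seed>, n: Int): Array<DoubleArray> {
    var nearest: Array<Array<Seed?>> = Array(n) { arrayOfNulls<Seed>(n) }
    for ((sx, sy) in seeds) nearest[sy][sx] = sx to sy // row-major: [y][x]

    // passes with jump distances n/2, n/4, ..., 1
    var step = n / 2
    while (step >= 1) {
        // read from the previous pass, write into a fresh buffer
        val next = Array(n) { y -> Array(n) { x -> nearest[y][x] } }
        for (y in 0 until n) for (x in 0 until n) {
            for (dy in intArrayOf(-step, 0, step)) for (dx in intArrayOf(-step, 0, step)) {
                val qx = x + dx
                val qy = y + dy
                if (qx !in 0 until n || qy !in 0 until n) continue
                val candidate = nearest[qy][qx] ?: continue
                val best = next[y][x]
                val candidateDistance =
                    hypot((x - candidate.first).toDouble(), (y - candidate.second).toDouble())
                if (best == null ||
                    candidateDistance < hypot((x - best.first).toDouble(), (y - best.second).toDouble())) {
                    next[y][x] = candidate
                }
            }
        }
        nearest = next
        step /= 2
    }

    // convert the nearest-seed map into an (approximate) distance field
    return Array(n) { y ->
        DoubleArray(n) { x ->
            nearest[y][x]?.let { (sx, sy) ->
                hypot((x - sx).toDouble(), (y - sy).toDouble())
            } ?: Double.POSITIVE_INFINITY
        }
    }
}
```

The result is approximate: a pixel can occasionally miss its true nearest seed, which is why the module describes itself as producing fast approximate (directional) distance fields.
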
## Distance field example

`distanceFieldFromBitmap()` calculates distances to bitmap contours; it stores
the distance in red and the original bitmap in green.

```kotlin
import org.openrndr.application
import org.openrndr.draw.*
import org.openrndr.extra.jumpfill.Threshold
import org.openrndr.extra.jumpfill.distanceFieldFromBitmap
import org.openrndr.ffmpeg.VideoPlayerFFMPEG
import org.openrndr.filter.blur.ApproximateGaussianBlur

fun main() = application {
    configure {
        width = 1280
        height = 720
    }

    program {
        val blurFilter = ApproximateGaussianBlur()
        val blurred = colorBuffer(width, height)

        val thresholdFilter = Threshold()
        val thresholded = colorBuffer(width, height)

        val distanceField = colorBuffer(width, height, type = ColorType.FLOAT32)

        val videoCopy = renderTarget(width, height) {
            colorBuffer()
        }
        val videoPlayer = VideoPlayerFFMPEG.fromDevice(imageWidth = width, imageHeight = height)
        videoPlayer.play()

        extend {
            // -- copy videoplayer output
            drawer.isolatedWithTarget(videoCopy) {
                drawer.ortho(videoCopy)
                videoPlayer.draw(drawer)
            }

            // -- blur the input a bit, this produces less noisy bitmap images
            blurFilter.sigma = 9.0
            blurFilter.window = 18
            blurFilter.apply(videoCopy.colorBuffer(0), blurred)

            // -- threshold the blurred image
            thresholdFilter.threshold = 0.5
            thresholdFilter.apply(blurred, thresholded)

            distanceFieldFromBitmap(drawer, thresholded, result = distanceField)

            drawer.isolated {
                // -- use a shadestyle to visualize the distance field
                drawer.shadeStyle = shadeStyle {
                    fragmentTransform = """
                        float d = x_fill.r;
                        if (x_fill.g > 0.5) {
                            x_fill.rgb = 1.0 * vec3(cos(d) * 0.5 + 0.5);
                        } else {
                            x_fill.rgb = 0.25 * vec3(1.0 - (cos(d) * 0.5 + 0.5));
                        }
                    """
                }
                drawer.image(distanceField)
            }
        }
    }
}
```
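
The example above uses a camera feed, but any roughly binary image works as input. Below is a smaller, self-contained sketch that draws a white disc into an offscreen render target and computes its distance field directly, skipping the blur/threshold stage because the drawn shape is already binary. Treat it as a sketch: `drawer.circle`, `drawer.clear` and `ColorRGBa` are standard OPENRNDR drawing API not shown in the example above (older OPENRNDR versions use `drawer.background` instead of `drawer.clear`), and the field is shown here as raw values rather than through a shade style.

```kotlin
import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.draw.*
import org.openrndr.extra.jumpfill.distanceFieldFromBitmap

fun main() = application {
    configure {
        width = 1280
        height = 720
    }

    program {
        // offscreen target holding the input bitmap
        val bitmap = renderTarget(width, height) {
            colorBuffer()
        }
        val distanceField = colorBuffer(width, height, type = ColorType.FLOAT32)

        extend {
            // -- draw a white disc on black; this is already a clean bitmap,
            // -- so no blur/threshold step is needed
            drawer.isolatedWithTarget(bitmap) {
                drawer.clear(ColorRGBa.BLACK)
                drawer.stroke = null
                drawer.fill = ColorRGBa.WHITE
                drawer.circle(width / 2.0, height / 2.0, 200.0)
            }

            distanceFieldFromBitmap(drawer, bitmap.colorBuffer(0), result = distanceField)

            // -- distance in red, input bitmap in green (see above)
            drawer.image(distanceField)
        }
    }
}
```
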
## Direction field example

`directionFieldFromBitmap()` calculates directions to bitmap contours; it stores
the x-direction in red, the y-direction in green, and the original bitmap in blue.

```kotlin
import org.openrndr.application
import org.openrndr.draw.*
import org.openrndr.extra.jumpfill.Threshold
import org.openrndr.extra.jumpfill.directionFieldFromBitmap
import org.openrndr.ffmpeg.VideoPlayerFFMPEG
import org.openrndr.filter.blur.ApproximateGaussianBlur

fun main() = application {
    configure {
        width = 1280
        height = 720
    }

    program {
        val blurFilter = ApproximateGaussianBlur()
        val blurred = colorBuffer(width, height)

        val thresholdFilter = Threshold()
        val thresholded = colorBuffer(width, height)

        val directionField = colorBuffer(width, height, type = ColorType.FLOAT32)

        val videoPlayer = VideoPlayerFFMPEG.fromDevice(imageWidth = width, imageHeight = height)
        videoPlayer.play()

        val videoCopy = renderTarget(width, height) {
            colorBuffer()
        }

        extend {
            // -- copy videoplayer output
            drawer.isolatedWithTarget(videoCopy) {
                drawer.ortho(videoCopy)
                videoPlayer.draw(drawer)
            }

            // -- blur the input a bit, this produces less noisy bitmap images
            blurFilter.sigma = 9.0
            blurFilter.window = 18
            blurFilter.apply(videoCopy.colorBuffer(0), blurred)

            // -- threshold the blurred image
            thresholdFilter.threshold = 0.5
            thresholdFilter.apply(blurred, thresholded)

            directionFieldFromBitmap(drawer, thresholded, result = directionField)

            drawer.isolated {
                // -- use a shadestyle to visualize the direction field
                drawer.shadeStyle = shadeStyle {
                    fragmentTransform = """
                        float a = atan(x_fill.r, x_fill.g);
                        if (a < 0.0) {
                            a += 3.1415926535 * 2.0;
                        }
                        if (x_fill.b > 0.5) {
                            x_fill.rgb = 1.0 * vec3(cos(a) * 0.5 + 0.5);
                        } else {
                            x_fill.rgb = 0.25 * vec3(cos(a) * 0.5 + 0.5);
                        }
                    """
                }
                drawer.image(directionField)
            }
        }
    }
}
```
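
The direction field lives in a regular `FLOAT32` color buffer, so it can also be sampled on the CPU, for example to steer geometry toward the nearest contour. The fragment below is meant to continue the example above, inside `extend { }` after `directionFieldFromBitmap(...)`. It assumes OPENRNDR's color buffer shadow API (`shadow.download()` plus indexed reads) and additionally needs `org.openrndr.math.Vector2` and `org.openrndr.color.ColorRGBa` imported, so treat it as a sketch rather than a drop-in snippet.

```kotlin
// -- sample the direction field on the CPU (sketch; assumes the color buffer
// -- shadow API with download() and indexed reads)
directionField.shadow.download()
val shadow = directionField.shadow

val x = width / 2
val y = height / 2
val sample = shadow[x, y]

// x-direction in red, y-direction in green, original bitmap in blue
val direction = Vector2(sample.r, sample.g).normalized

// -- draw a short segment from the sample point toward the nearest contour
val p = Vector2(x.toDouble(), y.toDouble())
drawer.stroke = ColorRGBa.PINK
drawer.lineSegment(p, p + direction * 50.0)
```

Downloading the shadow every frame is expensive; when many samples are needed per frame it is usually better to stay on the GPU and read the field in a shade style, as the examples above do.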