fun main() = application {
    program {
        // Load the typeface and extract the outline of the glyph 'a' at size 750.
        val face = loadFace("data/fonts/default.otf")
        val shape = face.glyphForCharacter('a').shape(750.0)

        // For each contour, sample `sampleCount` parameter values in [0, 1]
        // and build a short segment along the inward-pointing normal at each
        // sample, offset 2..10 units into the glyph.
        val sampleCount = 100
        val normals = shape.contours.flatMap { contour ->
            (0 until sampleCount).map { index ->
                val t = index / (sampleCount - 1.0)
                val point = contour.position(t)
                val normal = contour.normal(t)
                LineSegment(point - normal * 2.0, point - normal * 10.0)
            }
        }

        extend(Screenshots())
        extend {
            drawer.clear(ColorRGBa.WHITE)
            // Center the glyph within the window.
            drawer.translate(drawer.bounds.center - shape.bounds.center)

            // Filled glyph, no outline.
            drawer.fill = ColorRGBa.BLACK
            drawer.stroke = null
            drawer.shape(shape)

            // Overlay the sampled normal segments.
            drawer.stroke = ColorRGBa.BLACK
            drawer.strokeWeight = 2.0
            drawer.lineSegments(normals)
        }
    }
}
In this example I get the shape for the character 'a', extract its contours, and then sample 100 points on each contour, reading off positions and normals. Note that it is also possible to distribute those points evenly along the contour, but I can show that later.
It would make me happy to see images of any kind of experiment using this feature
Oh wow, this is great! I started trying to implement it in my experiment; it works great for converting the letter into a shape and displaying that shape as well:
EDIT:
I managed to apply it to your example, filtering points inside the shape — only it seems that "inside" is detecting just the interior of the eye (the enclosed counter) of the letter:
A workaround for only the inner points being detected is this:
// Rebuild the shape with every contour's direction reversed so the winding
// matches what the containment test expects, then keep only the points that
// fall inside the corrected shape.
val letter2 = Shape(letter.contours.map { contour -> contour.reversed })
val insidePoints = pts.filter { point -> point in letter2 }
You can print `it.winding` for the contours to see why: it seems the outer contour has COUNTER_CLOCKWISE winding and the inner one CLOCKWISE, but it should be the other way around.