Construct meshes independently of mesh libraries (renamed to "mesh packs").

This fits in better with the way WebRender does things. It simplifies
the code too.
Patrick Walton 2018-03-07 14:06:54 -08:00
parent cffff886c3
commit 64c818e530
21 changed files with 494 additions and 785 deletions
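
For orientation, a minimal sketch (not part of the commit) of how the renamed types below fit together on the TypeScript side. The constructor signatures come from the meshes.ts diff; the loadMeshes helper and its arguments are illustrative only.

import {PathfinderMeshPack, PathfinderPackedMeshBuffers, PathfinderPackedMeshes} from "./meshes";

// Illustrative helper: turn a RIFF-encoded "PFMP" buffer returned by the
// partitioning server into GPU-ready buffers.
function loadMeshes(gl: WebGLRenderingContext, data: ArrayBuffer): PathfinderPackedMeshBuffers {
    // Parse the RIFF container into one PathfinderMesh per `mesh` chunk.
    const pack = new PathfinderMeshPack(data);
    // Flatten the selected meshes (indices are 1-based) into packed typed arrays
    // with per-path ID buffers and path ranges; omit the index list to pack all.
    const packed = new PathfinderPackedMeshes(pack, [1]);
    // Upload the packed arrays into WebGL buffers for the renderer.
    return new PathfinderPackedMeshBuffers(gl, packed);
}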

View File

@ -20,7 +20,7 @@ import {Atlas, ATLAS_SIZE, AtlasGlyph, GlyphKey} from './atlas';
import PathfinderBufferTexture from "./buffer-texture";
import {CameraView, PerspectiveCamera} from "./camera";
import {UniformMap} from './gl-utils';
import {PathfinderMeshData} from "./meshes";
import {PathfinderMeshPack, PathfinderPackedMeshes} from "./meshes";
import {PathTransformBuffers, Renderer} from './renderer';
import {ShaderMap, ShaderProgramSource} from "./shader-loader";
import SSAAStrategy from "./ssaa-strategy";
@ -133,8 +133,8 @@ class ThreeDController extends DemoAppController<ThreeDView> {
atlasGlyphs!: AtlasGlyph[];
atlas!: Atlas;
baseMeshes!: PathfinderMeshData;
private expandedMeshes!: PathfinderMeshData[];
baseMeshes!: PathfinderMeshPack;
private expandedMeshes!: PathfinderPackedMeshes[];
private monumentPromise!: Promise<MonumentSide[]>;
@ -291,7 +291,7 @@ class ThreeDController extends DemoAppController<ThreeDView> {
this.expandedMeshes = this.meshDescriptors.map(meshDescriptor => {
const glyphIndex = _.sortedIndexOf(glyphsNeeded, meshDescriptor.glyphID);
return this.baseMeshes.expand([glyphIndex + 1]);
return new PathfinderPackedMeshes(this.baseMeshes, [glyphIndex + 1]);
});
this.view.then(view => view.attachMeshes(this.expandedMeshes));
@ -402,7 +402,7 @@ class ThreeDRenderer extends Renderer {
}
protected get objectCount(): number {
return this.meshes == null ? 0 : this.meshes.length;
return this.meshBuffers == null ? 0 : this.meshBuffers.length;
}
private cubeVertexPositionBuffer: WebGLBuffer;
@ -442,7 +442,7 @@ class ThreeDRenderer extends Renderer {
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, CUBE_INDICES, gl.STATIC_DRAW);
}
attachMeshes(expandedMeshes: PathfinderMeshData[]) {
attachMeshes(expandedMeshes: PathfinderPackedMeshes[]) {
super.attachMeshes(expandedMeshes);
this.renderAtlasGlyphs(this.renderContext.appController.atlasGlyphs);
@ -864,7 +864,9 @@ class ThreeDRenderer extends Renderer {
glmatrix.vec2.create());
const atlasRenderer = new ThreeDAtlasRenderer(this.renderContext, atlasGlyphs);
atlasRenderer.attachMeshes([this.renderContext.appController.baseMeshes]);
const baseMeshes = this.renderContext.appController.baseMeshes;
const expandedMeshes = new PathfinderPackedMeshes(baseMeshes);
atlasRenderer.attachMeshes([expandedMeshes]);
atlasRenderer.renderAtlas();
this.glyphTexCoords = atlasRenderer.glyphTexCoords;
this.glyphSizes = atlasRenderer.glyphSizes;

View File

@ -18,7 +18,7 @@ import {AppController, DemoAppController, setSwitchInputsValue} from "./app-cont
import PathfinderBufferTexture from './buffer-texture';
import {OrthographicCamera} from './camera';
import {UniformMap} from './gl-utils';
import {PathfinderMeshData} from "./meshes";
import {PathfinderMeshPack, PathfinderPackedMeshes} from "./meshes";
import {PathTransformBuffers, Renderer} from './renderer';
import {ShaderMap, ShaderProgramSource} from "./shader-loader";
import SSAAStrategy from './ssaa-strategy';
@ -104,7 +104,7 @@ class BenchmarkAppController extends DemoAppController<BenchmarkTestView> {
private resultsPartitioningTimeLabel!: HTMLSpanElement;
private glyphStore!: GlyphStore;
private baseMeshes!: PathfinderMeshData;
private baseMeshes!: PathfinderMeshPack;
private expandedMeshes!: ExpandedMeshData;
private size!: number;
@ -242,7 +242,7 @@ class BenchmarkAppController extends DemoAppController<BenchmarkTestView> {
this.svgLoader.partition().then(meshes => {
this.view.then(view => {
view.recreateRenderer();
view.attachMeshes([meshes]);
view.attachMeshes([new PathfinderPackedMeshes(meshes)]);
view.initCameraBounds(this.svgLoader.svgViewBox);
});
});
@ -482,7 +482,7 @@ class BenchmarkTextRenderer extends Renderer {
}
protected get objectCount(): number {
return this.meshes == null ? 0 : this.meshes.length;
return this.meshBuffers == null ? 0 : this.meshBuffers.length;
}
private _pixelsPerEm: number = 32.0;
@ -504,7 +504,7 @@ class BenchmarkTextRenderer extends Renderer {
this.camera.onZoom = () => renderContext.setDirty();
}
attachMeshes(meshes: PathfinderMeshData[]): void {
attachMeshes(meshes: PathfinderPackedMeshes[]): void {
super.attachMeshes(meshes);
this.uploadPathColors(1);

View File

@ -15,10 +15,10 @@ import {Font} from 'opentype.js';
import {AppController} from "./app-controller";
import {OrthographicCamera} from "./camera";
import {FilePickerView} from './file-picker';
import {B_QUAD_UPPER_RIGHT_VERTEX_OFFSET} from "./meshes";
import {B_QUAD_UPPER_RIGHT_VERTEX_OFFSET, PathfinderMeshPack} from "./meshes";
import {B_QUAD_LOWER_LEFT_VERTEX_OFFSET, B_QUAD_UPPER_CONTROL_POINT_VERTEX_OFFSET} from "./meshes";
import {B_QUAD_LOWER_RIGHT_VERTEX_OFFSET} from "./meshes";
import {B_QUAD_LOWER_CONTROL_POINT_VERTEX_OFFSET, PathfinderMeshData} from "./meshes";
import {B_QUAD_LOWER_CONTROL_POINT_VERTEX_OFFSET, PathfinderPackedMeshes} from "./meshes";
import {B_QUAD_SIZE, B_QUAD_UPPER_LEFT_VERTEX_OFFSET} from "./meshes";
import {BUILTIN_SVG_URI, SVGLoader} from './svg-loader';
import {BUILTIN_FONT_URI, TextRun} from "./text";
@ -86,7 +86,7 @@ interface NormalsTable<T> {
}
class MeshDebuggerAppController extends AppController {
meshes: PathfinderMeshData | null = null;
meshes: PathfinderPackedMeshes | null = null;
protected readonly defaultFile: string = FONT;
@ -146,7 +146,7 @@ class MeshDebuggerAppController extends AppController {
protected loadPath(opentypeGlyph?: opentype.Glyph | null) {
window.jQuery(this.openModal).modal('hide');
let promise: Promise<PathfinderMeshData>;
let promise: Promise<PathfinderMeshPack>;
if (this.file instanceof PathfinderFont && this.fileData != null) {
if (opentypeGlyph == null) {
@ -163,7 +163,7 @@ class MeshDebuggerAppController extends AppController {
}
promise.then(meshes => {
this.meshes = meshes;
this.meshes = new PathfinderPackedMeshes(meshes);
this.view.attachMeshes();
});
}
@ -278,7 +278,7 @@ class MeshDebuggerView extends PathfinderView {
};
// Draw B-quads.
for (let bQuadIndex = 0; bQuadIndex < meshes.bQuadVertexPositionCount; bQuadIndex++) {
for (let bQuadIndex = 0; bQuadIndex < meshes.bQuadVertexPositions.length; bQuadIndex++) {
const bQuadStartOffset = (B_QUAD_SIZE * bQuadIndex) / UINT32_SIZE;
const upperLeftPosition = getPosition(bQuadVertexPositions, bQuadIndex, 0);
@ -341,7 +341,7 @@ class MeshDebuggerView extends PathfinderView {
drawSegmentVertices(context,
new Float32Array(meshes.stencilSegments),
new Float32Array(meshes.stencilNormals),
meshes.stencilSegmentCount,
meshes.count('stencilSegments'),
[0, 2],
1,
3,

View File

@ -15,7 +15,7 @@ import {expectNotNull, FLOAT32_SIZE, panic, PathfinderError, Range, UINT16_SIZE}
import {UINT32_MAX, UINT32_SIZE, UINT8_SIZE, unwrapNull, unwrapUndef} from './utils';
interface BufferTypeFourCCTable {
[fourCC: string]: keyof Meshes<void>;
[fourCC: string]: keyof MeshLike<void>;
}
interface PathRangeTypeFourCCTable {
@ -26,18 +26,10 @@ interface RangeToCountTable {
[rangeKey: string]: keyof MeshDataCounts;
}
interface RangeToRangeBufferTable {
[rangeKey: string]: keyof Meshes<void>;
}
type PathIDBufferTable = Partial<MeshLike<PackedMeshBufferType>>;
interface ArrayLike<T> {
[index: number]: T;
}
interface VertexExpansionDescriptor<T> {
expanded: T[];
original: ArrayLike<T>;
size: number;
interface ArrayLike {
readonly length: number;
}
interface VertexCopyResult {
@ -51,6 +43,10 @@ type PrimitiveType = 'Uint16' | 'Uint32' | 'Float32';
type PrimitiveTypeArray = Float32Array | Uint16Array | Uint32Array;
type MeshBufferType = keyof MeshLike<void>;
type PackedMeshBufferType = keyof PackedMeshLike<void>;
interface MeshBufferTypeDescriptor {
type: PrimitiveType;
size: number;
@ -79,18 +75,18 @@ const INDEX_SIZE: number = 4;
const B_QUAD_VERTEX_POSITION_SIZE: number = 12 * 4;
const B_VERTEX_POSITION_SIZE: number = 4 * 2;
const MESH_TYPES: Meshes<MeshBufferTypeDescriptor> = {
const MESH_TYPES: PackedMeshLike<MeshBufferTypeDescriptor> = {
bBoxPathIDs: { type: 'Uint16', size: 1 },
bBoxes: { type: 'Float32', size: 20 },
bQuadVertexInteriorIndices: { type: 'Uint32', size: 1 },
bQuadVertexPositionPathIDs: { type: 'Uint16', size: 6 },
bQuadVertexPositions: { type: 'Float32', size: 12 },
bQuadVertexPositionPathIDs: { type: 'Uint16', size: 1 },
bQuadVertexPositions: { type: 'Float32', size: 2 },
stencilNormals: { type: 'Float32', size: 6 },
stencilSegmentPathIDs: { type: 'Uint16', size: 1 },
stencilSegments: { type: 'Float32', size: 6 },
};
const BUFFER_TYPES: Meshes<BufferType> = {
const BUFFER_TYPES: PackedMeshLike<BufferType> = {
bBoxPathIDs: 'ARRAY_BUFFER',
bBoxes: 'ARRAY_BUFFER',
bQuadVertexInteriorIndices: 'ELEMENT_ARRAY_BUFFER',
@ -105,7 +101,9 @@ const EDGE_BUFFER_NAMES = ['UpperLine', 'UpperCurve', 'LowerLine', 'LowerCurve']
const RIFF_FOURCC: string = 'RIFF';
const MESH_LIBRARY_FOURCC: string = 'PFML';
const MESH_PACK_FOURCC: string = 'PFMP';
const MESH_FOURCC: string = 'mesh';
// Must match the FourCCs in `pathfinder_partitioner::mesh_library::MeshLibrary::serialize_into()`.
const BUFFER_TYPE_FOURCCS: BufferTypeFourCCTable = {
@ -116,15 +114,6 @@ const BUFFER_TYPE_FOURCCS: BufferTypeFourCCTable = {
sseg: 'stencilSegments',
};
// Must match the FourCCs in
// `pathfinder_partitioner::mesh_library::MeshLibrary::serialize_into::write_path_ranges()`.
const PATH_RANGE_TYPE_FOURCCS: PathRangeTypeFourCCTable = {
bbox: 'bBoxPathRanges',
bqii: 'bQuadVertexInteriorIndexPathRanges',
bqvp: 'bQuadVertexPositionPathRanges',
sseg: 'stencilSegmentPathRanges',
};
const RANGE_TO_COUNT_TABLE: RangeToCountTable = {
bBoxPathRanges: 'bBoxCount',
bQuadVertexInteriorIndexPathRanges: 'bQuadVertexInteriorIndexCount',
@ -132,33 +121,56 @@ const RANGE_TO_COUNT_TABLE: RangeToCountTable = {
stencilSegmentPathRanges: 'stencilSegmentCount',
};
const RANGE_TO_RANGE_BUFFER_TABLE: RangeToRangeBufferTable = {
bBoxPathRanges: 'bBoxPathIDs',
bQuadVertexPositionPathRanges: 'bQuadVertexPositionPathIDs',
stencilSegmentPathRanges: 'stencilSegmentPathIDs',
const INDEX_TYPE_DESCRIPTOR_TABLE: {[P in MeshBufferType]?: IndexTypeDescriptor} = {
bQuadVertexInteriorIndices: {
bufferType: 'bQuadVertexPositions',
},
};
const RANGE_KEYS: Array<keyof PathRanges> = [
'bQuadVertexPositionPathRanges',
'bQuadVertexInteriorIndexPathRanges',
'bBoxPathRanges',
'stencilSegmentPathRanges',
];
const PATH_ID_BUFFER_TABLE: PathIDBufferTable = {
bBoxes: 'bBoxPathIDs',
bQuadVertexPositions: 'bQuadVertexPositionPathIDs',
stencilSegments: 'stencilSegmentPathIDs',
};
const PATH_RANGE_TO_BUFFER_TYPE_TABLE: {[P in keyof PathRanges]: MeshBufferType} = {
bBoxPathRanges: 'bBoxes',
bQuadVertexInteriorIndexPathRanges: 'bQuadVertexInteriorIndices',
bQuadVertexPositionPathRanges: 'bQuadVertexPositions',
stencilSegmentPathRanges: 'stencilSegments',
};
type BufferType = 'ARRAY_BUFFER' | 'ELEMENT_ARRAY_BUFFER';
export interface Meshes<T> {
readonly bQuadVertexPositions: T;
readonly bQuadVertexInteriorIndices: T;
readonly bBoxes: T;
readonly stencilSegments: T;
readonly stencilNormals: T;
export interface MeshBuilder<T> {
bQuadVertexPositions: T;
bQuadVertexInteriorIndices: T;
bBoxes: T;
stencilSegments: T;
stencilNormals: T;
}
bQuadVertexPositionPathIDs: T;
export interface PackedMeshBuilder<T> extends MeshBuilder<T> {
bBoxPathIDs: T;
bQuadVertexPositionPathIDs: T;
stencilSegmentPathIDs: T;
}
export type MeshLike<T> = {
readonly [P in keyof MeshBuilder<void>]: T;
};
export type PackedMeshLike<T> = {
readonly [P in keyof PackedMeshBuilder<void>]: T;
};
interface PathRanges {
readonly bBoxPathRanges: Range[];
readonly bQuadVertexInteriorIndexPathRanges: Range[];
readonly bQuadVertexPositionPathRanges: Range[];
readonly stencilSegmentPathRanges: Range[];
}
interface MeshDataCounts {
readonly bQuadVertexPositionCount: number;
readonly bQuadVertexInteriorIndexCount: number;
@ -166,43 +178,21 @@ interface MeshDataCounts {
readonly stencilSegmentCount: number;
}
interface PathRanges {
bQuadVertexPositionPathRanges: Range[];
bQuadVertexInteriorIndexPathRanges: Range[];
bBoxPathRanges: Range[];
stencilSegmentPathRanges: Range[];
interface IndexTypeDescriptor {
bufferType: MeshBufferType;
}
export class PathfinderMeshData implements Meshes<ArrayBuffer>, MeshDataCounts, PathRanges {
readonly bQuadVertexPositions!: ArrayBuffer;
readonly bQuadVertexInteriorIndices!: ArrayBuffer;
readonly bBoxes!: ArrayBuffer;
readonly bBoxSigns!: ArrayBuffer;
readonly bBoxIndices!: ArrayBuffer;
readonly stencilSegments!: ArrayBuffer;
readonly stencilNormals!: ArrayBuffer;
export class PathfinderMeshPack {
meshes: PathfinderMesh[];
readonly bQuadVertexPositionCount: number;
readonly bQuadVertexInteriorIndexCount: number;
readonly bBoxCount: number;
readonly stencilSegmentCount: number;
constructor(meshes: ArrayBuffer) {
this.meshes = [];
bQuadVertexPositionPathIDs!: ArrayBuffer;
bBoxPathIDs!: ArrayBuffer;
stencilSegmentPathIDs!: ArrayBuffer;
bQuadVertexPositionPathRanges!: Range[];
bQuadVertexInteriorIndexPathRanges!: Range[];
bBoxPathRanges!: Range[];
stencilSegmentPathRanges!: Range[];
constructor(meshes: ArrayBuffer | Meshes<ArrayBuffer>, optionalRanges?: PathRanges) {
if (meshes instanceof ArrayBuffer) {
// RIFF encoded data.
if (toFourCC(meshes, 0) !== RIFF_FOURCC)
panic("Supplied array buffer is not a mesh library (no RIFF header)!");
if (toFourCC(meshes, 8) !== MESH_LIBRARY_FOURCC)
panic("Supplied array buffer is not a mesh library (no PFML header)!");
if (toFourCC(meshes, 8) !== MESH_PACK_FOURCC)
panic("Supplied array buffer is not a mesh library (no PFMP header)!");
let offset = 12;
while (offset < meshes.byteLength) {
@ -211,319 +201,165 @@ export class PathfinderMeshData implements Meshes<ArrayBuffer>, MeshDataCounts,
const startOffset = offset + 8;
const endOffset = startOffset + chunkLength;
if (BUFFER_TYPE_FOURCCS.hasOwnProperty(fourCC))
this[BUFFER_TYPE_FOURCCS[fourCC]] = meshes.slice(startOffset, endOffset);
else if (fourCC === 'prng')
this.readPathRanges(meshes.slice(startOffset, endOffset));
if (fourCC === MESH_FOURCC)
this.meshes.push(new PathfinderMesh(meshes.slice(startOffset, endOffset)));
offset = endOffset;
}
} else {
for (const bufferName of Object.keys(BUFFER_TYPES) as Array<keyof Meshes<void>>)
this[bufferName] = meshes[bufferName];
const ranges = unwrapUndef(optionalRanges);
for (const range of Object.keys(RANGE_TO_COUNT_TABLE) as Array<keyof PathRanges>)
this[range] = ranges[range];
}
}
this.bQuadVertexPositionCount = this.bQuadVertexPositions.byteLength /
B_QUAD_VERTEX_POSITION_SIZE;
this.bQuadVertexInteriorIndexCount = this.bQuadVertexInteriorIndices.byteLength /
INDEX_SIZE;
this.bBoxCount = this.bBoxes.byteLength / (FLOAT32_SIZE * 6);
this.stencilSegmentCount = this.stencilSegments.byteLength / (FLOAT32_SIZE * 6);
export class PathfinderMesh implements MeshLike<ArrayBuffer> {
bQuadVertexPositions!: ArrayBuffer;
bQuadVertexInteriorIndices!: ArrayBuffer;
bBoxes!: ArrayBuffer;
stencilSegments!: ArrayBuffer;
stencilNormals!: ArrayBuffer;
this.rebuildPathIDBuffers();
}
expand(pathIDs: number[]): PathfinderMeshData {
const tempOriginalBuffers: any = {}, tempExpandedArrays: any = {};
for (const key of Object.keys(BUFFER_TYPES) as Array<keyof Meshes<void>>) {
const arrayConstructor = PRIMITIVE_TYPE_ARRAY_CONSTRUCTORS[MESH_TYPES[key].type];
tempOriginalBuffers[key] = new arrayConstructor(this[key]);
tempExpandedArrays[key] = [];
}
const tempOriginalRanges: Partial<PathRanges> = {};
const tempExpandedRanges: Partial<PathRanges> = {};
for (const key of Object.keys(RANGE_TO_COUNT_TABLE) as Array<keyof PathRanges>) {
tempOriginalRanges[key] = this[key];
const newExpandedRanges = [];
for (const pathIndex of pathIDs)
newExpandedRanges.push(new Range(0, 0));
tempExpandedRanges[key] = newExpandedRanges;
}
const originalBuffers: Meshes<PrimitiveTypeArray> = tempOriginalBuffers;
const originalRanges: PathRanges = tempOriginalRanges as PathRanges;
const expandedArrays: Meshes<number[]> = tempExpandedArrays;
const expandedRanges: PathRanges = tempExpandedRanges as PathRanges;
for (let newPathIndex = 0; newPathIndex < pathIDs.length; newPathIndex++) {
const expandedPathID = newPathIndex + 1;
const originalPathID = pathIDs[newPathIndex];
// Copy over B-quad vertex positions.
const bQuadVertexCopyResult = copyVertices(['bQuadVertexPositions'],
'bQuadVertexPositionPathRanges',
expandedArrays,
expandedRanges,
originalBuffers,
originalRanges,
expandedPathID,
originalPathID);
if (bQuadVertexCopyResult == null)
continue;
const firstExpandedBQuadVertexIndex = bQuadVertexCopyResult.expandedStartIndex;
const firstBQuadVertexIndex = bQuadVertexCopyResult.originalStartIndex;
const lastBQuadVertexIndex = bQuadVertexCopyResult.originalEndIndex;
// Copy over B-vertex indices.
copyIndices(expandedArrays.bQuadVertexInteriorIndices,
expandedRanges.bQuadVertexInteriorIndexPathRanges,
originalBuffers.bQuadVertexInteriorIndices as Uint32Array,
firstExpandedBQuadVertexIndex * 6,
firstBQuadVertexIndex * 6,
lastBQuadVertexIndex * 6,
expandedPathID);
// Copy over B-boxes.
const bBoxVertexCopyResult = copyVertices(['bBoxes'],
'bBoxPathRanges',
expandedArrays,
expandedRanges,
originalBuffers,
originalRanges,
expandedPathID,
originalPathID);
if (bBoxVertexCopyResult == null)
continue;
const firstExpandedBBoxIndex = bBoxVertexCopyResult.expandedStartIndex;
const firstBBoxIndex = bBoxVertexCopyResult.originalStartIndex;
const lastBBoxIndex = bBoxVertexCopyResult.originalEndIndex;
// Copy over segments.
copySegments(['stencilSegments', 'stencilNormals'],
'stencilSegmentPathRanges',
expandedArrays,
expandedRanges,
originalBuffers,
originalRanges,
expandedPathID,
originalPathID);
}
const tempExpandedBuffers: any = {};
for (const key of Object.keys(MESH_TYPES) as Array<keyof Meshes<void>>) {
const bufferType = MESH_TYPES[key].type;
const arrayConstructor = PRIMITIVE_TYPE_ARRAY_CONSTRUCTORS[bufferType];
const expandedBuffer = new ArrayBuffer(expandedArrays[key].length *
sizeOfPrimitive(bufferType));
(new arrayConstructor(expandedBuffer)).set(expandedArrays[key]);
tempExpandedBuffers[key] = expandedBuffer;
}
const expandedBuffers = tempExpandedBuffers as Meshes<ArrayBuffer>;
return new PathfinderMeshData(expandedBuffers, expandedRanges);
}
private readPathRanges(meshes: ArrayBuffer): void {
constructor(data: ArrayBuffer) {
let offset = 0;
while (offset < meshes.byteLength) {
const fourCC = toFourCC(meshes, offset);
const chunkLength = readUInt32(meshes, offset + 4);
while (offset < data.byteLength) {
const fourCC = toFourCC(data, offset);
const chunkLength = readUInt32(data, offset + 4);
const startOffset = offset + 8;
const endOffset = startOffset + chunkLength;
if (PATH_RANGE_TYPE_FOURCCS.hasOwnProperty(fourCC)) {
const key = PATH_RANGE_TYPE_FOURCCS[fourCC];
const ranges = new Uint32Array(meshes.slice(startOffset, endOffset));
this[key] = _.chunk(ranges, 2).map(range => new Range(range[0], range[1]));
}
if (BUFFER_TYPE_FOURCCS.hasOwnProperty(fourCC))
this[BUFFER_TYPE_FOURCCS[fourCC]] = data.slice(startOffset, endOffset);
offset = endOffset;
}
}
private rebuildPathIDBuffers(): void {
for (const rangeKey of Object.keys(RANGE_TO_COUNT_TABLE) as
Array<keyof RangeToCountTable>) {
if (!RANGE_TO_RANGE_BUFFER_TABLE.hasOwnProperty(rangeKey))
continue;
const rangeBufferKey = RANGE_TO_RANGE_BUFFER_TABLE[rangeKey];
const instanceCount = this[RANGE_TO_COUNT_TABLE[rangeKey]];
const ranges = this[rangeKey as keyof PathRanges];
const meshType = MESH_TYPES[rangeBufferKey];
const fieldCount = meshType.size;
const destBuffer = new Uint16Array(instanceCount * fieldCount);
let destIndex = 0;
for (let pathIndex = 0; pathIndex < ranges.length; pathIndex++) {
const range = ranges[pathIndex];
for (let subindex = range.start; subindex < range.end; subindex++) {
for (let fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
destBuffer[destIndex] = pathIndex + 1;
destIndex++;
for (const type of Object.keys(BUFFER_TYPE_FOURCCS) as Array<keyof MeshLike<void>>) {
if (this[type] == null)
this[type] = new ArrayBuffer(0);
}
}
}
(this as any)[rangeBufferKey] = destBuffer.buffer as ArrayBuffer;
}
}
}
export class PathfinderPackedMeshes implements PackedMeshLike<PrimitiveTypeArray>, PathRanges {
readonly bBoxes!: Float32Array;
readonly bQuadVertexInteriorIndices!: Uint32Array;
readonly bQuadVertexPositions!: Float32Array;
readonly stencilSegments!: Float32Array;
readonly stencilNormals!: Float32Array;
export class PathfinderMeshBuffers implements Meshes<WebGLBuffer>, PathRanges {
readonly bQuadVertexPositions!: WebGLBuffer;
readonly bQuadVertexPositionPathIDs!: WebGLBuffer;
readonly bQuadVertexInteriorIndices!: WebGLBuffer;
readonly bBoxes!: WebGLBuffer;
readonly bBoxSigns!: WebGLBuffer;
readonly bBoxIndices!: WebGLBuffer;
readonly bBoxPathIDs!: WebGLBuffer;
readonly stencilSegments!: WebGLBuffer;
readonly stencilSegmentPathIDs!: WebGLBuffer;
readonly stencilNormals!: WebGLBuffer;
readonly bBoxPathIDs!: Uint16Array;
readonly bQuadVertexPositionPathIDs!: Uint16Array;
readonly stencilSegmentPathIDs!: Uint16Array;
readonly bQuadVertexPositionPathRanges!: Range[];
readonly bQuadVertexInteriorIndexPathRanges!: Range[];
readonly bBoxPathRanges!: Range[];
readonly bQuadVertexInteriorIndexPathRanges!: Range[];
readonly bQuadVertexPositionPathRanges!: Range[];
readonly stencilSegmentPathRanges!: Range[];
constructor(gl: WebGLRenderingContext, meshData: PathfinderMeshData) {
for (const bufferName of Object.keys(BUFFER_TYPES) as Array<keyof Meshes<void>>) {
/// NB: Mesh indices are 1-indexed.
constructor(meshPack: PathfinderMeshPack, meshIndices?: number[]) {
if (meshIndices == null)
meshIndices = meshPack.meshes.map((value, index) => index + 1);
const meshData: PackedMeshBuilder<number[]> = {
bBoxPathIDs: [],
bBoxes: [],
bQuadVertexInteriorIndices: [],
bQuadVertexPositionPathIDs: [],
bQuadVertexPositions: [],
stencilNormals: [],
stencilSegmentPathIDs: [],
stencilSegments: [],
};
const pathRanges: PathRanges = {
bBoxPathRanges: [],
bQuadVertexInteriorIndexPathRanges: [],
bQuadVertexPositionPathRanges: [],
stencilSegmentPathRanges: [],
};
for (let destMeshIndex = 0; destMeshIndex < meshIndices.length; destMeshIndex++) {
const srcMeshIndex = meshIndices[destMeshIndex];
const mesh = meshPack.meshes[srcMeshIndex - 1];
for (const pathRangeType of Object.keys(pathRanges) as Array<keyof PathRanges>) {
const bufferType = PATH_RANGE_TO_BUFFER_TYPE_TABLE[pathRangeType];
const startIndex = bufferCount(meshData, bufferType);
pathRanges[pathRangeType].push(new Range(startIndex, startIndex));
}
for (const indexType of Object.keys(BUFFER_TYPES) as MeshBufferType[]) {
if (BUFFER_TYPES[indexType] !== 'ELEMENT_ARRAY_BUFFER')
continue;
const indexTypeDescriptor = unwrapUndef(INDEX_TYPE_DESCRIPTOR_TABLE[indexType]);
const offset = bufferCount(meshData, indexTypeDescriptor.bufferType);
for (const index of new Uint32Array(mesh[indexType]))
meshData[indexType].push(index + offset);
}
for (const bufferType of Object.keys(BUFFER_TYPES) as MeshBufferType[]) {
if (BUFFER_TYPES[bufferType] !== 'ARRAY_BUFFER')
continue;
meshData[bufferType].push(...new Float32Array(mesh[bufferType]));
const pathIDBufferType = PATH_ID_BUFFER_TABLE[bufferType];
if (pathIDBufferType != null) {
const length = bufferCount(meshData, bufferType);
while (meshData[pathIDBufferType].length < length)
meshData[pathIDBufferType].push(destMeshIndex + 1);
}
}
for (const pathRangeType of Object.keys(PATH_RANGE_TO_BUFFER_TYPE_TABLE) as
Array<keyof PathRanges>) {
const bufferType = PATH_RANGE_TO_BUFFER_TYPE_TABLE[pathRangeType];
const endIndex = bufferCount(meshData, bufferType);
unwrapUndef(_.last(pathRanges[pathRangeType])).end = endIndex;
}
}
for (const bufferType of Object.keys(BUFFER_TYPES) as PackedMeshBufferType[]) {
const arrayCtor = PRIMITIVE_TYPE_ARRAY_CONSTRUCTORS[MESH_TYPES[bufferType].type];
this[bufferType] = (new arrayCtor(meshData[bufferType])) as any;
}
_.assign(this, pathRanges);
}
count(bufferType: MeshBufferType): number {
return bufferCount(this, bufferType);
}
}
export class PathfinderPackedMeshBuffers implements PackedMeshLike<WebGLBuffer>, PathRanges {
readonly bBoxes!: WebGLBuffer;
readonly bQuadVertexInteriorIndices!: WebGLBuffer;
readonly bQuadVertexPositions!: WebGLBuffer;
readonly stencilSegments!: WebGLBuffer;
readonly stencilNormals!: WebGLBuffer;
readonly bBoxPathIDs!: WebGLBuffer;
readonly bQuadVertexPositionPathIDs!: WebGLBuffer;
readonly stencilSegmentPathIDs!: WebGLBuffer;
readonly bBoxPathRanges!: Range[];
readonly bQuadVertexInteriorIndexPathRanges!: Range[];
readonly bQuadVertexPositionPathRanges!: Range[];
readonly stencilSegmentPathRanges!: Range[];
constructor(gl: WebGLRenderingContext, packedMeshes: PathfinderPackedMeshes) {
for (const bufferName of Object.keys(BUFFER_TYPES) as PackedMeshBufferType[]) {
const bufferType = gl[BUFFER_TYPES[bufferName]];
const buffer = expectNotNull(gl.createBuffer(), "Failed to create buffer!");
gl.bindBuffer(bufferType, buffer);
gl.bufferData(bufferType, meshData[bufferName], gl.STATIC_DRAW);
gl.bufferData(bufferType, packedMeshes[bufferName], gl.STATIC_DRAW);
this[bufferName] = buffer;
}
for (const rangeName of RANGE_KEYS)
this[rangeName] = meshData[rangeName];
}
}
function copyVertices(vertexBufferNames: Array<keyof Meshes<void>>,
rangesName: keyof PathRanges,
expandedMeshes: Meshes<number[]>,
expandedRanges: PathRanges,
originalMeshes: Meshes<PrimitiveTypeArray>,
originalRanges: PathRanges,
expandedPathID: number,
originalPathID: number):
VertexCopyResult | null {
const originalRange = originalRanges[rangesName][originalPathID - 1];
const firstExpandedVertexIndex = _.reduce(expandedRanges[rangesName],
(maxIndex, range) => Math.max(maxIndex, range.end),
0);
for (let originalVertexIndex = originalRange.start;
originalVertexIndex < originalRange.end;
originalVertexIndex++) {
for (const vertexBufferName of vertexBufferNames) {
const expanded = expandedMeshes[vertexBufferName];
const original = originalMeshes[vertexBufferName];
const size = MESH_TYPES[vertexBufferName].size;
for (let elementIndex = 0; elementIndex < size; elementIndex++) {
const globalIndex = size * originalVertexIndex + elementIndex;
expanded.push(original[globalIndex]);
for (const rangeName of Object.keys(PATH_RANGE_TO_BUFFER_TYPE_TABLE) as
Array<keyof PathRanges>) {
this[rangeName] = packedMeshes[rangeName];
}
}
}
const lastExpandedVertexIndex = firstExpandedVertexIndex + originalRange.length;
expandedRanges[rangesName][expandedPathID - 1] = new Range(firstExpandedVertexIndex,
lastExpandedVertexIndex);
return {
expandedEndIndex: lastExpandedVertexIndex,
expandedStartIndex: firstExpandedVertexIndex,
originalEndIndex: originalRange.end,
originalStartIndex: originalRange.start,
};
}
function copyIndices(destIndices: number[],
destRanges: Range[],
srcIndices: Uint32Array,
firstExpandedIndex: number,
firstIndex: number,
lastIndex: number,
expandedPathID: number,
validateIndex?: (indexIndex: number) => boolean) {
if (firstIndex === lastIndex)
return;
// FIXME(pcwalton): Speed this up using the original ranges.
let indexIndex = srcIndices.findIndex(index => index >= firstIndex && index < lastIndex);
if (indexIndex < 0)
return;
const firstDestIndex = destIndices.length;
const indexDelta = firstExpandedIndex - firstIndex;
while (indexIndex < srcIndices.length) {
const index = srcIndices[indexIndex];
if (validateIndex == null || validateIndex(indexIndex)) {
if (index < firstIndex || index >= lastIndex)
break;
destIndices.push(index + indexDelta);
} else {
destIndices.push(index);
}
indexIndex++;
}
const lastDestIndex = destIndices.length;
destRanges[expandedPathID - 1] = new Range(firstDestIndex, lastDestIndex);
}
function copySegments(segmentBufferNames: Array<keyof Meshes<void>>,
rangesName: keyof PathRanges,
expandedMeshes: Meshes<number[]>,
expandedRanges: PathRanges,
originalMeshes: Meshes<PrimitiveTypeArray>,
originalRanges: PathRanges,
expandedPathID: number,
originalPathID: number):
void {
const originalRange = originalRanges[rangesName][originalPathID - 1];
const firstExpandedSegmentIndex = _.reduce(expandedRanges[rangesName],
(maxIndex, range) => Math.max(maxIndex, range.end),
0);
for (let originalSegmentIndex = originalRange.start;
originalSegmentIndex < originalRange.end;
originalSegmentIndex++) {
for (const segmentBufferName of segmentBufferNames) {
if (originalMeshes[segmentBufferName].length === 0)
continue;
const size = MESH_TYPES[segmentBufferName].size;
for (let fieldIndex = 0; fieldIndex < size; fieldIndex++) {
const srcIndex = size * originalSegmentIndex + fieldIndex;
expandedMeshes[segmentBufferName].push(originalMeshes[segmentBufferName][srcIndex]);
}
}
}
const lastExpandedSegmentIndex = firstExpandedSegmentIndex + originalRange.length;
expandedRanges[rangesName][expandedPathID - 1] = new Range(firstExpandedSegmentIndex,
lastExpandedSegmentIndex);
function bufferCount(mesh: MeshLike<ArrayLike>, bufferType: MeshBufferType): number {
return mesh[bufferType].length / MESH_TYPES[bufferType].size;
}
function sizeOfPrimitive(primitiveType: PrimitiveType): number {
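
On the new PathfinderPackedMeshes, the old explicit per-buffer counts (bQuadVertexPositionCount, stencilSegmentCount, and so on) are derived on demand by the count() method, which delegates to bufferCount() above: the typed array's length divided by the per-element size recorded in MESH_TYPES. A hedged sketch of what call sites now look like (packedMeshes is an illustrative PathfinderPackedMeshes instance):

// Before: meshes.stencilSegmentCount
// After: stencilSegments.length / MESH_TYPES.stencilSegments.size (i.e. 6).
const stencilSegmentCount = packedMeshes.count('stencilSegments');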

View File

@ -19,7 +19,7 @@ import {DemoAppController, setSwitchInputsValue} from "./app-controller";
import {SUBPIXEL_GRANULARITY} from './atlas';
import {OrthographicCamera} from './camera';
import {UniformMap} from './gl-utils';
import {PathfinderMeshData} from './meshes';
import {PathfinderMeshPack, PathfinderPackedMeshBuffers, PathfinderPackedMeshes} from './meshes';
import {PathTransformBuffers, Renderer} from "./renderer";
import {ShaderMap, ShaderProgramSource} from "./shader-loader";
import SSAAStrategy from './ssaa-strategy';
@ -116,7 +116,7 @@ class ReferenceTestAppController extends DemoAppController<ReferenceTestView> {
}
private glyphStore!: GlyphStore;
private baseMeshes!: PathfinderMeshData;
private baseMeshes!: PathfinderMeshPack;
private expandedMeshes!: ExpandedMeshData;
private fontSizeInput!: HTMLInputElement;
@ -515,7 +515,7 @@ class ReferenceTestAppController extends DemoAppController<ReferenceTestView> {
this.svgLoader.partition().then(meshes => {
this.view.then(view => {
view.recreateRenderer();
view.attachMeshes([meshes]);
view.attachMeshes([new PathfinderPackedMeshes(meshes)]);
view.initCameraBounds(this.svgLoader.svgViewBox);
});
});
@ -688,7 +688,7 @@ class ReferenceTestTextRenderer extends Renderer {
}
protected get objectCount(): number {
return this.meshes == null ? 0 : this.meshes.length;
return this.meshBuffers == null ? 0 : this.meshBuffers.length;
}
protected get usedSizeFactor(): glmatrix.vec2 {
@ -730,7 +730,7 @@ class ReferenceTestTextRenderer extends Renderer {
this.camera.onZoom = () => renderContext.setDirty();
}
attachMeshes(meshes: PathfinderMeshData[]): void {
attachMeshes(meshes: PathfinderPackedMeshes[]): void {
super.attachMeshes(meshes);
this.uploadPathColors(1);

View File

@ -18,7 +18,7 @@ import {NoAAStrategy, StemDarkeningMode, SubpixelAAType} from './aa-strategy';
import {AAOptions} from './app-controller';
import PathfinderBufferTexture from "./buffer-texture";
import {UniformMap} from './gl-utils';
import {PathfinderMeshBuffers, PathfinderMeshData} from "./meshes";
import {PathfinderPackedMeshBuffers, PathfinderPackedMeshes} from "./meshes";
import {ShaderMap} from './shader-loader';
import {FLOAT32_SIZE, Range, UINT16_SIZE, UINT32_SIZE, unwrapNull, unwrapUndef} from './utils';
import {RenderContext, Timings} from "./view";
@ -43,8 +43,8 @@ export abstract class Renderer {
readonly pathTransformBufferTextures: Array<PathTransformBuffers<PathfinderBufferTexture>>;
meshes: PathfinderMeshBuffers[] | null;
meshData: PathfinderMeshData[] | null;
meshBuffers: PathfinderPackedMeshBuffers[] | null;
meshes: PathfinderPackedMeshes[] | null;
lastTimings: Timings;
@ -65,7 +65,7 @@ export abstract class Renderer {
}
get meshesAttached(): boolean {
return this.meshes != null && this.meshData != null;
return this.meshBuffers != null && this.meshes != null;
}
abstract get isMulticolor(): boolean;
@ -100,8 +100,8 @@ export abstract class Renderer {
constructor(renderContext: RenderContext) {
this.renderContext = renderContext;
this.meshData = null;
this.meshes = null;
this.meshBuffers = null;
this.lastTimings = { rendering: 0, compositing: 0 };
@ -121,10 +121,12 @@ export abstract class Renderer {
this.antialiasingStrategy.setFramebufferSize(this);
}
attachMeshes(meshes: PathfinderMeshData[]): void {
attachMeshes(meshes: PathfinderPackedMeshes[]): void {
const renderContext = this.renderContext;
this.meshData = meshes;
this.meshes = meshes.map(meshes => new PathfinderMeshBuffers(renderContext.gl, meshes));
this.meshes = meshes;
this.meshBuffers = meshes.map(meshes => {
return new PathfinderPackedMeshBuffers(renderContext.gl, meshes);
});
unwrapNull(this.antialiasingStrategy).attachMeshes(this);
}
@ -134,7 +136,7 @@ export abstract class Renderer {
redraw(): void {
const renderContext = this.renderContext;
if (this.meshes == null)
if (this.meshBuffers == null)
return;
this.clearDestFramebuffer();
@ -211,7 +213,7 @@ export abstract class Renderer {
aaOptions.stemDarkening);
this.antialiasingStrategy.init(this);
if (this.meshData != null)
if (this.meshes != null)
this.antialiasingStrategy.attachMeshes(this);
this.antialiasingStrategy.setFramebufferSize(this);
@ -359,9 +361,9 @@ export abstract class Renderer {
}
pathRangeForObject(objectIndex: number): Range {
if (this.meshes == null)
if (this.meshBuffers == null)
return new Range(0, 0);
const bVertexPathRanges = this.meshes[objectIndex].bQuadVertexPositionPathRanges;
const bVertexPathRanges = this.meshBuffers[objectIndex].bQuadVertexPositionPathRanges;
return new Range(1, bVertexPathRanges.length + 1);
}
@ -446,7 +448,7 @@ export abstract class Renderer {
}
private directlyRenderObject(pass: number, objectIndex: number): void {
if (this.meshes == null || this.meshData == null)
if (this.meshBuffers == null || this.meshes == null)
return;
const renderContext = this.renderContext;
@ -463,8 +465,8 @@ export abstract class Renderer {
const pathRange = this.pathRangeForObject(objectIndex);
const meshIndex = this.meshIndexForObject(objectIndex);
const meshes = this.meshes![meshIndex];
const meshData = this.meshData![meshIndex];
const meshes = this.meshBuffers![meshIndex];
const meshData = this.meshes![meshIndex];
// Set up implicit cover state.
gl.depthFunc(gl.GREATER);
@ -624,15 +626,15 @@ export abstract class Renderer {
}
private initImplicitCoverCurveVAO(objectIndex: number, instanceRange: Range): void {
if (this.meshes == null)
if (this.meshBuffers == null)
return;
const renderContext = this.renderContext;
const gl = renderContext.gl;
const meshIndex = this.meshIndexForObject(objectIndex);
const meshes = this.meshes[meshIndex];
const meshData = unwrapNull(this.meshData)[meshIndex];
const meshes = this.meshBuffers[meshIndex];
const meshData = unwrapNull(this.meshes)[meshIndex];
const directCurveProgramName = this.directCurveProgramName();
const directCurveProgram = renderContext.shaderPrograms[directCurveProgramName];
@ -666,14 +668,14 @@ export abstract class Renderer {
instanceRange: Range,
renderingMode: DirectRenderingMode):
void {
if (this.meshes == null)
if (this.meshBuffers == null)
return;
const renderContext = this.renderContext;
const gl = renderContext.gl;
const meshIndex = this.meshIndexForObject(objectIndex);
const meshes = this.meshes[meshIndex];
const meshes = this.meshBuffers[meshIndex];
const directInteriorProgramName = this.directInteriorProgramName(renderingMode);
const directInteriorProgram = renderContext.shaderPrograms[directInteriorProgramName];

View File

@ -13,7 +13,7 @@ import * as _ from 'lodash';
import {DemoAppController} from './app-controller';
import {OrthographicCamera} from "./camera";
import {PathfinderMeshData} from "./meshes";
import {PathfinderPackedMeshes} from "./meshes";
import {ShaderMap, ShaderProgramSource} from './shader-loader';
import {BUILTIN_SVG_URI, SVGLoader} from './svg-loader';
import {SVGRenderer} from './svg-renderer';
@ -30,7 +30,7 @@ class SVGDemoController extends DemoAppController<SVGDemoView> {
protected readonly builtinFileURI: string = BUILTIN_SVG_URI;
private meshes!: PathfinderMeshData;
private meshes!: PathfinderPackedMeshes;
start() {
super.start();
@ -43,7 +43,7 @@ class SVGDemoController extends DemoAppController<SVGDemoView> {
protected fileLoaded(fileData: ArrayBuffer) {
this.loader.loadFile(fileData);
this.loader.partition().then(meshes => {
this.meshes = meshes;
this.meshes = new PathfinderPackedMeshes(meshes);
this.meshesReceived();
});
}

View File

@ -13,7 +13,7 @@ import * as glmatrix from 'gl-matrix';
import * as _ from 'lodash';
import 'path-data-polyfill.js';
import {parseServerTiming, PathfinderMeshData} from "./meshes";
import {parseServerTiming, PathfinderMeshPack, PathfinderPackedMeshes} from "./meshes";
import {lerp, panic, Range, unwrapNull, unwrapUndef} from "./utils";
export const BUILTIN_SVG_URI: string = "/svg/demo";
@ -126,7 +126,7 @@ export class SVGLoader {
this.attachSVG(svgElement);
}
partition(pathIndex?: number | undefined): Promise<PathfinderMeshData> {
partition(pathIndex?: number | undefined): Promise<PathfinderMeshPack> {
// Make the request.
const paths = pathIndex == null ? this.paths : [this.paths[pathIndex]];
let time = 0;
@ -141,7 +141,7 @@ export class SVGLoader {
}).then(response => {
time = parseServerTiming(response.headers);
return response.arrayBuffer();
}).then(buffer => new PathfinderMeshData(buffer));
}).then(buffer => new PathfinderMeshPack(buffer));
}
private attachSVG(svgElement: SVGSVGElement) {

View File

@ -15,7 +15,7 @@ import {NoAAStrategy} from './aa-strategy';
import {SubpixelAAType} from './aa-strategy';
import {OrthographicCamera} from "./camera";
import {UniformMap} from './gl-utils';
import {PathfinderMeshData} from './meshes';
import {PathfinderPackedMeshes} from './meshes';
import {PathTransformBuffers, Renderer} from "./renderer";
import {ShaderMap} from './shader-loader';
import SSAAStrategy from './ssaa-strategy';
@ -114,7 +114,7 @@ export abstract class SVGRenderer extends Renderer {
return boundingRectsBuffer;
}
attachMeshes(meshes: PathfinderMeshData[]): void {
attachMeshes(meshes: PathfinderPackedMeshes[]): void {
super.attachMeshes(meshes);
this.uploadPathColors(1);
this.uploadPathTransforms(1);

View File

@ -23,7 +23,7 @@ import {CameraView, OrthographicCamera} from "./camera";
import {createFramebuffer, createFramebufferColorTexture} from './gl-utils';
import {createFramebufferDepthTexture, QUAD_ELEMENTS, setTextureParameters} from './gl-utils';
import {UniformMap} from './gl-utils';
import {PathfinderMeshBuffers, PathfinderMeshData} from './meshes';
import {PathfinderMeshPack, PathfinderPackedMeshBuffers, PathfinderPackedMeshes} from './meshes';
import {PathfinderShaderProgram, ShaderMap, ShaderProgramSource} from './shader-loader';
import SSAAStrategy from './ssaa-strategy';
import {calculatePixelRectForGlyph, PathfinderFont} from "./text";
@ -133,7 +133,7 @@ class TextDemoController extends DemoAppController<TextDemoView> {
private _atlas: Atlas;
private meshes!: PathfinderMeshData;
private meshes!: PathfinderPackedMeshes;
private _fontSize!: number;
private _rotationAngle!: number;
@ -278,13 +278,13 @@ class TextDemoController extends DemoAppController<TextDemoView> {
});
}
private expandMeshes(meshes: PathfinderMeshData, glyphCount: number): PathfinderMeshData {
private expandMeshes(meshes: PathfinderMeshPack, glyphCount: number): PathfinderPackedMeshes {
const pathIDs = [];
for (let glyphIndex = 0; glyphIndex < glyphCount; glyphIndex++) {
for (let subpixel = 0; subpixel < SUBPIXEL_GRANULARITY; subpixel++)
pathIDs.push(glyphIndex + 1);
}
return meshes.expand(pathIDs);
return new PathfinderPackedMeshes(meshes, pathIDs);
}
get atlas(): Atlas {

View File

@ -131,7 +131,7 @@ export abstract class TextRenderer extends Renderer {
}
protected get objectCount(): number {
return this.meshes == null ? 0 : this.meshes.length;
return this.meshBuffers == null ? 0 : this.meshBuffers.length;
}
protected get extraEmboldenAmount(): glmatrix.vec2 {

View File

@ -14,7 +14,8 @@ import * as _ from 'lodash';
import * as opentype from "opentype.js";
import {Metrics} from 'opentype.js';
import {B_QUAD_SIZE, parseServerTiming, PathfinderMeshData} from "./meshes";
import {B_QUAD_SIZE, parseServerTiming, PathfinderMeshPack} from "./meshes";
import {PathfinderPackedMeshes} from "./meshes";
import {assert, lerp, panic, UINT32_MAX, UINT32_SIZE, unwrapNull} from "./utils";
export const BUILTIN_FONT_URI: string = "/otf/demo";
@ -39,11 +40,11 @@ export const MAX_STEM_DARKENING_PIXELS_PER_EM: number = 72.0;
const PARTITION_FONT_ENDPOINT_URI: string = "/partition-font";
export interface ExpandedMeshData {
meshes: PathfinderMeshData;
meshes: PathfinderPackedMeshes;
}
export interface PartitionResult {
meshes: PathfinderMeshData;
meshes: PathfinderMeshPack;
time: number;
}
@ -213,7 +214,7 @@ export class TextFrame {
this.font = font;
}
expandMeshes(meshes: PathfinderMeshData, glyphIDs: number[]): ExpandedMeshData {
expandMeshes(meshes: PathfinderMeshPack, glyphIDs: number[]): ExpandedMeshData {
const pathIDs = [];
for (const textRun of this.runs) {
for (const glyphID of textRun.glyphIDs) {
@ -225,7 +226,7 @@ export class TextFrame {
}
return {
meshes: meshes.expand(pathIDs),
meshes: new PathfinderPackedMeshes(meshes, pathIDs),
};
}
@ -296,7 +297,7 @@ export class GlyphStore {
return response.arrayBuffer();
}).then(buffer => {
return {
meshes: new PathfinderMeshData(buffer),
meshes: new PathfinderMeshPack(buffer),
time: time,
};
});

View File

@ -16,7 +16,7 @@ import {AAOptions} from './app-controller';
import PathfinderBufferTexture from './buffer-texture';
import {Camera} from "./camera";
import {EXTDisjointTimerQuery, QUAD_ELEMENTS, UniformMap} from './gl-utils';
import {PathfinderMeshBuffers, PathfinderMeshData} from './meshes';
import {PathfinderPackedMeshBuffers, PathfinderPackedMeshes} from './meshes';
import {Renderer} from './renderer';
import {PathfinderShaderProgram, SHADER_NAMES, ShaderMap} from './shader-loader';
import {ShaderProgramSource, UnlinkedShaderProgram} from './shader-loader';
@ -157,8 +157,8 @@ export abstract class DemoView extends PathfinderView implements RenderContext {
atlasRenderingTimerQuery!: WebGLQuery;
compositingTimerQuery!: WebGLQuery;
meshes: PathfinderMeshBuffers[];
meshData: PathfinderMeshData[];
meshes: PathfinderPackedMeshBuffers[];
meshData: PathfinderPackedMeshes[];
get colorAlphaFormat(): ColorAlphaFormat {
// On macOS, RGBA framebuffers seem to cause driver stalls when switching between rendering
@ -193,7 +193,7 @@ export abstract class DemoView extends PathfinderView implements RenderContext {
this.wantsScreenshot = false;
}
attachMeshes(meshes: PathfinderMeshData[]): void {
attachMeshes(meshes: PathfinderPackedMeshes[]): void {
this.renderer.attachMeshes(meshes);
this.setDirty();
}

View File

@ -472,7 +472,7 @@ export class MCAAStrategy extends XCAAStrategy {
}
protected initVAOForObject(renderer: Renderer, objectIndex: number): void {
if (renderer.meshes == null || renderer.meshData == null)
if (renderer.meshBuffers == null || renderer.meshes == null)
return;
const renderContext = renderer.renderContext;
@ -488,13 +488,13 @@ export class MCAAStrategy extends XCAAStrategy {
const vao = this.vao;
renderContext.vertexArrayObjectExt.bindVertexArrayOES(vao);
const bBoxRanges = renderer.meshData[meshIndex].bBoxPathRanges;
const bBoxRanges = renderer.meshes[meshIndex].bBoxPathRanges;
const offset = calculateStartFromIndexRanges(pathRange, bBoxRanges);
gl.useProgram(shaderProgram.program);
gl.bindBuffer(gl.ARRAY_BUFFER, renderer.renderContext.quadPositionsBuffer);
gl.vertexAttribPointer(attributes.aTessCoord, 2, gl.FLOAT, false, FLOAT32_SIZE * 2, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, renderer.meshes[meshIndex].bBoxes);
gl.bindBuffer(gl.ARRAY_BUFFER, renderer.meshBuffers[meshIndex].bBoxes);
gl.vertexAttribPointer(attributes.aRect,
4,
gl.FLOAT,
@ -539,7 +539,7 @@ export class MCAAStrategy extends XCAAStrategy {
renderContext.instancedArraysExt.vertexAttribDivisorANGLE(attributes.aDUVDY, 1);
renderContext.instancedArraysExt.vertexAttribDivisorANGLE(attributes.aSignMode, 1);
gl.bindBuffer(gl.ARRAY_BUFFER, renderer.meshes[meshIndex].bBoxPathIDs);
gl.bindBuffer(gl.ARRAY_BUFFER, renderer.meshBuffers[meshIndex].bBoxPathIDs);
gl.vertexAttribPointer(attributes.aPathID,
1,
gl.UNSIGNED_SHORT,
@ -562,7 +562,7 @@ export class MCAAStrategy extends XCAAStrategy {
objectIndex: number,
shaderProgram: PathfinderShaderProgram):
void {
if (renderer.meshes == null || renderer.meshData == null)
if (renderer.meshBuffers == null || renderer.meshes == null)
return;
const renderContext = renderer.renderContext;
@ -586,7 +586,7 @@ export class MCAAStrategy extends XCAAStrategy {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, renderContext.quadElementsBuffer);
const bBoxRanges = renderer.meshData[meshIndex].bBoxPathRanges;
const bBoxRanges = renderer.meshes[meshIndex].bBoxPathRanges;
const count = calculateCountFromIndexRanges(pathRange, bBoxRanges);
renderContext.instancedArraysExt
@ -634,7 +634,7 @@ export class StencilAAAStrategy extends XCAAStrategy {
const renderContext = renderer.renderContext;
const gl = renderContext.gl;
if (renderer.meshData == null)
if (renderer.meshes == null)
return;
// Antialias.
@ -650,7 +650,7 @@ export class StencilAAAStrategy extends XCAAStrategy {
renderContext.vertexArrayObjectExt.bindVertexArrayOES(this.vao);
// FIXME(pcwalton): Only render the appropriate instances.
const count = renderer.meshData[0].stencilSegmentCount;
const count = renderer.meshes[0].count('stencilSegments');
renderContext.instancedArraysExt
.drawElementsInstancedANGLE(gl.TRIANGLES, 6, gl.UNSIGNED_BYTE, 0, count);
@ -701,7 +701,7 @@ export class StencilAAAStrategy extends XCAAStrategy {
}
private createVAO(renderer: Renderer): void {
if (renderer.meshes == null || renderer.meshData == null)
if (renderer.meshBuffers == null || renderer.meshes == null)
return;
const renderContext = renderer.renderContext;
@ -713,9 +713,9 @@ export class StencilAAAStrategy extends XCAAStrategy {
this.vao = renderContext.vertexArrayObjectExt.createVertexArrayOES();
renderContext.vertexArrayObjectExt.bindVertexArrayOES(this.vao);
const vertexPositionsBuffer = renderer.meshes[0].stencilSegments;
const vertexNormalsBuffer = renderer.meshes[0].stencilNormals;
const pathIDsBuffer = renderer.meshes[0].stencilSegmentPathIDs;
const vertexPositionsBuffer = renderer.meshBuffers[0].stencilSegments;
const vertexNormalsBuffer = renderer.meshBuffers[0].stencilNormals;
const pathIDsBuffer = renderer.meshBuffers[0].stencilSegmentPathIDs;
gl.useProgram(program.program);
gl.bindBuffer(gl.ARRAY_BUFFER, renderContext.quadPositionsBuffer);

View File

@ -46,7 +46,7 @@ use lyon_path::iterator::PathIter;
use pathfinder_font_renderer::{FontContext, FontInstance, GlyphImage};
use pathfinder_font_renderer::{GlyphKey, SubpixelOffset};
use pathfinder_partitioner::FillRule;
use pathfinder_partitioner::mesh_library::MeshLibrary;
use pathfinder_partitioner::mesh_pack::MeshPack;
use pathfinder_partitioner::partitioner::Partitioner;
use pathfinder_path_utils::cubic_to_quadratic::CubicToQuadraticTransformer;
use pathfinder_path_utils::stroke::{StrokeStyle, StrokeToFillIter};
@ -75,15 +75,15 @@ use rsvg::{Handle, HandleExt};
const SUGGESTED_JSON_SIZE_LIMIT: u64 = 32 * 1024 * 1024;
const MESH_LIBRARY_CACHE_SIZE: usize = 16;
const MESH_PACK_CACHE_SIZE: usize = 16;
const CUBIC_TO_QUADRATIC_APPROX_TOLERANCE: f32 = 5.0;
static NEXT_FONT_KEY: AtomicUsize = ATOMIC_USIZE_INIT;
lazy_static! {
static ref MESH_LIBRARY_CACHE: Mutex<LruCache<MeshLibraryCacheKey, PartitionResponder>> = {
Mutex::new(LruCache::new(MESH_LIBRARY_CACHE_SIZE))
static ref MESH_PACK_CACHE: Mutex<LruCache<MeshPackCacheKey, PartitionResponder>> = {
Mutex::new(LruCache::new(MESH_PACK_CACHE_SIZE))
};
}
@ -125,7 +125,7 @@ static BUILTIN_SVGS: [(&'static str, &'static str); 4] = [
];
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct MeshLibraryCacheKey {
struct MeshPackCacheKey {
builtin_font_name: String,
glyph_ids: Vec<u32>,
}
@ -282,25 +282,36 @@ struct PathPartitioningResult {
}
impl PathPartitioningResult {
fn compute(partitioner: &mut Partitioner,
fn compute(pack: &mut MeshPack,
path_descriptors: &[PathDescriptor],
paths: &[Vec<PathEvent>])
paths: &[Vec<PathEvent>],
approx_tolerance: Option<f32>)
-> PathPartitioningResult {
let timestamp_before = Instant::now();
for (path, path_descriptor) in paths.iter().zip(path_descriptors.iter()) {
path.iter().for_each(|event| partitioner.builder_mut().path_event(*event));
partitioner.partition((path_descriptor.path_index + 1) as u16,
path_descriptor.fill_rule);
partitioner.builder_mut().build_and_reset();
let mut partitioner = Partitioner::new();
if let Some(tolerance) = approx_tolerance {
partitioner.builder_mut().set_approx_tolerance(tolerance);
}
partitioner.library_mut().optimize();
path.iter().for_each(|event| partitioner.builder_mut().path_event(*event));
partitioner.partition(path_descriptor.fill_rule);
partitioner.builder_mut().build_and_reset();
partitioner.mesh_mut().push_stencil_segments(
CubicToQuadraticTransformer::new(path.iter().cloned(),
CUBIC_TO_QUADRATIC_APPROX_TOLERANCE));
partitioner.mesh_mut().push_stencil_normals(
CubicToQuadraticTransformer::new(path.iter().cloned(),
CUBIC_TO_QUADRATIC_APPROX_TOLERANCE));
pack.push(partitioner.into_mesh());
}
let time_elapsed = timestamp_before.elapsed();
let mut data_buffer = Cursor::new(vec![]);
drop(partitioner.library().serialize_into(&mut data_buffer));
drop(pack.serialize_into(&mut data_buffer));
PathPartitioningResult {
encoded_data: Arc::new(data_buffer.into_inner()),
@ -428,7 +439,7 @@ fn partition_font(request: Json<PartitionFontRequest>) -> Result<PartitionRespon
// Check the cache.
let cache_key = match request.face {
FontRequestFace::Builtin(ref builtin_font_name) => {
Some(MeshLibraryCacheKey {
Some(MeshPackCacheKey {
builtin_font_name: (*builtin_font_name).clone(),
glyph_ids: request.glyphs.iter().map(|glyph| glyph.id).collect(),
})
@ -437,7 +448,7 @@ fn partition_font(request: Json<PartitionFontRequest>) -> Result<PartitionRespon
};
if let Some(ref cache_key) = cache_key {
if let Ok(mut mesh_library_cache) = MESH_LIBRARY_CACHE.lock() {
if let Ok(mut mesh_library_cache) = MESH_PACK_CACHE.lock() {
if let Some(cache_entry) = mesh_library_cache.get_mut(cache_key) {
return Ok((*cache_entry).clone())
}
@ -477,7 +488,7 @@ fn partition_font(request: Json<PartitionFontRequest>) -> Result<PartitionRespon
paths.push(Transform2DPathIter::new(glyph_outline.iter(),
&glyph.transform).collect())
}
Err(_) => continue,
Err(_) => paths.push(vec![]),
};
path_descriptors.push(PathDescriptor {
@ -487,22 +498,11 @@ fn partition_font(request: Json<PartitionFontRequest>) -> Result<PartitionRespon
}
// Partition the decoded glyph outlines.
let mut library = MeshLibrary::new();
for (stored_path_index, path_descriptor) in path_descriptors.iter().enumerate() {
library.push_stencil_segments(
(path_descriptor.path_index + 1) as u16,
CubicToQuadraticTransformer::new(paths[stored_path_index].iter().cloned(),
CUBIC_TO_QUADRATIC_APPROX_TOLERANCE));
library.push_stencil_normals(
(path_descriptor.path_index + 1) as u16,
CubicToQuadraticTransformer::new(paths[stored_path_index].iter().cloned(),
CUBIC_TO_QUADRATIC_APPROX_TOLERANCE));
}
let mut partitioner = Partitioner::new(library);
let path_partitioning_result = PathPartitioningResult::compute(&mut partitioner,
let mut pack = MeshPack::new();
let path_partitioning_result = PathPartitioningResult::compute(&mut pack,
&path_descriptors,
&paths);
&paths,
None);
// Build the response.
let elapsed_ms = path_partitioning_result.elapsed_ms();
@ -512,7 +512,7 @@ fn partition_font(request: Json<PartitionFontRequest>) -> Result<PartitionRespon
};
if let Some(cache_key) = cache_key {
if let Ok(mut mesh_library_cache) = MESH_LIBRARY_CACHE.lock() {
if let Ok(mut mesh_library_cache) = MESH_PACK_CACHE.lock() {
mesh_library_cache.insert(cache_key, responder.clone());
}
}
@ -529,7 +529,7 @@ fn partition_svg_paths(request: Json<PartitionSvgPathsRequest>)
// commands.
let mut paths = vec![];
let mut path_descriptors = vec![];
let mut partitioner = Partitioner::new(MeshLibrary::new());
let mut pack = MeshPack::new();
let mut path_index = 0;
for path in &request.paths {
@ -583,12 +583,12 @@ fn partition_svg_paths(request: Json<PartitionSvgPathsRequest>)
// Compute approximation tolerance.
let tolerance = f32::max(request.view_box_width, request.view_box_height) * 0.001;
partitioner.builder_mut().set_approx_tolerance(tolerance);
// Partition the paths.
let path_partitioning_result = PathPartitioningResult::compute(&mut partitioner,
let path_partitioning_result = PathPartitioningResult::compute(&mut pack,
&path_descriptors,
&paths);
&paths,
Some(tolerance));
// Return the response.
let elapsed_ms = path_partitioning_result.elapsed_ms();

View File

@ -12,7 +12,6 @@ bincode = "0.8"
bit-vec = "0.4"
byteorder = "1.2"
env_logger = "0.4"
euclid = "0.17"
half = "1.0"
log = "0.3"
lyon_geom = "0.10"
@ -20,5 +19,9 @@ lyon_path = "0.10"
serde = "1.0"
serde_derive = "1.0"
[dependencies.euclid]
version = "0.17"
features = ["serde"]
[dependencies.pathfinder_path_utils]
path = "../path-utils"

View File

@ -41,7 +41,8 @@ use euclid::Point2D;
use std::u32;
pub mod builder;
pub mod mesh_library;
pub mod mesh;
pub mod mesh_pack;
pub mod partitioner;
/// The fill rule.

View File

@ -1,4 +1,4 @@
// pathfinder/partitioner/src/mesh_library.rs
// pathfinder/partitioner/src/mesh.rs
//
// Copyright © 2018 The Pathfinder Project Developers.
//
@ -8,24 +8,18 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use bincode::{self, Infinite};
use byteorder::{LittleEndian, WriteBytesExt};
use euclid::approxeq::ApproxEq;
use euclid::{Point2D, Rect, Size2D, Vector2D};
use lyon_path::PathEvent;
use pathfinder_path_utils::normals::PathNormals;
use pathfinder_path_utils::segments::{self, SegmentIter};
use serde::Serialize;
use std::f32;
use std::io::{self, ErrorKind, Seek, SeekFrom, Write};
use std::ops::Range;
use std::u32;
use {BQuad, BQuadVertexPositions, BVertexLoopBlinnData};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeshLibrary {
pub path_ranges: Vec<PathRanges>,
pub struct Mesh {
pub b_quads: Vec<BQuad>,
// FIXME(pcwalton): Merge with `b_vertex_positions` below.
pub b_quad_vertex_positions: Vec<BQuadVertexPositions>,
@ -37,11 +31,10 @@ pub struct MeshLibrary {
pub stencil_normals: Vec<StencilNormals>,
}
impl MeshLibrary {
impl Mesh {
#[inline]
pub fn new() -> MeshLibrary {
MeshLibrary {
path_ranges: vec![],
pub fn new() -> Mesh {
Mesh {
b_quads: vec![],
b_quad_vertex_positions: vec![],
b_quad_vertex_interior_indices: vec![],
@ -54,7 +47,6 @@ impl MeshLibrary {
}
pub fn clear(&mut self) {
self.path_ranges.clear();
self.b_quads.clear();
self.b_quad_vertex_positions.clear();
self.b_quad_vertex_interior_indices.clear();
@ -65,14 +57,6 @@ impl MeshLibrary {
self.stencil_normals.clear();
}
pub(crate) fn ensure_path_ranges(&mut self, path_id: u16) -> &mut PathRanges {
let path_index = (path_id as usize) - 1;
while path_index >= self.path_ranges.len() {
self.path_ranges.push(PathRanges::new())
}
&mut self.path_ranges[path_index]
}
pub(crate) fn add_b_vertex(&mut self,
position: &Point2D<f32>,
loop_blinn_data: &BVertexLoopBlinnData) {
@ -224,41 +208,7 @@ impl MeshLibrary {
const LL: u32 = 5;
}
/// Reverses interior indices so that they draw front-to-back.
///
/// This enables early Z optimizations.
pub fn optimize(&mut self) {
reverse_indices(&mut self.path_ranges,
&mut self.b_quad_vertex_interior_indices,
|path_ranges| path_ranges.b_quad_vertex_interior_indices.clone(),
|path_ranges, new_range| {
path_ranges.b_quad_vertex_interior_indices = new_range
});
fn reverse_indices<G, S>(path_ranges: &mut [PathRanges],
indices: &mut Vec<u32>,
mut getter: G,
mut setter: S)
where G: FnMut(&PathRanges) -> Range<u32>,
S: FnMut(&mut PathRanges, Range<u32>) {
let mut new_indices = Vec::with_capacity(indices.len());
for path_range in path_ranges.iter_mut().rev() {
let old_range = getter(path_range);
let old_range = (old_range.start as usize)..(old_range.end as usize);
let new_start_index = new_indices.len() as u32;
new_indices.extend_from_slice(&indices[old_range]);
let new_end_index = new_indices.len() as u32;
setter(path_range, new_start_index..new_end_index);
}
*indices = new_indices
}
}
pub fn push_stencil_segments<I>(&mut self, path_id: u16, stream: I)
where I: Iterator<Item = PathEvent> {
let first_segment_index = self.stencil_segments.len() as u32;
pub fn push_stencil_segments<I>(&mut self, stream: I) where I: Iterator<Item = PathEvent> {
let segment_iter = SegmentIter::new(stream);
for segment in segment_iter {
match segment {
@ -282,17 +232,11 @@ impl MeshLibrary {
segments::Segment::EndSubpath(..) => {}
}
}
let last_segment_index = self.stencil_segments.len() as u32;
let path_ranges = self.ensure_path_ranges(path_id);
path_ranges.stencil_segments = first_segment_index..last_segment_index;
}
/// Computes vertex normals necessary for emboldening and/or stem darkening. This is intended
/// for stencil-and-cover.
pub fn push_stencil_normals<I>(&mut self, _path_id: u16, stream: I)
where I: Iterator<Item = PathEvent> {
pub fn push_stencil_normals<I>(&mut self, stream: I) where I: Iterator<Item = PathEvent> {
let mut normals = PathNormals::new();
normals.add_path(stream);
self.stencil_normals.extend(normals.normals().iter().map(|normals| {
@ -303,163 +247,6 @@ impl MeshLibrary {
}
}))
}
/// Writes this mesh library to a RIFF file.
///
/// RIFF is a dead-simple extensible binary format documented here:
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee415713(v=vs.85).aspx
pub fn serialize_into<W>(&self, writer: &mut W) -> io::Result<()> where W: Write + Seek {
// `PFML` for "Pathfinder Mesh Library".
try!(writer.write_all(b"RIFF\0\0\0\0PFML"));
// NB: The RIFF spec requires that all chunks be padded to an even byte offset. However,
// for us, this is guaranteed by construction because each instance of all of the data that
// we're writing has a byte size that is a multiple of 4. So we don't bother with doing it
// explicitly here.
try!(write_chunk(writer, b"prng", |writer| write_path_ranges(writer, &self.path_ranges)));
try!(write_simple_chunk(writer, b"bqua", &self.b_quads));
try!(write_simple_chunk(writer, b"bqvp", &self.b_quad_vertex_positions));
try!(write_simple_chunk(writer, b"bqii", &self.b_quad_vertex_interior_indices));
try!(write_simple_chunk(writer, b"bbox", &self.b_boxes));
try!(write_simple_chunk(writer, b"sseg", &self.stencil_segments));
try!(write_simple_chunk(writer, b"snor", &self.stencil_normals));
let total_length = try!(writer.seek(SeekFrom::Current(0)));
try!(writer.seek(SeekFrom::Start(4)));
try!(writer.write_u32::<LittleEndian>((total_length - 8) as u32));
return Ok(());
fn write_chunk<W, F>(writer: &mut W, tag: &[u8; 4], mut closure: F) -> io::Result<()>
where W: Write + Seek, F: FnMut(&mut W) -> io::Result<()> {
try!(writer.write_all(tag));
try!(writer.write_all(b"\0\0\0\0"));
let start_position = try!(writer.seek(SeekFrom::Current(0)));
try!(closure(writer));
let end_position = try!(writer.seek(SeekFrom::Current(0)));
try!(writer.seek(SeekFrom::Start(start_position - 4)));
try!(writer.write_u32::<LittleEndian>((end_position - start_position) as u32));
try!(writer.seek(SeekFrom::Start(end_position)));
Ok(())
}
fn write_simple_chunk<W, T>(writer: &mut W, tag: &[u8; 4], data: &[T]) -> io::Result<()>
where W: Write + Seek, T: Serialize {
write_chunk(writer, tag, |writer| {
for datum in data {
try!(bincode::serialize_into(writer, datum, Infinite).map_err(|_| {
io::Error::from(ErrorKind::Other)
}));
}
Ok(())
})
}
fn write_path_ranges<W>(writer: &mut W, path_ranges: &[PathRanges]) -> io::Result<()>
where W: Write + Seek {
try!(write_path_range(writer, b"bqua", path_ranges, |ranges| &ranges.b_quads));
try!(write_path_range(writer,
b"bqvp",
path_ranges,
|ranges| &ranges.b_quad_vertex_positions));
try!(write_path_range(writer,
b"bqii",
path_ranges,
|ranges| &ranges.b_quad_vertex_interior_indices));
try!(write_path_range(writer, b"bver", path_ranges, |ranges| &ranges.b_vertices));
try!(write_path_range(writer, b"bbox", path_ranges, |ranges| &ranges.b_boxes));
try!(write_path_range(writer,
b"sseg",
path_ranges,
|ranges| &ranges.stencil_segments));
Ok(())
}
fn write_path_range<W, F>(writer: &mut W,
tag: &[u8; 4],
all_path_ranges: &[PathRanges],
mut get_range: F)
-> io::Result<()>
where W: Write + Seek, F: FnMut(&PathRanges) -> &Range<u32> {
write_chunk(writer, tag, |writer| {
for path_ranges in all_path_ranges {
let range = get_range(path_ranges);
try!(writer.write_u32::<LittleEndian>(range.start));
try!(writer.write_u32::<LittleEndian>(range.end));
}
Ok(())
})
}
}
pub(crate) fn snapshot_lengths(&self) -> MeshLibraryLengths {
MeshLibraryLengths {
b_quads: self.b_quads.len() as u32,
b_quad_vertex_positions: self.b_quad_vertex_positions.len() as u32,
b_quad_vertex_interior_indices: self.b_quad_vertex_interior_indices.len() as u32,
b_vertices: self.b_vertex_positions.len() as u32,
b_boxes: self.b_boxes.len() as u32,
}
}
#[inline]
pub fn next_path_id(&self) -> u16 {
(self.path_ranges.len() + 1) as u16
}
}
#[derive(Debug, Clone)]
pub struct MeshLibraryCoverIndices {
pub interior_indices: Vec<u32>,
pub curve_indices: Vec<u32>,
}
pub(crate) struct MeshLibraryLengths {
pub(crate) b_quads: u32,
b_quad_vertex_positions: u32,
b_quad_vertex_interior_indices: u32,
b_vertices: u32,
b_boxes: u32,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PathRanges {
pub b_quads: Range<u32>,
pub b_quad_vertex_positions: Range<u32>,
pub b_quad_vertex_interior_indices: Range<u32>,
pub b_vertices: Range<u32>,
pub b_boxes: Range<u32>,
pub segment_lines: Range<u32>,
pub segment_curves: Range<u32>,
pub stencil_segments: Range<u32>,
}
impl PathRanges {
fn new() -> PathRanges {
PathRanges {
b_quads: 0..0,
b_quad_vertex_positions: 0..0,
b_quad_vertex_interior_indices: 0..0,
b_vertices: 0..0,
b_boxes: 0..0,
segment_lines: 0..0,
segment_curves: 0..0,
stencil_segments: 0..0,
}
}
pub(crate) fn set_partitioning_lengths(&mut self,
start: &MeshLibraryLengths,
end: &MeshLibraryLengths) {
self.b_quads = start.b_quads..end.b_quads;
self.b_quad_vertex_positions = start.b_quad_vertex_positions..end.b_quad_vertex_positions;
self.b_quad_vertex_interior_indices =
start.b_quad_vertex_interior_indices..end.b_quad_vertex_interior_indices;
self.b_vertices = start.b_vertices..end.b_vertices;
self.b_boxes = start.b_boxes..end.b_boxes;
}
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
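
With `PathRanges` removed, `push_stencil_segments` and `push_stencil_normals` no longer take a `path_id`: each `Mesh` accumulates data for exactly one path. A minimal sketch, assuming `events` holds that path's `PathEvent`s (obtained elsewhere, e.g. from a font context as in `convert_font` further down):

extern crate lyon_path;
extern crate pathfinder_partitioner;

use lyon_path::PathEvent;
use pathfinder_partitioner::mesh::Mesh;

// Hypothetical helper: fill one mesh's stencil data from a single path's events.
fn push_stencil_data(mesh: &mut Mesh, events: &[PathEvent]) {
    // No path ID any more; the mesh itself is scoped to one path.
    mesh.push_stencil_segments(events.iter().cloned());
    mesh.push_stencil_normals(events.iter().cloned());
}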

View File

@ -0,0 +1,92 @@
// pathfinder/partitioner/src/mesh_pack.rs
//
// Copyright © 2018 The Pathfinder Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use bincode::{self, Infinite};
use byteorder::{LittleEndian, WriteBytesExt};
use mesh::Mesh;
use serde::Serialize;
use std::io::{self, ErrorKind, Seek, SeekFrom, Write};
use std::u32;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeshPack {
pub meshes: Vec<Mesh>,
}
impl MeshPack {
#[inline]
pub fn new() -> MeshPack {
MeshPack {
meshes: vec![],
}
}
#[inline]
pub fn push(&mut self, mesh: Mesh) {
self.meshes.push(mesh)
}
/// Writes this mesh pack to a RIFF file.
///
/// RIFF is a dead-simple extensible binary format documented here:
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee415713(v=vs.85).aspx
pub fn serialize_into<W>(&self, writer: &mut W) -> io::Result<()> where W: Write + Seek {
// `PFMP` for "Pathfinder Mesh Pack".
try!(writer.write_all(b"RIFF\0\0\0\0PFMP"));
// NB: The RIFF spec requires that all chunks be padded to an even byte offset. However,
// for us, this is guaranteed by construction because each instance of all of the data that
// we're writing has a byte size that is a multiple of 4. So we don't bother with doing it
// explicitly here.
for mesh in &self.meshes {
try!(write_chunk(writer, b"mesh", |writer| {
try!(write_simple_chunk(writer, b"bqua", &mesh.b_quads));
try!(write_simple_chunk(writer, b"bqvp", &mesh.b_quad_vertex_positions));
try!(write_simple_chunk(writer, b"bqii", &mesh.b_quad_vertex_interior_indices));
try!(write_simple_chunk(writer, b"bbox", &mesh.b_boxes));
try!(write_simple_chunk(writer, b"sseg", &mesh.stencil_segments));
try!(write_simple_chunk(writer, b"snor", &mesh.stencil_normals));
Ok(())
}));
}
let total_length = try!(writer.seek(SeekFrom::Current(0)));
try!(writer.seek(SeekFrom::Start(4)));
try!(writer.write_u32::<LittleEndian>((total_length - 8) as u32));
return Ok(());
fn write_chunk<W, F>(writer: &mut W, tag: &[u8; 4], mut closure: F) -> io::Result<()>
where W: Write + Seek, F: FnMut(&mut W) -> io::Result<()> {
try!(writer.write_all(tag));
try!(writer.write_all(b"\0\0\0\0"));
let start_position = try!(writer.seek(SeekFrom::Current(0)));
try!(closure(writer));
let end_position = try!(writer.seek(SeekFrom::Current(0)));
try!(writer.seek(SeekFrom::Start(start_position - 4)));
try!(writer.write_u32::<LittleEndian>((end_position - start_position) as u32));
try!(writer.seek(SeekFrom::Start(end_position)));
Ok(())
}
fn write_simple_chunk<W, T>(writer: &mut W, tag: &[u8; 4], data: &[T]) -> io::Result<()>
where W: Write + Seek, T: Serialize {
write_chunk(writer, tag, |writer| {
for datum in data {
try!(bincode::serialize_into(writer, datum, Infinite).map_err(|_| {
io::Error::from(ErrorKind::Other)
}));
}
Ok(())
})
}
}
}
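
`MeshPack::serialize_into` keeps the RIFF framing but uses the `PFMP` form type and wraps each mesh's chunks in a `mesh` chunk, rather than writing library-wide chunks plus per-path ranges. A minimal sketch of writing a pack to disk (the `output.pfmp` file name is only an example):

extern crate pathfinder_partitioner;

use pathfinder_partitioner::mesh::Mesh;
use pathfinder_partitioner::mesh_pack::MeshPack;
use std::fs::File;
use std::io;

fn write_pack(meshes: Vec<Mesh>) -> io::Result<()> {
    let mut pack = MeshPack::new();
    for mesh in meshes {
        pack.push(mesh);
    }
    // `File` implements both `Write` and `Seek`, as `serialize_into` requires.
    let mut output = File::create("output.pfmp")?;
    pack.serialize_into(&mut output)
}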

View File

@ -21,7 +21,7 @@ use std::ops::{Add, AddAssign};
use std::u32;
use builder::Builder;
use mesh_library::MeshLibrary;
use mesh::Mesh;
use {BQuad, BVertexLoopBlinnData, BVertexKind, FillRule};
const MAX_B_QUAD_SUBDIVISIONS: u8 = 8;
@ -30,12 +30,10 @@ const INTERSECTION_TOLERANCE: f32 = 0.001;
pub struct Partitioner {
path: Builder,
path_id: u16,
library: MeshLibrary,
fill_rule: FillRule,
mesh: Mesh,
heap: BinaryHeap<Point>,
visited_points: BitVec,
active_edges: Vec<ActiveEdge>,
@ -44,13 +42,12 @@ pub struct Partitioner {
impl Partitioner {
#[inline]
pub fn new(library: MeshLibrary) -> Partitioner {
pub fn new() -> Partitioner {
Partitioner {
path: Builder::new(),
path_id: 0,
fill_rule: FillRule::Winding,
library: library,
mesh: Mesh::new(),
heap: BinaryHeap::new(),
visited_points: BitVec::new(),
@ -60,18 +57,18 @@ impl Partitioner {
}
#[inline]
pub fn library(&self) -> &MeshLibrary {
&self.library
pub fn mesh(&self) -> &Mesh {
&self.mesh
}
#[inline]
pub fn library_mut(&mut self) -> &mut MeshLibrary {
&mut self.library
pub fn mesh_mut(&mut self) -> &mut Mesh {
&mut self.mesh
}
#[inline]
pub fn into_library(self) -> MeshLibrary {
self.library
pub fn into_mesh(self) -> Mesh {
self.mesh
}
#[inline]
@ -84,31 +81,23 @@ impl Partitioner {
&mut self.path
}
pub fn partition(&mut self, path_id: u16, fill_rule: FillRule) {
pub fn partition(&mut self, fill_rule: FillRule) {
self.path.end_subpath();
self.heap.clear();
self.active_edges.clear();
self.path_id = path_id;
self.fill_rule = fill_rule;
// FIXME(pcwalton): Right now, this bit vector uses too much memory.
self.visited_points = BitVec::from_elem(self.path.endpoints.len(), false);
let start_lengths = self.library.snapshot_lengths();
self.init_heap();
while self.process_next_point() {}
debug_assert_eq!(self.library.b_vertex_loop_blinn_data.len(),
self.library.b_vertex_positions.len());
let end_lengths = self.library.snapshot_lengths();
let path_ranges = self.library.ensure_path_ranges(path_id);
path_ranges.set_partitioning_lengths(&start_lengths, &end_lengths);
debug_assert_eq!(self.mesh.b_vertex_loop_blinn_data.len(),
self.mesh.b_vertex_positions.len());
}
fn process_next_point(&mut self) -> bool {
@ -193,15 +182,15 @@ impl Partitioner {
// If we already made a B-vertex point for this endpoint, reuse it instead of making a
// new one.
let old_left_position =
self.library.b_vertex_positions[active_edge.left_vertex_index as usize];
self.mesh.b_vertex_positions[active_edge.left_vertex_index as usize];
let should_update = (endpoint_position - old_left_position).square_length() >
f32::approx_epsilon();
if should_update {
active_edge.left_vertex_index = self.library.b_vertex_loop_blinn_data.len() as u32;
active_edge.left_vertex_index = self.mesh.b_vertex_loop_blinn_data.len() as u32;
active_edge.control_point_vertex_index = active_edge.left_vertex_index + 1;
// FIXME(pcwalton): Normal
self.library.add_b_vertex(&endpoint_position,
self.mesh.add_b_vertex(&endpoint_position,
&BVertexLoopBlinnData::new(active_edge.endpoint_kind()));
active_edge.toggle_parity();
@ -231,18 +220,18 @@ impl Partitioner {
}
Some(ref control_point_position) => {
self.active_edges[active_edge_index as usize].control_point_vertex_index =
self.library.b_vertex_loop_blinn_data.len() as u32;
self.mesh.b_vertex_loop_blinn_data.len() as u32;
let left_vertex_index = self.active_edges[active_edge_index as usize]
.left_vertex_index;
let control_point_b_vertex_loop_blinn_data = BVertexLoopBlinnData::control_point(
&self.library.b_vertex_positions[left_vertex_index as usize],
&self.mesh.b_vertex_positions[left_vertex_index as usize],
&control_point_position,
&new_point.position,
bottom);
// FIXME(pcwalton): Normal
self.library.add_b_vertex(control_point_position,
self.mesh.add_b_vertex(control_point_position,
&control_point_b_vertex_loop_blinn_data);
}
}
@ -318,13 +307,13 @@ impl Partitioner {
let new_active_edges = &mut self.active_edges[next_active_edge_index as usize..
next_active_edge_index as usize + 2];
let left_vertex_index = self.library.b_vertex_loop_blinn_data.len() as u32;
let left_vertex_index = self.mesh.b_vertex_loop_blinn_data.len() as u32;
new_active_edges[0].left_vertex_index = left_vertex_index;
new_active_edges[1].left_vertex_index = left_vertex_index;
// FIXME(pcwalton): Normal
let position = self.path.endpoints[endpoint_index as usize].to;
self.library.add_b_vertex(&position,
self.mesh.add_b_vertex(&position,
&BVertexLoopBlinnData::new(BVertexKind::Endpoint0));
new_active_edges[0].toggle_parity();
@ -360,7 +349,7 @@ impl Partitioner {
None => new_active_edges[0].control_point_vertex_index = u32::MAX,
Some(control_point_position) => {
new_active_edges[0].control_point_vertex_index =
self.library.b_vertex_loop_blinn_data.len() as u32;
self.mesh.b_vertex_loop_blinn_data.len() as u32;
let right_vertex_position =
self.path.endpoints[new_active_edges[0].right_endpoint_index as usize].to;
@ -371,7 +360,7 @@ impl Partitioner {
false);
// FIXME(pcwalton): Normal
self.library.add_b_vertex(&control_point_position,
self.mesh.add_b_vertex(&control_point_position,
&control_point_b_vertex_loop_blinn_data)
}
}
@ -380,7 +369,7 @@ impl Partitioner {
None => new_active_edges[1].control_point_vertex_index = u32::MAX,
Some(control_point_position) => {
new_active_edges[1].control_point_vertex_index =
self.library.b_vertex_loop_blinn_data.len() as u32;
self.mesh.b_vertex_loop_blinn_data.len() as u32;
let right_vertex_position =
self.path.endpoints[new_active_edges[1].right_endpoint_index as usize].to;
@ -391,7 +380,7 @@ impl Partitioner {
true);
// FIXME(pcwalton): Normal
self.library.add_b_vertex(&control_point_position,
self.mesh.add_b_vertex(&control_point_position,
&control_point_b_vertex_loop_blinn_data)
}
}
@ -521,16 +510,16 @@ impl Partitioner {
upper_subdivision: &SubdividedActiveEdge,
lower_subdivision: &SubdividedActiveEdge,
iteration: u8) {
let upper_shape = upper_subdivision.shape(&self.library.b_vertex_loop_blinn_data);
let lower_shape = lower_subdivision.shape(&self.library.b_vertex_loop_blinn_data);
let upper_shape = upper_subdivision.shape(&self.mesh.b_vertex_loop_blinn_data);
let lower_shape = lower_subdivision.shape(&self.mesh.b_vertex_loop_blinn_data);
// Make sure the convex hulls of the two curves do not intersect. If they do, subdivide and
// recurse.
if iteration < MAX_B_QUAD_SUBDIVISIONS {
// TODO(pcwalton): Handle concave-line convex hull intersections.
if let (Some(upper_curve), Some(lower_curve)) =
(upper_subdivision.to_curve(&self.library.b_vertex_positions),
lower_subdivision.to_curve(&self.library.b_vertex_positions)) {
(upper_subdivision.to_curve(&self.mesh.b_vertex_positions),
lower_subdivision.to_curve(&self.mesh.b_vertex_positions)) {
// TODO(pcwalton): Handle concave-concave convex hull intersections.
if upper_shape == Shape::Concave &&
lower_curve.baseline()
@ -542,7 +531,7 @@ impl Partitioner {
0.5,
false);
let midpoint_x =
self.library
self.mesh
.b_vertex_positions[upper_left_subsubdivision.middle_point as usize].x;
let (lower_left_subsubdivision, lower_right_subsubdivision) =
self.subdivide_active_edge_again_at_x(&lower_subdivision,
@ -572,7 +561,7 @@ impl Partitioner {
0.5,
true);
let midpoint_x =
self.library
self.mesh
.b_vertex_positions[lower_left_subsubdivision.middle_point as usize].x;
let (upper_left_subsubdivision, upper_right_subsubdivision) =
self.subdivide_active_edge_again_at_x(&upper_subdivision,
@ -602,13 +591,13 @@ impl Partitioner {
{
let upper_active_edge = &mut self.active_edges[upper_active_edge_index as usize];
self.library.b_vertex_loop_blinn_data[upper_subdivision.middle_point as usize] =
self.mesh.b_vertex_loop_blinn_data[upper_subdivision.middle_point as usize] =
BVertexLoopBlinnData::new(upper_active_edge.endpoint_kind());
upper_active_edge.toggle_parity();
}
{
let lower_active_edge = &mut self.active_edges[lower_active_edge_index as usize];
self.library.b_vertex_loop_blinn_data[lower_subdivision.middle_point as usize] =
self.mesh.b_vertex_loop_blinn_data[lower_subdivision.middle_point as usize] =
BVertexLoopBlinnData::new(lower_active_edge.endpoint_kind());
lower_active_edge.toggle_parity();
}
@ -622,7 +611,7 @@ impl Partitioner {
self.update_vertex_normals_for_new_b_quad(&b_quad);
self.library.add_b_quad(&b_quad);
self.mesh.add_b_quad(&b_quad);
}
fn subdivide_active_edge_again_at_t(&mut self,
@ -630,14 +619,14 @@ impl Partitioner {
t: f32,
bottom: bool)
-> (SubdividedActiveEdge, SubdividedActiveEdge) {
let curve = subdivision.to_curve(&self.library.b_vertex_positions)
let curve = subdivision.to_curve(&self.mesh.b_vertex_positions)
.expect("subdivide_active_edge_again_at_t(): not a curve!");
let (left_curve, right_curve) = curve.assume_monotonic().split(t);
let left_control_point_index = self.library.b_vertex_positions.len() as u32;
let left_control_point_index = self.mesh.b_vertex_positions.len() as u32;
let midpoint_index = left_control_point_index + 1;
let right_control_point_index = midpoint_index + 1;
self.library.b_vertex_positions.extend([
self.mesh.b_vertex_positions.extend([
left_curve.segment().ctrl,
left_curve.segment().to,
right_curve.segment().ctrl,
@ -645,7 +634,7 @@ impl Partitioner {
// Initially, assume that the parity is false. We will modify the Loop-Blinn data later if
// that is incorrect.
self.library.b_vertex_loop_blinn_data.extend([
self.mesh.b_vertex_loop_blinn_data.extend([
BVertexLoopBlinnData::control_point(&left_curve.segment().from,
&left_curve.segment().ctrl,
&left_curve.segment().to,
@ -675,7 +664,7 @@ impl Partitioner {
x: f32,
bottom: bool)
-> (SubdividedActiveEdge, SubdividedActiveEdge) {
let curve = subdivision.to_curve(&self.library.b_vertex_positions)
let curve = subdivision.to_curve(&self.mesh.b_vertex_positions)
.expect("subdivide_active_edge_again_at_x(): not a curve!");
let t = curve.assume_monotonic().solve_t_for_x(x);
self.subdivide_active_edge_again_at_t(subdivision, t, bottom)
@ -741,7 +730,7 @@ impl Partitioner {
fn solve_active_edge_t_for_x(&self, x: f32, active_edge: &ActiveEdge) -> f32 {
let left_vertex_position =
&self.library.b_vertex_positions[active_edge.left_vertex_index as usize];
&self.mesh.b_vertex_positions[active_edge.left_vertex_index as usize];
let right_endpoint_position =
&self.path.endpoints[active_edge.right_endpoint_index as usize].to;
match active_edge.control_point_vertex_index {
@ -752,7 +741,7 @@ impl Partitioner {
}.solve_t_for_x(x)
}
control_point_vertex_index => {
let control_point = &self.library
let control_point = &self.mesh
.b_vertex_positions[control_point_vertex_index as usize];
let segment = QuadraticBezierSegment {
from: *left_vertex_position,
@ -770,7 +759,7 @@ impl Partitioner {
fn sample_active_edge(&self, t: f32, active_edge: &ActiveEdge) -> Point2D<f32> {
let left_vertex_position =
&self.library.b_vertex_positions[active_edge.left_vertex_index as usize];
&self.mesh.b_vertex_positions[active_edge.left_vertex_index as usize];
let right_endpoint_position =
&self.path.endpoints[active_edge.right_endpoint_index as usize].to;
match active_edge.control_point_vertex_index {
@ -780,7 +769,7 @@ impl Partitioner {
.to_point()
}
control_point_vertex_index => {
let control_point = &self.library
let control_point = &self.mesh
.b_vertex_positions[control_point_vertex_index as usize];
QuadraticBezierSegment {
from: *left_vertex_position,
@ -803,11 +792,11 @@ impl Partitioner {
}
let upper_left_vertex_position =
&self.library.b_vertex_positions[upper_active_edge.left_vertex_index as usize];
&self.mesh.b_vertex_positions[upper_active_edge.left_vertex_index as usize];
let upper_right_endpoint_position =
&self.path.endpoints[upper_active_edge.right_endpoint_index as usize].to;
let lower_left_vertex_position =
&self.library.b_vertex_positions[lower_active_edge.left_vertex_index as usize];
&self.mesh.b_vertex_positions[lower_active_edge.left_vertex_index as usize];
let lower_right_endpoint_position =
&self.path.endpoints[lower_active_edge.right_endpoint_index as usize].to;
@ -827,7 +816,7 @@ impl Partitioner {
(upper_control_point_vertex_index, u32::MAX) => {
let upper_control_point =
&self.library.b_vertex_positions[upper_control_point_vertex_index as usize];
&self.mesh.b_vertex_positions[upper_control_point_vertex_index as usize];
let (upper_curve, _) = QuadraticBezierSegment {
from: *upper_left_vertex_position,
ctrl: *upper_control_point,
@ -842,7 +831,7 @@ impl Partitioner {
(u32::MAX, lower_control_point_vertex_index) => {
let lower_control_point =
&self.library.b_vertex_positions[lower_control_point_vertex_index as usize];
&self.mesh.b_vertex_positions[lower_control_point_vertex_index as usize];
let (lower_curve, _) = QuadraticBezierSegment {
from: *lower_left_vertex_position,
ctrl: *lower_control_point,
@ -857,9 +846,9 @@ impl Partitioner {
(upper_control_point_vertex_index, lower_control_point_vertex_index) => {
let upper_control_point =
&self.library.b_vertex_positions[upper_control_point_vertex_index as usize];
&self.mesh.b_vertex_positions[upper_control_point_vertex_index as usize];
let lower_control_point =
&self.library.b_vertex_positions[lower_control_point_vertex_index as usize];
&self.mesh.b_vertex_positions[lower_control_point_vertex_index as usize];
let (upper_curve, _) = QuadraticBezierSegment {
from: *upper_left_vertex_position,
ctrl: *upper_control_point,
@ -880,7 +869,7 @@ impl Partitioner {
fn should_subdivide_active_edge_at(&self, active_edge_index: u32, x: f32) -> bool {
let left_curve_left = self.active_edges[active_edge_index as usize].left_vertex_index;
let left_point_position = self.library.b_vertex_positions[left_curve_left as usize];
let left_point_position = self.mesh.b_vertex_positions[left_curve_left as usize];
x - left_point_position.x >= f32::approx_epsilon()
}
@ -891,7 +880,7 @@ impl Partitioner {
subdivision_type: SubdivisionType)
-> SubdividedActiveEdge {
let left_curve_left = self.active_edges[active_edge_index as usize].left_vertex_index;
let left_point_position = self.library.b_vertex_positions[left_curve_left as usize];
let left_point_position = self.mesh.b_vertex_positions[left_curve_left as usize];
let t = self.solve_active_edge_t_for_x(x, &self.active_edges[active_edge_index as usize]);
@ -907,17 +896,17 @@ impl Partitioner {
.lerp(right_point.to_vector(), t);
// FIXME(pcwalton): Normal
active_edge.left_vertex_index = self.library.b_vertex_loop_blinn_data.len() as u32;
self.library.add_b_vertex(&middle_point.to_point(),
active_edge.left_vertex_index = self.mesh.b_vertex_loop_blinn_data.len() as u32;
self.mesh.add_b_vertex(&middle_point.to_point(),
&BVertexLoopBlinnData::new(active_edge.endpoint_kind()));
left_curve_control_point_vertex_index = u32::MAX;
}
_ => {
let left_endpoint_position =
self.library.b_vertex_positions[active_edge.left_vertex_index as usize];
self.mesh.b_vertex_positions[active_edge.left_vertex_index as usize];
let control_point_position =
self.library
self.mesh
.b_vertex_positions[active_edge.control_point_vertex_index as usize];
let right_endpoint_position =
self.path.endpoints[active_edge.right_endpoint_index as usize].to;
@ -928,20 +917,20 @@ impl Partitioner {
}.split(t);
left_curve_control_point_vertex_index =
self.library.b_vertex_loop_blinn_data.len() as u32;
self.mesh.b_vertex_loop_blinn_data.len() as u32;
active_edge.left_vertex_index = left_curve_control_point_vertex_index + 1;
active_edge.control_point_vertex_index = left_curve_control_point_vertex_index + 2;
// FIXME(pcwalton): Normals
self.library
self.mesh
.add_b_vertex(&left_curve.ctrl,
&BVertexLoopBlinnData::control_point(&left_curve.from,
&left_curve.ctrl,
&left_curve.to,
bottom));
self.library.add_b_vertex(&left_curve.to,
self.mesh.add_b_vertex(&left_curve.to,
&BVertexLoopBlinnData::new(active_edge.endpoint_kind()));
self.library
self.mesh
.add_b_vertex(&right_curve.ctrl,
&BVertexLoopBlinnData::control_point(&right_curve.from,
&right_curve.ctrl,
@ -1004,8 +993,8 @@ impl Partitioner {
fn calculate_normal_for_edge(&self, left_vertex_index: u32, right_vertex_index: u32)
-> VertexNormal {
let left_vertex_position = &self.library.b_vertex_positions[left_vertex_index as usize];
let right_vertex_position = &self.library.b_vertex_positions[right_vertex_index as usize];
let left_vertex_position = &self.mesh.b_vertex_positions[left_vertex_index as usize];
let right_vertex_position = &self.mesh.b_vertex_positions[right_vertex_index as usize];
VertexNormal::new(left_vertex_position, right_vertex_position)
}
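
Because a `Partitioner` now owns a single `Mesh` and `partition` takes no `path_id`, the per-path workflow becomes: build the path, partition it, take the mesh with `into_mesh`, and push it into a `MeshPack`. A minimal sketch mirroring the `convert_font` change below, assuming `paths` holds one event list per path:

extern crate lyon_path;
extern crate pathfinder_partitioner;

use lyon_path::PathEvent;
use lyon_path::builder::{FlatPathBuilder, PathBuilder};
use pathfinder_partitioner::FillRule;
use pathfinder_partitioner::mesh_pack::MeshPack;
use pathfinder_partitioner::partitioner::Partitioner;

fn partition_paths(paths: &[Vec<PathEvent>]) -> MeshPack {
    let mut pack = MeshPack::new();
    for path in paths {
        // One partitioner, and therefore one mesh, per path.
        let mut partitioner = Partitioner::new();
        partitioner.mesh_mut().push_stencil_segments(path.iter().cloned());
        path.iter().cloned().for_each(|event| partitioner.builder_mut().path_event(event));
        partitioner.partition(FillRule::Winding);
        partitioner.builder_mut().build_and_reset();
        pack.push(partitioner.into_mesh());
    }
    pack
}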

View File

@ -39,7 +39,7 @@ use lyon_path::PathEvent;
use lyon_path::builder::{FlatPathBuilder, PathBuilder};
use pathfinder_font_renderer::{FontContext, FontInstance, GlyphKey, SubpixelOffset};
use pathfinder_partitioner::FillRule;
use pathfinder_partitioner::mesh_library::MeshLibrary;
use pathfinder_partitioner::mesh_pack::MeshPack;
use pathfinder_partitioner::partitioner::Partitioner;
use std::ffi::CString;
use std::fs::File;
@ -81,12 +81,12 @@ fn convert_font(font_path: &Path, output_path: &Path) -> Result<(), ()> {
let mut font_context = try!(FontContext::new());
try!(font_context.add_font_from_memory(&(), Arc::new(font_data), 0));
let font_instance = FontInstance {
font_key: font_key,
font_key: (),
size: Au::from_f64_px(FONT_SIZE),
};
let mut paths: Vec<(u16, Vec<PathEvent>)> = vec![];
let mut partitioner = Partitioner::new(MeshLibrary::new());
let mut mesh_pack = MeshPack::new();
for glyph_index in 0..glyph_count {
let glyph_key = GlyphKey::new(glyph_index, SubpixelOffset(0));
@ -96,24 +96,20 @@ fn convert_font(font_path: &Path, output_path: &Path) -> Result<(), ()> {
Err(_) => continue,
};
let mut partitioner = Partitioner::new();
let path_index = (glyph_index + 1) as u16;
partitioner.library_mut().push_segments(path_index, path.iter());
partitioner.library_mut().push_normals(path_index, path.iter());
partitioner.library_mut().push_stencil_segments(path_index, path.iter());
partitioner.mesh_mut().push_stencil_segments(path.iter());
path.iter().for_each(|event| partitioner.builder_mut().path_event(event));
partitioner.partition(FillRule::Winding);
partitioner.builder_mut().build_and_reset();
paths.push((path_index, path.iter().collect()));
mesh_pack.push(partitioner.into_mesh());
}
for (glyph_index, path) in paths {
path.into_iter().for_each(|event| partitioner.builder_mut().path_event(event));
partitioner.partition(glyph_index, FillRule::Winding);
partitioner.builder_mut().build_and_reset();
}
partitioner.library_mut().optimize();
let mut output_file = try!(File::create(output_path).map_err(drop));
partitioner.library().serialize_into(&mut output_file).map_err(drop)
mesh_pack.serialize_into(&mut output_file).map_err(drop)
}
pub fn main() {