diff --git a/src/config/WorldEnv.ts b/src/config/WorldEnv.ts index d953bcc..b3d32dd 100644 --- a/src/config/WorldEnv.ts +++ b/src/config/WorldEnv.ts @@ -81,6 +81,7 @@ export class WorldEnv { // eslint-disable-next-line @typescript-eslint/no-unused-vars dataEncoder: (blockType: BlockType, _blockMode?: BlockMode) => blockType || BlockType.NONE, + dataDecoder: (rawData: number) => rawData || BlockType.NONE, } schematics: { diff --git a/src/datacontainers/ChunkContainer.ts b/src/datacontainers/ChunkContainer.ts index 50c2e43..e66ce11 100644 --- a/src/datacontainers/ChunkContainer.ts +++ b/src/datacontainers/ChunkContainer.ts @@ -49,15 +49,22 @@ export class ChunkContainer { axisOrder: ChunkAxisOrder // local data encoder (defaulting to global) dataEncoder: (blockType: BlockType, _blockMode?: BlockMode) => number + dataDecoder: (rawVal: number) => number + // global version static get dataEncoder() { return WorldEnv.current.chunks.dataEncoder } + static get dataDecoder() { + return WorldEnv.current.chunks.dataDecoder + } + constructor( boundsOrChunkKey: Box3 | ChunkKey = new Box3(), margin = 0, customDataEncoder = ChunkContainer.dataEncoder, + customDataDecoder = ChunkContainer.dataDecoder, axisOrder = ChunkAxisOrder.ZXY, ) { //, bitLength = BitLength.Uint16) { @@ -76,6 +83,7 @@ export class ChunkContainer { this.id = chunkId } this.dataEncoder = customDataEncoder + this.dataDecoder = customDataDecoder this.adjustChunkBounds(bounds) // this.rawData = getArrayConstructor(bitLength) } diff --git a/src/datacontainers/ChunksIndexer.ts b/src/datacontainers/ChunksIndexer.ts deleted file mode 100644 index d80647d..0000000 --- a/src/datacontainers/ChunksIndexer.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { Box2, Vector2 } from 'three' - -import { WorldUtils } from '../index' -import { WorldEnv } from '../config/WorldEnv' -import { asPatchBounds } from '../utils/convert' -import { PatchKey } from '../utils/types' -// import { GroundSurfaceChunkset, UndegroundChunkset } from '../processing/ChunksProcessing' - -export class PatchIndexer { - patchLookup: Record = {} - - get indexedKeys() { - return Object.keys(this.patchLookup) - } - - get indexedElements() { - return Object.values(this.patchLookup) - } - - // sortKeysAroundPos(patchKeys: PatchKey[], pos: Vector2) { - // const sortedKeys = patchKeys.map(patchKey => new PatchBase(patchKey)) - // .sort((p1, p2) => { - // // const b1 = asPatchBounds(k1, patchDims) - // // const b2 = asPatchBounds(k2, patchDims) - // const c1 = p1.bounds.getCenter(new Vector2()); - // const c2 = p2.bounds.getCenter(new Vector2()) - // const d1 = c1.distanceTo(pos) - // const d2 = c2.distanceTo(pos) - // return d1 - d2 - // }) - // .map(p => p.key) - // return sortedKeys - // } - - genPatchKeysAroundPos(pos: Vector2, rad: number) { - const center = pos.clone().floor() - const dims = new Vector2(rad, rad).multiplyScalar(2) - // const sphere = new Sphere(center, rad) - const bounds = new Box2().setFromCenterAndSize(center, dims) - const patchKeys = WorldUtils.convert - .getPatchIds(bounds, WorldEnv.current.patchDimensions) - .sort((v1, v2) => v1.distanceTo(pos) - v2.distanceTo(pos)) - .map(patchId => WorldUtils.convert.serializePatchId(patchId)) - return patchKeys - } - - // index patch & chunk keys found within radius around pos - getIndexingChanges(pos: Vector2, rad: number) { - const patchKeys = this.genPatchKeysAroundPos(pos, rad) - const newKeys = patchKeys.filter(patchKey => !this.patchLookup[patchKey]) - newKeys.sort((k1, k2) => { - const b1 = asPatchBounds(k1, 
WorldEnv.current.patchDimensions) - const b2 = asPatchBounds(k2, WorldEnv.current.patchDimensions) - const c1 = b1.getCenter(new Vector2()) - const c2 = b2.getCenter(new Vector2()) - return c1.distanceTo(pos) - c2.distanceTo(pos) - }) - // clear previous index and override with new patch/chunk keys - const patchLookup: Record = {} - for (const patchKey of patchKeys) { - const existing = this.patchLookup[patchKey] - if (existing) patchLookup[patchKey] = existing - } - this.patchLookup = patchLookup - return newKeys - } -} diff --git a/src/datacontainers/PatchesIndexer.ts b/src/datacontainers/PatchesIndexer.ts deleted file mode 100644 index 516b99f..0000000 --- a/src/datacontainers/PatchesIndexer.ts +++ /dev/null @@ -1,361 +0,0 @@ -/** - * Allows precaching patches around position, with new patches automatically computed - * when position is updated - * Previous patches can be simply removed, or kept until condition is met: - * cache size exceeds, LRU, .. - */ -import { Box2, Vector2 } from 'three' - -import { PatchBlock, PatchKey } from '../utils/types' -import { getPatchId, getPatchIds, serializePatchId } from '../utils/convert' -import { WorldEnv } from '../index' -import { GroundPatch } from '../processing/GroundPatch' - -import { PatchBase } from './PatchBase' - -/** - * Structure for storing either contiguous (map) or sparse (cache) generic patches - * Provides utility to rebuild patch index around position and radius - */ -export abstract class PatchesIndexer> { - patchLookup: Record = {} - patchDimensions - - constructor() { - this.patchDimensions = WorldEnv.current.patchDimensions - } - - abstract patchConstructor: (key: PatchKey) => T - - get keys() { - return Object.keys(this.patchLookup) - } - - get patches() { - return Object.values(this.patchLookup) - } - - getOverlappingPatches(inputBounds: Box2) { - return this.patches.filter(patch => patch.isOverlapping(inputBounds)) - } - - findPatch(blockPos: Vector2) { - // const res = this.patches.find(patch => patch.containsPoint(blockPos)) - // return res - blockPos.floor() - // compute patch key from which blocks belongs to - const patchId = getPatchId(blockPos, this.patchDimensions) - const patchKey = serializePatchId(patchId) - // look for patch in cache - const patch = this.patchLookup[patchKey] - return patch - } - - /** - * Will output new patch index (without changing current) containing both unchanged and - * created patches. Deprecated items can be found by comparing with previous index. - * Callee decide which action to take depending on patch category: - * - fill new instances - * - clean up deprecated patches - * @param cacheBounds - * @returns - */ - rebuildIndexAroundPosAndRad(center: Vector2, radius: number) { - center = center.clone().floor() - const dims = new Vector2(radius, radius).multiplyScalar(2) - const bounds = new Box2().setFromCenterAndSize(center, dims) - const patchLookup: Record = {} - const patchKeys = getPatchIds(bounds, this.patchDimensions).map(patchId => - serializePatchId(patchId), - ) - let changeDetected = false - patchKeys.forEach(key => { - changeDetected = changeDetected || !this.patchLookup[key] - patchLookup[key] = this.patchLookup[key] || this.patchConstructor(key) - }) - return changeDetected ? 
patchLookup : null - } -} - -/** - * Returns block from cache if found, and precache near blocks if needed - * If not found will compute patch containing block first, - * and return a promise that will resolve once patch is available in cache - * @param blockPos - * @param params - */ -export class GroundContainer extends PatchesIndexer { - patchConstructor = (key: string) => new GroundPatch(key) - - get emptyPatches() { - const emptyPatches = this.patches.filter(patch => patch.isEmpty) - return emptyPatches - } - - patchGen() { - const pendingRequests = this.emptyPatches.map(async patch => { - await patch.bake() - // await patch.retrieveOvergroundItems() - return patch - }) - return pendingRequests // await Promise.all(pendingRequests) - } - - async *patchOtfGen() { - for await (const patch of this.emptyPatches) { - await patch.bake() - // await patch.retrieveOvergroundItems() - yield patch - } - } -} - -export class GroundCache extends GroundContainer { - // eslint-disable-next-line no-use-before-define - static singleton: GroundCache - - static get instance() { - this.singleton = this.singleton || new GroundCache() - return this.singleton - } - - /** - * Query block from cache and/or trigger refill request if required - * @param blockPos - * @returns - */ - queryPrecachedBlock( - pos: Vector2, - params = { - precacheRadius: 0, - cacheMissing: false, - }, - ) { - const block = this.findPatch(pos)?.getBlock(pos) - - const precacheBlocks: () => Promise = async () => { - this.patchLookup = - this.rebuildIndexAroundPosAndRad(pos, params.precacheRadius) || - this.patchLookup - await this.patchGen() - return GroundCache.instance.queryPrecachedBlock(pos) as PatchBlock - } - // conditions to trigger prechache request are block is missing or radius provided - const pendingReq = - ((!block && params.cacheMissing) || params.precacheRadius > 0) && - precacheBlocks() - return block || (pendingReq as Promise) - } - - /** - * Override default behavior to handle deprecated patches cleanup, - */ - // override rebuildIndexAroundPosAndRad(center: Vector2, radius: number) { - - // } -} - -// export class GroundMap extends GroundPatchesContainer { -// mapBounds: Box2 = new Box2() - -/** - * Override default behavior to reset patch index, - */ -// override rebuildIndexAroundPosAndRad(center: Vector2, radius: number) { - -// } - -// adjustBounds(bounds: Box2) { -// this.bounds = bounds -// // rebuild patch index -// this.rebuildPatchIndex(bounds) -// this.loadEmpty() -// } - -// getBlock(blockPos: Vector3) { -// return this.findPatch(blockPos)?.getBlock(blockPos, false) -// } - -// getGroundBlock(globalPos: Vector3) { -// const { bbox } = this -// let blockRes -// globalPos.y = bbox.getCenter(new Vector3()).y -// if (bbox.containsPoint(globalPos)) { -// const patch = this.findPatch(globalPos) -// if (patch) { -// const localPos = globalPos.clone().sub(patch.bbox.min) -// blockRes = patch.getBlock(localPos) as BlockData -// } -// } else { -// const batchRes = WorldComputeApi.instance.computeBlocksBatch([globalPos]) -// const blockRes = batchRes instanceof Promise ? 
batchRes.then(batchRes => batchRes[0]) : batchRes[0] -// if (!blockRes) { -// console.log(blockRes) -// } -// } -// return blockRes -// } - -// async getUpperBlock(globalPos: Vector3) { -// const block = await this.getGroundBlock(globalPos) -// if (block) { -// const blocksBuffer = (await WorldApi.instance.call( -// WorldApiName.OvergroundBufferCompute, -// [block.pos], -// )) as BlockType[] -// const lastBlockIndex = blocksBuffer.findLastIndex(elt => elt) -// if (lastBlockIndex >= 0) { -// block.pos.y += lastBlockIndex -// block.type = blocksBuffer[lastBlockIndex] as BlockType -// } -// } -// return block -// } - -// setBlock(globalPos: Vector3, block: BlockData) { -// // find patch containing point in cache -// const patch = this.findPatch(globalPos) -// if (patch) { -// const localPos = globalPos.clone().sub(patch.bbox.min) -// patch.setBlock(localPos, block.type) -// } else { -// console.log(globalPos) -// } -// return block -// } - -// *iterAllPatchesBlocks() { -// for (const patch of this.availablePatches) { -// const blocks = patch.iterOverBlocks(undefined, false, false) -// for (const block of blocks) { -// yield block -// } -// } -// } -// } - -// get count() { -// return Object.keys(this.patchLookup).length -// } - -// get patchKeys() { -// return Object.keys(this.patchLookup) -// } - -// get availablePatches() { -// return Object.values(this.patchLookup).filter(val => val) as T[] -// } - -// get missingPatchKeys() { -// return Object.keys(this.patchLookup).filter( -// key => !this.patchLookup[key], -// ) as PatchKey[] -// } - -// autoFill(fillingVal=0){ -// this.patchKeys.forEach(key=>this.patchLookup[key] = new GroundPatch(key)) -// this.availablePatches.forEach(patch=>patch.iterOverBlocks) -// } - -// populateFromExisting(patches: T[], cloneObjects = false) { -// // const { min, max } = this.bbox -// patches -// .filter(patch => this.patchLookup[patch.key] !== undefined) -// .forEach(patch => { -// this.patchLookup[patch.key] = cloneObjects -// ? patch // (patch.duplicate() as T) -// : patch -// // min.y = Math.min(patch.bbox.min.y, min.y) -// // max.y = Math.max(patch.bbox.max.y, max.y) -// }) -// } - -// compareWith(otherContainer: PatchesMap) { -// const patchKeysDiff: Record = {} -// // added keys e.g. keys in current container but not found in other -// Object.keys(this.patchLookup) -// .filter(patchKey => otherContainer.patchLookup[patchKey] === undefined) -// .forEach(patchKey => (patchKeysDiff[patchKey] = true)) -// // missing keys e.g. 
found in other container but not in current -// Object.keys(otherContainer.patchLookup) -// .filter(patchKey => this.patchLookup[patchKey] === undefined) -// .forEach(patchKey => (patchKeysDiff[patchKey] = false)) -// return patchKeysDiff -// } - -// getAllPatchesEntities(skipDuplicate = true) { -// const entities: EntityData[] = [] -// for (const patch of this.availablePatches) { -// patch.entities.forEach(entity => { -// if (!skipDuplicate || !entities.find(ent => ent.bbox.equals(entity.bbox))) { -// entities.push(entity) -// } -// }) -// } -// return entities -// } - -// getMergedRows(zRowIndex: number) { -// const sortedPatchesRows = this.availablePatches -// .filter( -// patch => zRowIndex >= patch.bbox.min.z && zRowIndex <= patch.bbox.min.z, -// ) -// .sort((p1, p2) => p1.bbox.min.x - p2.bbox.min.x) -// .map(patch => patch.getBlocksRow(zRowIndex)) -// const mergedRows = sortedPatchesRows.reduce((arr1, arr2) => { -// const mergedArray = new Uint32Array(arr1.length + arr2.length) -// mergedArray.set(arr1) -// mergedArray.set(arr2, arr1.length) -// return mergedArray -// }) -// return mergedRows -// } - -// iterMergedRows() { -// const { min, max } = this.patchRange -// for (let zPatchIndex = min.z; zPatchIndex <= max.z; zPatchIndex++) { -// for (let zRowIndex = min.z; zRowIndex < max.z; zRowIndex++) {} -// } -// } - -// getMergedCols(xColIndex: number) { - -// } - -// mergedLinesIteration() { -// const { min, max } = this.bbox -// for (let x = min.x; x < max.x; x++) { -// for (let z = min.z; z < max.z; z++) { - -// } -// } -// } - -// toMergedContainer() { -// const mergedBox = this.availablePatches.map(patch => patch.bbox) -// .reduce((merge, bbox) => merge.union(bbox), new Box3()) -// // const mergedContainer = -// } - -// static fromMergedContainer() { - -// } -// mergeBlocks(blocksContainer: BlocksContainer) { -// // // for each patch override with blocks from blocks container -// this.availablePatches.forEach(patch => { -// const blocksIter = patch.iterOverBlocks(blocksContainer.bbox) -// for (const target_block of blocksIter) { -// const source_block = blocksContainer.getBlock(target_block.pos, false) -// if (source_block && source_block.pos.y > 0 && target_block.index) { -// let block_type = source_block.type ? BlockType.SAND : BlockType.NONE -// block_type = -// source_block.type === BlockType.TREE_TRUNK -// ? 
BlockType.TREE_TRUNK -// : block_type -// const block_level = blocksContainer.bbox.min.y // source_block?.pos.y -// patch.writeBlock(target_block.index, block_level, block_type) -// // console.log(source_block?.pos.y) -// } -// } -// }) -// } diff --git a/src/factory/ChunksFactory.ts b/src/factory/ChunksFactory.ts index b2ff3cb..b85bf2e 100644 --- a/src/factory/ChunksFactory.ts +++ b/src/factory/ChunksFactory.ts @@ -32,19 +32,19 @@ export class GroundChunk extends ChunkContainer { const undegroundDepth = 4 const bedrock = this.dataEncoder(BlockType.BEDROCK) const bedrockIce = this.dataEncoder(BlockType.ICE) - const { biome, landscapeIndex, flags } = block.data + const { biome, landIndex, flags } = block.data const blockLocalPos = block.localPos as Vector3 - const landscapeConf = Biome.instance.mappings[biome].nth(landscapeIndex) - const groundConf = landscapeConf.data + const biomeLand = Biome.instance.mappings[biome].nth(landIndex) + const landConf = biomeLand.data const groundFlags = parseGroundFlags(flags) const blockType = - highlightPatchBorders(blockLocalPos, groundConf.type) || groundConf.type + highlightPatchBorders(blockLocalPos, landConf.type) || landConf.type const blockMode = groundFlags.boardMode ? BlockMode.CHECKERBOARD : BlockMode.REGULAR const groundSurface = this.dataEncoder(blockType, blockMode) const undergroundLayer = this.dataEncoder( - groundConf.subtype || BlockType.BEDROCK, + landConf.subtype || BlockType.BEDROCK, ) // generate ground buffer const buffSize = MathUtils.clamp(block.data.level - ymin, 0, ymax - ymin) diff --git a/src/index.ts b/src/index.ts index da41bf8..a511232 100644 --- a/src/index.ts +++ b/src/index.ts @@ -4,10 +4,14 @@ export { BlockMode } from './utils/types' // Processing export { ProcessingTask } from './processing/TaskProcessing' export { BatchProcess } from './processing/BatchProcessing' -export { BlocksBatch } from './processing/BlocksBatch' -export { BoardContainer, BlockCategory } from './processing/BoardProcessing' export { - ChunksBatch, + BlocksProcessing, + BlocksProcessingRecipe, + BlockProcessor, +} from './processing/BlocksProcessing' +export { BoardProcessor, BlockCategory } from './processing/BoardProcessing' +export { + ViewChunksBatch, LowerChunksBatch, UpperChunksBatch, } from './processing/ChunksBatch' @@ -15,13 +19,13 @@ export { PseudoDistributionMap, DistributionProfile, } from './processing/RandomDistributionMap' -export { ChunkSet } from './processing/ChunksProcessing' +export { ChunksProcessor } from './processing/ChunksProcessing' // Procgen export { Biome, BiomeType, BlockType } from './procgen/Biome' export { Heightmap } from './procgen/Heightmap' export { DensityVolume } from './procgen/DensityVolume' // Data structures -export { GroundContainer, GroundCache } from './datacontainers/PatchesIndexer' +// export { GroundContainer, GroundCache } from './datacontainers/PatchesIndexer' export { ChunkContainer } from './datacontainers/ChunkContainer' // Factory export { ItemsInventory } from './factory/ItemsFactory' diff --git a/src/processing/BatchProcessing.ts b/src/processing/BatchProcessing.ts index db429e3..4a98f0a 100644 --- a/src/processing/BatchProcessing.ts +++ b/src/processing/BatchProcessing.ts @@ -10,22 +10,25 @@ export class BatchProcess { processed = 0 status = ProcessingState.Waiting batchId - onTaskCompleted = (taskRes: any) => taskRes - onBatchSuspended: any + elapsedTime = 0 + totalTime = 0 - constructor(batch: T[], onTaskCompleted?: any) { + constructor(batch: T[]) { this.processingQueue = batch 
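+    // note: the batch is no longer auto-enqueued at construction time; callers
+    // must schedule it explicitly via enqueue() or BatchProcess.enqueueBatch()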
     this.count = batch.length
     this.batchId = BatchProcess.batchCount++
-    this.onTaskCompleted = onTaskCompleted || this.onTaskCompleted
-    BatchProcess.batchQueue.push(this)
   }

-  static async processNextBatch() {
-    const { nextBatch } = this
-    if (nextBatch && !this.isBusy) {
-      nextBatch.status = ProcessingState.Pending
-      await nextBatch.run()
+  static async enqueueBatch(batch: BatchProcess) {
+    this.batchQueue.push(batch)
+    this.processNextBatch(batch)
+  }
+
+  static async processNextBatch(batch?: BatchProcess) {
+    const nextBatch = batch || this.nextBatch
+    if (nextBatch && nextBatch.processingQueue.length > 0 && !this.isBusy) {
+      // nextBatch.status = ProcessingState.Pending
+      await nextBatch.start()
       nextBatch.status = nextBatch.isDone
         ? ProcessingState.Done
        : nextBatch.status
@@ -47,6 +50,22 @@
     )
   }

+  static cleanTerminated() {
+    this.batchQueue = this.batchQueue.filter(item => !item.isTerminated)
+  }
+
+  // once in the queue, the batch will be automatically processed by workers
+  enqueue(
+    onTaskCompleted?: any,
+    onBatchCompleted?: any,
+    onBatchSuspended?: any,
+  ) {
+    this.onTaskCompleted = onTaskCompleted || this.onTaskCompleted
+    this.onBatchCompleted = onBatchCompleted || this.onBatchCompleted
+    this.onBatchSuspended = onBatchSuspended || this.onBatchSuspended
+    BatchProcess.enqueueBatch(this)
+  }
+
   get isDone() {
     return !this.nextTask && this.pendingTasks.length === 0 // && this.processed === this.count //&& this.cancelled === false
   }
@@ -73,6 +92,28 @@
     )
   }

+  get finishedTask() {
+    return this.processingQueue.filter(
+      task => task.processingState === ProcessingState.Done,
+    )
+  }
+
+  async start(onTaskCompleted = this.onTaskCompleted) {
+    this.status = ProcessingState.Pending
+    const startTime = Date.now()
+    const pendingBatch = new Promise(resolve => {
+      while (
+        ProcessingTask.workerPool.tasks.length === 0 &&
+        this.leftTasks.length > 0
+      )
+        this.processNextTask(onTaskCompleted, resolve)
+    })
+    await pendingBatch
+    // this.status = this.isDone ? ProcessingState.Done : ProcessingState.Interrupted
+    this.elapsedTime += Date.now() - startTime
+    this.isDone ? this.onBatchCompleted() : this.onBatchSuspended()
+  }
+
   suspend(): Promise<any> | undefined {
     if (this.status === ProcessingState.Pending) {
       // this.status = ProcessingState.Suspended
@@ -93,44 +134,19 @@
     }
   }

-  async run(onTaskCompleted = this.onTaskCompleted) {
-    const startTime = Date.now()
-    const pendingBatch = new Promise(resolve => {
-      while (
-        ProcessingTask.workerPool.tasks.length === 0 &&
-        this.leftTasks.length > 0
-      )
-        this.processNextTask(onTaskCompleted, resolve)
-    })
-    await pendingBatch
-    // this.status = this.isDone ? ProcessingState.Done : ProcessingState.Interrupted
-    const elapsedTime = Date.now() - startTime
-    const log_end =
-      this.leftTasks.length > 0
-        ? `, ${this.leftTasks.length} tasks left in the queue`
-        : ``
-    const log = this.isDone
-      ?
`${this.processed} tasks processed in ${elapsedTime} ms ` - : `was suspended after ${this.processed} tasks processed in ${elapsedTime}ms` + - log_end - this.printLog(log) - // BatchProcess.processNextBatch() - // BatchProcess.cleanTerminated() - } - processNextTask(onTaskCompleted: any, onBatchTerminated: any) { const { nextTask } = this if (nextTask) { const pendingTask = nextTask.delegate() // const taskRes = await pendingTask - pendingTask.then(taskRes => { + pendingTask.then(() => { this.processed++ // this.printLog(`processed: ${this.processed}, left: ${this.leftTasks.length}`) - onTaskCompleted(taskRes) + onTaskCompleted(nextTask) if (this.isTerminated) { this.status === ProcessingState.Suspended - ? onBatchTerminated() - : this.onBatchSuspended?.() + ? this.onBatchSuspended?.() + : onBatchTerminated() } else if (this.status === ProcessingState.Pending) this.processNextTask(onTaskCompleted, onBatchTerminated) }) @@ -142,7 +158,24 @@ export class BatchProcess { console.log(logPrefix + log) } - static cleanTerminated() { - this.batchQueue = this.batchQueue.filter(item => !item.isTerminated) + onTaskCompleted(taskRes: any) { + return taskRes + } + + onBatchCompleted() { + this.printLog( + `${this.processed} tasks processed in ${this.elapsedTime} ms `, + ) + BatchProcess.processNextBatch() + } + + onBatchSuspended(val?: any) { + let log = `was suspended after ${this.processed} tasks processed in ${this.elapsedTime}ms` + log += + this.leftTasks.length > 0 + ? `, ${this.leftTasks.length} tasks left in the queue` + : `` + this.printLog(log) + return val } } diff --git a/src/processing/BlocksBatch.ts b/src/processing/BlocksBatch.ts deleted file mode 100644 index d21afd7..0000000 --- a/src/processing/BlocksBatch.ts +++ /dev/null @@ -1,127 +0,0 @@ -import { Vector2 } from 'three' - -import { WorldEnv, WorldUtils, Biome, ProcessingTask } from '../index' -import { serializePatchId, getPatchId, asVect3 } from '../utils/convert' -import { PatchKey, GroundBlock, Block, BlockData } from '../utils/types' - -import { GroundBlockData, GroundPatch } from './GroundPatch' - -export type BlocksBatchArgs = { - posBatch: Vector2[] -} - -export type BlocksBatchProcessingParams = { - groundLevel: false -} - -const defaultProcessingParams: BlocksBatchProcessingParams = { - groundLevel: false, -} - -export class BlocksBatch extends ProcessingTask { - localPatchCache: Record = {} - patchIndex: Record = {} - blocks: any[] = [] - input: Vector2[] = [] - output: any[] = [] - constructor(posBatch: Vector2[]) { - super() - // sort input blocks by patch - // const blocksByPatch: Record = {} - for (const pos of posBatch) { - const patchId = getPatchId(pos, WorldEnv.current.patchDimensions) - const patchKey = serializePatchId(patchId) - this.patchIndex[patchKey] = this.patchIndex[patchKey] || [] - this.patchIndex[patchKey]?.push(pos) - } - this.input = posBatch - } - - asBatch() { - const posBatch: Vector2[] = [] - Object.values(this.patchIndex).forEach(posArr => posBatch.push(...posArr)) - return posBatch - } - - initCache() { - for (const patchKey of Object.keys(this.patchIndex)) { - const groundLayer = new GroundPatch(patchKey) - groundLayer.preprocess() - this.localPatchCache[patchKey] = groundLayer - } - } - - override get inputs() { - return [this.input] - } - - override reconcile(stubs: GroundBlock[]) { - return stubs.map(pos => { - // blockStub.pos = WorldUtils.convert.parseThreeStub(blockStub.pos) - return WorldUtils.convert.parseThreeStub(pos) - }) as GroundBlock[] - } - - override async 
process(processingParams = defaultProcessingParams) { - const { groundLevel } = processingParams - console.log(groundLevel) - this.initCache() - - // const groundBlocksData = this.input.map(pos => { - const batchOutput = this.input.map(pos => { - const patchId = getPatchId(pos, WorldEnv.current.patchDimensions) - const patchKey = serializePatchId(patchId) - const groundPatch = this.localPatchCache[patchKey] - const groundData = groundPatch?.computeGroundBlock(asVect3(pos)) - // return groundData - // }).filter(val => val) as GroundBlockData[] - // const batchOutput = groundBlocksData.map(groundData => { - const { biome, landscapeIndex, level } = groundData as GroundBlockData - const landscapeConf = Biome.instance.mappings[biome].nth(landscapeIndex) - const groundConf = landscapeConf.data - const blockData: BlockData = { - level, - type: groundConf.type, - } - const block: Block = { - pos: asVect3(pos), - data: blockData, - } - return block - // override with last block if specified - // if (params.includeEntitiesBlocks) { - // const lastBlockData = await queryLastBlockData(blockPos) - // block.data = - // lastBlockData.level > 0 && lastBlockData.type - // ? lastBlockData - // : (block.data as any) - // } - // return block //blockData?.level || 0//getHeight() - }) - this.output = batchOutput - - // this.blocks = blocksBatch - // return blocksBatch - // const blocksBatch = blockPosBatch.map((pos) => { - // const blockPos = asVect3(pos) - // const blockData = computeGroundBlock(blockPos) - // const { spawnableItems } = blockData - // const queriedLoc = new Box2().setFromPoints([asVect2(blockPos)]) - // queriedLoc.max.addScalar(1) - // false && includeEntitiesBlocks && spawnableItems.forEach(itemType => { - // // several (overlapping) objects may be found at queried position - // const [spawnedEntity] = ItemsInventory.querySpawnedEntities(itemType, queriedLoc) - // const lastBlockIndex = blocksBuffer?.findLastIndex(elt => elt) - // if (blocksBuffer && lastBlockIndex && lastBlockIndex >= 0) { - // blockData.level += lastBlockIndex - // blockData.type = blocksBuffer[lastBlockIndex] as BlockType - // } - // }) - } - - toStub() { - return this.output - } -} - -ProcessingTask.registeredObjects[BlocksBatch.name] = BlocksBatch diff --git a/src/processing/BlocksProcessing.ts b/src/processing/BlocksProcessing.ts new file mode 100644 index 0000000..ac7d3e5 --- /dev/null +++ b/src/processing/BlocksProcessing.ts @@ -0,0 +1,361 @@ +import { Box2, Vector2, Vector3 } from 'three' + +import { + WorldEnv, + Biome, + ProcessingTask, + ChunkContainer, + DensityVolume, +} from '../index' +import { + serializePatchId, + getPatchId, + asVect3, + asVect2, + parseThreeStub, +} from '../utils/convert' +import { PatchKey, GroundBlock, Block, BlockData } from '../utils/types' + +import { GroundBlockData, GroundPatch } from './GroundPatch' +import { ItemsBaker } from './ItemsProcessing' + +export type BlocksBatchArgs = { + posBatch: Vector2[] +} + +export enum BlocksProcessingRecipe { + Ground, + Overground, + Underground, + Peak, + Floor, // returns floor if requested block is empty in down direction, otherwise look for closest floor in up direction + Ceiling, + Nearest, // nearest ground, floor or ceiling +} + +export type BlocksProcessingParams = { + recipe: BlocksProcessingRecipe +} + +const defaultProcessingParams: BlocksProcessingParams = { + recipe: BlocksProcessingRecipe.Ground, +} + +export type BuildCache = {} + +/** + * requires: ground patch + * provides: ground block + */ +const bakeGroundBlock = (pos: 
Vector3, groundPatch: GroundPatch) => {
+  const groundData = groundPatch?.computeGroundBlock(pos)
+  // return groundData
+  // }).filter(val => val) as GroundBlockData[]
+  // const batchOutput = groundBlocksData.map(groundData => {
+  const { biome, landIndex, level } = groundData as GroundBlockData
+  const landscapeConf = Biome.instance.mappings[biome].nth(landIndex)
+  const groundConf = landscapeConf.data
+  const blockData: BlockData = {
+    level,
+    type: groundConf.type,
+  }
+  pos.y = level
+  const block: Block = {
+    pos,
+    data: blockData,
+  }
+  return block
+}
+
+/**
+ * needs: ground block
+ * provides: all blocks above ground surface
+ * @param pos
+ */
+const bakeOvergroundBlocks = async (groundBlock: Block) => {
+  const queriedLoc = new Box2().setFromPoints([asVect2(groundBlock.pos)])
+  queriedLoc.max.addScalar(1)
+  const itemsProcessor = new ItemsBaker(queriedLoc, groundBlock.pos)
+  const overgroundBlocks = await itemsProcessor.queryIsolatedPoint()
+  return overgroundBlocks
+}
+
+/**
+ * all blocks below ground surface
+ */
+const bakeUndergroundBlocks = (groundBlock: Block) => {
+  const groundLevel = groundBlock.pos.y
+  const groundPos = asVect2(groundBlock.pos)
+  const undergroundBlocks: number[] = []
+  for (let y = 0; y < groundLevel; y++) {
+    const isEmptyBlock = DensityVolume.instance.getBlockDensity(
+      asVect3(groundPos, y),
+      groundLevel + 20,
+    )
+    undergroundBlocks.push(isEmptyBlock ? 0 : 1)
+  }
+  return undergroundBlocks
+}
+
+/**
+ * needs: overground blocks
+ * provides: highest overground block
+ * usage: LOD
+ */
+const bakePeakBlock = (
+  groundBlock: Block,
+  overgroundBlocks: number[],
+) => {
+  const lastIndex = overgroundBlocks.findLastIndex(elt => elt)
+  const lastBlockType = lastIndex >= 0 && overgroundBlocks[lastIndex]
+  const blockType = lastBlockType
+    ? ChunkContainer.dataDecoder(lastBlockType)
+    : groundBlock.data.type
+  // guard: findLastIndex returns -1 when no overground block exists
+  if (lastIndex >= 0) groundBlock.data.level += lastIndex
+  groundBlock.data.type = blockType
+  return groundBlock
+  // const blockPos = asVect3(pos)
+  // const blockData = computeGroundBlock(blockPos)
+  // const { spawnableItems } = blockData
+
+  // false && includeEntitiesBlocks && spawnableItems.forEach(itemType => {
+  //   // several (overlapping) objects may be found at queried position
+  //   const [spawnedEntity] = ItemsInventory.querySpawnedEntities(itemType, queriedLoc)
+  //   const lastBlockIndex = blocksBuffer?.findLastIndex(elt => elt)
+  //   if (blocksBuffer && lastBlockIndex && lastBlockIndex >= 0) {
+  //     blockData.level += lastBlockIndex
+  //     blockData.type = blocksBuffer[lastBlockIndex] as BlockType
+  //   }
+  // })
+
+  // override with last block if specified
+  // if (params.includeEntitiesBlocks) {
+  //   const lastBlockData = await queryLastBlockData(blockPos)
+  //   block.data =
+  //     lastBlockData.level > 0 && lastBlockData.type
+  //       ? lastBlockData
+  //       : (block.data as any)
+  // }
+  // return block //blockData?.level || 0//getHeight()
+}
+
+/**
+ * to avoid spawning above schematics like trees, the returned block should not
+ * be above a schematic block nor at too great a distance from the ground surface
+ * usage: random spawn above floor surface
+ *
+ */
+// start with normal query to find ground level,
+// - if requested pos is above ground: returns ground pos provided there are no
+// schematic blocks at the given location, or the schematic blocks are fewer than a predefined amount
+// - if requested pos is below ground: look down or up for closest empty block
+// stop iterating in up direction if reaching ground surface with schematic block
+// offset from requested pos
+const bakeFloorBlock = (
+  groundBlock: Block,
+  initialBlockLevel: number,
+) => {
+  const groundLevel = groundBlock.pos.y
+  const groundPos = asVect2(groundBlock.pos)
+
+  const isEmptyBlock = (level: number) =>
+    DensityVolume.instance.getBlockDensity(
+      asVect3(groundPos, level),
+      groundLevel + 20,
+    )
+  let currentLevel = initialBlockLevel
+  // above ground level
+  if (currentLevel > groundLevel) {
+    currentLevel = groundLevel
+  }
+  // below ground level
+  else {
+    // if current block not empty, find first empty below
+    while (!isEmptyBlock(currentLevel) && currentLevel-- >= 0);
+    // then look below for last empty block
+    while (isEmptyBlock(currentLevel) && currentLevel-- >= 0);
+  }
+  // groundBlock.pos.y = currentLevel
+  groundBlock.data.level = currentLevel
+  return groundBlock
+  // const y = 0
+  // const groundLevel = 0
+  // let offset = 0
+  // let done = false
+  // while (!done) {
+  //   // look above
+  //   if (y + offset < groundLevel) {
+  //     // if found return offset
+  //   }
+  //   // look below
+  //   if (y - offset > 0) {
+  //     // if found return - offset
+  //     block.pos.y = y
+  //     const isEmptyBlock = DensityVolume.instance.getBlockDensity(
+  //       block.pos,
+  //       groundLevel + 20,
+  //     )
+  //   }
+  //   offset++
+  // }
+}
+
+/**
+ * needs:
+ * provides: nearest ceiling block
+ */
+const bakeCeilingBlock = (
+  groundBlock: Block,
+  requestedBlockLevel: number,
+) => {
+  console.log(groundBlock)
+  console.log(requestedBlockLevel)
+}
+
+type BuildArtefacts = {
+  groundPatch: GroundPatch
+  groundBlock?: Block
+  overgroundBlocks?: number[]
+  undergroundBlocks?: number[]
+}
+
+const getPatchKey = (inputPos: Vector2) => {
+  const patchId = getPatchId(inputPos, WorldEnv.current.patchDimensions)
+  const patchKey = serializePatchId(patchId)
+  return patchKey
+}
+
+const getGroundPatch = (patchKey: PatchKey) => {
+  const groundLayer = new GroundPatch(patchKey)
+  groundLayer.preprocess()
+  return groundLayer
+}
+
+export class BlockProcessor {
+  requestedPos: Vector3
+  buildStack: BuildArtefacts
+
+  constructor(requestedPos: Vector3, groundPatch?: GroundPatch) {
+    this.requestedPos = requestedPos
+    const patchKey = getPatchKey(asVect2(requestedPos))
+    groundPatch = groundPatch || getGroundPatch(patchKey)
+    this.buildStack = { groundPatch }
+  }
+
+  getGroundBlock() {
+    const { requestedPos } = this
+    const { groundBlock, groundPatch } = this.buildStack
+    return groundBlock || bakeGroundBlock(requestedPos, groundPatch)
+  }
+
+  async getOvergroundBlocks() {
+    return (
+      this.buildStack.overgroundBlocks ||
+      (await bakeOvergroundBlocks(this.getGroundBlock()))
+    )
+  }
+
+  getUndergroundBlocks() {
+    return (
+      this.buildStack.undergroundBlocks ||
+      bakeUndergroundBlocks(this.getGroundBlock())
+    )
+  }
+
+  async getPeakBlock() {
+    // build requirements
+    const overgroundBlocks = await
this.getOvergroundBlocks() + const peakBlock = bakePeakBlock(this.getGroundBlock(), overgroundBlocks) + return peakBlock + } + + getFloorBlock() { + const groundBlock = this.getGroundBlock() + const initialBlockLevel = this.requestedPos.y // groundBlock.data.level + 1 + const floorBlock = bakeFloorBlock(groundBlock, initialBlockLevel) + return floorBlock + } + + getCeilingBlock() { + const groundBlock = this.getGroundBlock() + const floorBlock = bakeCeilingBlock(groundBlock, groundBlock.data.level + 1) + return floorBlock + } + + bakeRecipe = async (recipe: BlocksProcessingRecipe) => { + let result + if (recipe === BlocksProcessingRecipe.Ground) { + result = this.getGroundBlock() + } else if (recipe === BlocksProcessingRecipe.Peak) { + result = await this.getPeakBlock() + } else if (recipe === BlocksProcessingRecipe.Floor) { + result = this.getFloorBlock() + } else if (recipe === BlocksProcessingRecipe.Ceiling) { + result = this.getCeilingBlock() + } + // else if (recipe === BlocksProcessingRecipe.Overground) { + // result = await this.getOvergroundBlocks() + // } else if (recipe === BlocksProcessingRecipe.Underground) { + // result = this.getUndergroundBlocks() + // } + return result + } +} + +/** + * Surface blocks + */ +export class BlocksProcessing extends ProcessingTask { + buildCache: Record = {} + blocks: any[] = [] + input: Vector3[] = [] + output: any[] = [] + constructor(posBatch: Vector3[]) { + super() + this.input = posBatch + } + + override get inputs() { + return [this.input] + } + + /** + * requires: + * - requestedPos + * - cache + * provides: + * - ground patch + */ + getGroundPatch(requestedPos: Vector2) { + const patchKey = getPatchKey(requestedPos) + // look for existing patch in current cache + const groundPatch = this.buildCache[patchKey] + // if not existing build and insert in cache + return groundPatch || getGroundPatch(patchKey) + } + + override async process(processingParams = defaultProcessingParams) { + const { recipe } = processingParams + const pendingBlocks = this.input.map(async pos => { + const groundPatch = this.getGroundPatch(asVect2(pos)) + const blockProcessor = new BlockProcessor(pos, groundPatch) + const block = await blockProcessor.bakeRecipe(recipe) + return block as Block + }) + const batchOutput = await Promise.all(pendingBlocks) + return batchOutput + } + + override reconcile(stubs: GroundBlock[]) { + return stubs.map(blockStub => { + blockStub.pos = parseThreeStub(blockStub.pos) + // return WorldUtils.convert.parseThreeStub(pos) + return blockStub + }) as GroundBlock[] + } + + toStub() { + return this.output + } +} + +ProcessingTask.registeredObjects[BlocksProcessing.name] = BlocksProcessing diff --git a/src/processing/BoardProcessing.ts b/src/processing/BoardProcessing.ts index 7e13081..7855171 100644 --- a/src/processing/BoardProcessing.ts +++ b/src/processing/BoardProcessing.ts @@ -5,31 +5,33 @@ import { asBox3, asVect2, asVect3, - parsePatchKey, - serializeChunkId, + getPatchId, + getUpperScalarId, + serializePatchId, } from '../utils/convert' import { - BlockType, WorldEnv, ChunkContainer, - BlockMode, - WorldUtils, - ChunkSet, + BatchProcess, + ChunksProcessor, + BlockType, } from '../index' import { ProcLayer } from '../procgen/ProcLayer' -import { ChunkKey, PatchKey } from '../utils/types' -import { PatchIndexer } from '../datacontainers/ChunksIndexer' +import { BlockMode, ChunkId, PatchKey } from '../utils/types' import { DataContainer, PatchBase, PatchElement, } from '../datacontainers/PatchBase' +import { copySourceToTargetPatch } 
from '../utils/data' import { - ItemsChunkLayer, + ItemsBaker, ItemsProcessingParams, - ItemsProcessMode, + ItemsProcessingRecipes, } from './ItemsProcessing' +import { ProcessingTask } from './TaskProcessing' +import { ChunksProcessingParams } from './ChunksProcessing' // import { UndegroundChunkset } from './ChunksProcessing' export enum BlockCategory { @@ -53,16 +55,10 @@ export type BoardParams = { export type BoardStub = { bounds: Box2 content: Uint8Array + elevation?: number } -type ChunkIndex = Record -type BoardCacheData = { - itemsLayer: ItemsChunkLayer - chunkIndex: ChunkIndex -} - -const getChunkYId = (y: number) => - Math.floor(y / WorldEnv.current.chunkDimensions.y) +const { patchSize, patchDimensions } = WorldEnv.current class BoardPatch extends PatchBase implements DataContainer { rawData: Uint8Array @@ -108,90 +104,118 @@ class BoardPatch extends PatchBase implements DataContainer { } } -type BoardContent = { - chunk: ChunkContainer - patch: BoardPatch -} +/** + * Will handle chunks processing and storing used to build board + */ +export class BoardCache extends BatchProcess { + centerPatchId = new Vector2(NaN, NaN) + patchRange = 0 + + constructor(centerPatchId: Vector2, patchRange: number) { + super([]) + this.centerPatchId = centerPatchId + this.patchRange = patchRange + } -export class BoardCache extends PatchIndexer { - center = new Vector3() - radius: number - thickness: number - constructor(radius: number, thickness: number) { - super() - this.radius = radius - this.thickness = thickness + // override onTaskCompleted(task: ProcessingTask) { + // console.log(task) + // } + + override onBatchCompleted(): void { + console.log(`BoardCache: ready!`) + } + + get itemsProcessors() { + return this.finishedTask.filter( + task => task instanceof ItemsBaker, + ) as ItemsBaker[] } - get cachedItems() { - return Object.values(this.patchLookup) + get chunksProcessors() { + return this.finishedTask.filter( + task => task instanceof ChunksProcessor, + ) as ChunksProcessor[] } - get cachedChunks() { - const cachedChunks: ChunkContainer[] = [] - for (const { chunkIndex } of this.indexedElements) { - const items = Object.values(chunkIndex).filter(val => val) - cachedChunks.push(...items) + get processedChunks() { + const allProcessedChunks = [] + // iter board indexed chunks + for (const chunkProcessor of this.chunksProcessors) { + const processedChunks = chunkProcessor.result as ChunkContainer[] + allProcessedChunks.push(...processedChunks) } - return cachedChunks + return allProcessedChunks } - async initCacheIndex(patchIndex: PatchKey) { - if (!this.patchLookup[patchIndex]) { - const processingParams: ItemsProcessingParams = { - mode: ItemsProcessMode.INDIVIDUAL, - } - const itemsLayer = new ItemsChunkLayer(patchIndex) - await itemsLayer.process(processingParams) - await itemsLayer.bakeIndividualChunks() - // await itemsLayer.bakeAsIndividualChunks() - const chunkIndex = {} - const cacheData: BoardCacheData = { - itemsLayer, - chunkIndex, + get processedItemsChunks() { + const processedItemsChunks = [] + // iter board indexed chunks + for (const itemProcessor of this.itemsProcessors) { + const itemsChunks = itemProcessor.result as ChunkContainer[] + processedItemsChunks.push(...itemsChunks) + } + return processedItemsChunks + } + + get boundingPatchIds() { + const { centerPatchId, patchRange } = this + const bmin = centerPatchId.clone().subScalar(patchRange) + const bmax = centerPatchId.clone().addScalar(patchRange) + const mapRange = new Box2(bmin, bmax) + return mapRange + } + + 
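+  /**
+   * builds boolean index of all patch keys within current bounding range,
+   * i.e. the patch candidates for the chunks and items processing tasks below
+   */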
buildPatchIndex() {
+    const patchIndex: Record<PatchKey, boolean> = {}
+    // const patchIds = []
+    const { min, max } = this.boundingPatchIds
+    for (let { y } = min; y <= max.y; y++) {
+      for (let { x } = min; x <= max.x; x++) {
+        const patchId = new Vector2(x, y)
+        const patchKey = serializePatchId(patchId)
+        patchIndex[patchKey] = true
+        // patchIds.push(new Vector2(x, y))
+      }
+    }
+    return patchIndex
+  }

-  async buildCacheAroundPos(center: Vector3) {
-    const { thickness, radius } = this
-    const boardChunksRange = {
-      bottomId: getChunkYId(center.y - thickness),
-      topId: getChunkYId(center.y + thickness),
+  /**
+   * call each time view settings change to regen batch
+   */
+  build() {
+    // regen patch index from current view settings
+    const patchIndex = this.buildPatchIndex()
+    const chunksProcessingParams: ChunksProcessingParams = {
+      skipEntities: true,
+      noDataEncoding: true,
     }
-    this.center = center
-    const indexChanges = super.getIndexingChanges(asVect2(center), radius)
-    // insert new keys in index
-    await Promise.all(
-      indexChanges.map(patchKey => this.initCacheIndex(patchKey)),
-    )
-    const indexEntries = Object.entries(this.patchLookup)
-    // refresh chunks required for board construction
-    for await (const [patchKey, cacheData] of indexEntries) {
-      const patchId = parsePatchKey(patchKey) as Vector2
-      const { chunkIndex } = cacheData
-      // cache chunks only related to boards
-      for (
-        let yId = boardChunksRange.bottomId;
-        yId <= boardChunksRange.topId;
-        yId++
-      ) {
-        const chunkId = asVect3(patchId, yId)
-        const chunkKey = serializeChunkId(chunkId)
-        if (!chunkIndex[chunkKey]) {
-          const undegroundChunksProcessing = new ChunkSet(patchKey)
-          const processingParams = { skipEncoding: true, chunkId }
-          const chunk =
-            await undegroundChunksProcessing.delegate(processingParams)
-          chunkIndex[chunkKey] = chunk
-        }
-      }
+    // insert chunks gen tasks in processing queue
+    Object.keys(patchIndex)
+      .map(patchKey => new ChunksProcessor(patchKey))
+      .forEach(task => {
+        task.processingParams = chunksProcessingParams
+        this.processingQueue.push(task)
+      })
+
+    const itemsProcessingParams: ItemsProcessingParams = {
+      recipe: ItemsProcessingRecipes.IndividualChunks,
     }
+
+    // insert items gen tasks in processing queue
+    Object.keys(patchIndex)
+      .map(patchKey => new ItemsBaker(patchKey))
+      .forEach(task => {
+        task.processingParams = itemsProcessingParams
+        this.processingQueue.push(task)
+      })
   }

+  /**
+   * fills target chunk from cache
+   */
   fillTargetChunk(boardTarget: ChunkContainer) {
-    this.cachedChunks.forEach(chunk => {
+    this.processedChunks.forEach(chunk => {
       ChunkContainer.copySourceToTarget(chunk, boardTarget)
       // itemsChunks.forEach(itemSource =>
       //   ChunkContainer.copySourceToTarget(itemSource, boardTarget),
@@ -200,30 +224,35 @@
   }

   querySpawnedItems(bounds: Box3) {
+    const { itemsProcessors } = this
     const spawnedItems: ChunkContainer[] = []
-    // const spawnedPlaces: Vector3[] = []
-    // const individualChunks: ChunkContainer[] = []
-    for (const cacheData of this.cachedItems) {
-      // cacheData.itemsLayer.spawnedLocs
-      //   .filter(spawnLoc => asBox2(bounds).containsPoint(asVect2(spawnLoc)))
-      //   .forEach(spawnLoc => spawnedPlaces.push(spawnLoc))
-      cacheData.itemsLayer.individualChunks
+    itemsProcessors.forEach(itemProcessor => {
+      const itemsChunks = itemProcessor.result as ChunkContainer[]
+      itemsChunks
         .filter(itemChunk => {
           // const spawnLoc = asVect2(itemChunk.bounds.getCenter(new Vector3()))
           // return patchBounds.containsPoint(spawnLoc)
           return itemChunk.bounds.intersectsBox(bounds)
         })
        .forEach(itemChunk => spawnedItems.push(itemChunk))
-    }
-    // this.cachedData.forEach(({ itemsLayer }) => {
-    //   const patchSpawnedItems = itemsLayer.getSpawnedItems()
-    //   spawnedItems.push
-    // })
+    })
     return spawnedItems
   }
 }

-export class BoardContainer {
+type BoardContent = {
+  chunk: ChunkContainer
+  patch: BoardPatch
+}
+
+/**
+ * Call:
+ * - `createInstance` to create unique board instance at specific location
+ * - `deleteInstance` to remove existing board instance
+ */
+export class BoardProcessor {
+  // eslint-disable-next-line no-use-before-define
+  static singleton: BoardProcessor | null
   localCache: BoardCache
   // dedicatedWorker:
   // board input
@@ -238,15 +267,54 @@
   boardData!: BoardPatch
   static boardHolesLayer = new ProcLayer('holesMap')

-  constructor(boardRadius?: number, boardThickness?: number) {
+  /**
+   * access unique board instance from anywhere
+   */
+  static get instance() {
+    return this.singleton
+  }
+
+  /**
+   * create board instance running in background
+   */
+  static createInstance(boardPosition: Vector3) {
+    this.singleton = this.singleton || new BoardProcessor(boardPosition)
+    return this.singleton
+  }
+
+  /**
+   * remove existing board instance
+   */
+  static deleteInstance() {
+    this.singleton = null
+  }
+
+  constructor(
+    boardCenter: Vector3,
+    boardRadius?: number,
+    boardThickness?: number,
+  ) {
     const { boardSettings } = WorldEnv.current
     boardRadius = boardRadius || boardSettings.boardRadius
     boardThickness = boardThickness || boardSettings.boardThickness
-    this.localCache = new BoardCache(boardRadius, boardThickness)
+    this.boardParams.center = boardCenter.clone().floor()
     this.boardParams.radius = boardRadius
     this.boardParams.thickness = boardThickness
-    BoardContainer.boardHolesLayer.sampling.periodicity = 0.25
+    BoardProcessor.boardHolesLayer.sampling.periodicity = 0.25
+    this.localCache = new BoardCache(this.centerPatchId, this.patchRange)
+    this.localCache.build()
    // this.center = boardCenter
+    console.log(
+      `create board at ${serializePatchId(this.centerPatchId)} (radius: ${boardRadius}, thickness: ${boardThickness})`,
+    )
+  }
+
+  get centerPatchId() {
+    return getPatchId(asVect2(this.boardParams.center), patchDimensions)
+  }
+
+  get patchRange() {
+    return getUpperScalarId(this.boardParams.radius, patchSize)
   }

   get initialDims() {
@@ -268,6 +336,10 @@
     return this.boardParams.center.y
   }

+  get boardCenter() {
+    return asVect2(this.boardParams.center)
+  }
+
   isWithinBoard(buffPos: Vector2, buffer: Uint16Array) {
     const { radius, center } = this.boardParams
     if (buffPos) {
@@ -292,9 +364,21 @@
     return false
   }

-  genBoardContent(center: Vector3) {
-    this.boardParams.center = center
-    this.finalBounds.setFromPoints([asVect2(center)])
+  get nonOverlappingItemsChunks() {
+    const matching = this.localCache.processedItemsChunks.filter(
+      itemChunk => !this.overlapsBoard(asBox2(itemChunk.bounds)),
+    )
+    return matching
+  }
+
+  async genBoardContent() {
+    const onCacheReady = new Promise(resolve => {
+      this.localCache.enqueue(undefined, resolve)
+    }).then(() => console.log(`Board cache is ready!`))
+    await onCacheReady
+    console.log(`gen board content`)
+    // this.boardParams.center = center
+    this.finalBounds.setFromPoints([this.boardCenter])
     const emptyBlock = ChunkContainer.dataEncoder(BlockType.NONE)
     const initialPatchBounds = asBox2(this.initialBounds)
     const boardPatch = new BoardPatch(initialPatchBounds)
@@ -348,7 +432,7 @@ export class
BoardContainer { patch: new BoardPatch(this.finalBounds), chunk: new ChunkContainer(finalChunkBounds, 1), } - WorldUtils.data.copySourceToTargetPatch(boardPatch, boardContent.patch) + copySourceToTargetPatch(boardPatch, boardContent.patch) ChunkContainer.copySourceToTarget(boardChunk, boardContent.chunk) this.addTrimmedItems(boardContent.patch, boardContent.chunk) this.boardData = boardContent.patch @@ -397,4 +481,52 @@ export class BoardContainer { // } } } + + *overrideOriginalChunksContent( + boardChunk: ChunkContainer, + targetChunkId?: ChunkId, + ) { + const { nonOverlappingItemsChunks } = this + if (targetChunkId) { + // TODO + } + // iter processed original chunks + for (const originalChunk of this.localCache.processedChunks) { + // board_chunk.rawData.fill(113) + const targetChunk = new ChunkContainer( + originalChunk.chunkKey, + originalChunk.margin, + ) + originalChunk.rawData.forEach( + (val, i) => (targetChunk.rawData[i] = ChunkContainer.dataEncoder(val)), + ) + // copy items individually + nonOverlappingItemsChunks.forEach(itemChunk => + ChunkContainer.copySourceToTarget(itemChunk, targetChunk), + ) + // override with board_buffer + ChunkContainer.copySourceToTarget(boardChunk, targetChunk, false) + yield targetChunk + } + } + + *restoreOriginalChunksContent() { + const { processedItemsChunks } = this.localCache + // iter processed original chunks + for (const originalChunk of this.localCache.processedChunks) { + // board_chunk.rawData.fill(113) + const targetChunk = new ChunkContainer( + originalChunk.chunkKey, + originalChunk.margin, + ) + originalChunk.rawData.forEach( + (val, i) => (targetChunk.rawData[i] = ChunkContainer.dataEncoder(val)), + ) + // copy items individually + processedItemsChunks.forEach(itemChunk => + ChunkContainer.copySourceToTarget(itemChunk, targetChunk), + ) + yield targetChunk + } + } } diff --git a/src/processing/ChunksBatch.ts b/src/processing/ChunksBatch.ts index d204415..82dd2be 100644 --- a/src/processing/ChunksBatch.ts +++ b/src/processing/ChunksBatch.ts @@ -1,18 +1,17 @@ import { Box2, Vector2 } from 'three' -import { WorldEnv } from '../config/WorldEnv' -import { getPatchId, serializePatchId } from '../utils/convert' +import { serializePatchId } from '../utils/convert' import { PatchKey } from '../utils/types' import { BatchProcess } from './BatchProcessing' import { - ChunkSet, + ChunksProcessor, lowerChunksProcessingParams, upperChunksProcessingParams, } from './ChunksProcessing' import { ProcessingState } from './TaskProcessing' -const { patchDimensions } = WorldEnv.current +// const { patchSize, patchDimensions } = WorldEnv.current // type PipelineStage = { // in: T[], @@ -30,20 +29,12 @@ const { patchDimensions } = WorldEnv.current * then when within near dist moves to caves queue * when processed is done, moves into done queue */ -export class ChunksBatch extends BatchProcess { - viewPos = new Vector2(NaN, NaN) - viewDist = 0 +export class ViewChunksBatch extends BatchProcess { + viewCenter = new Vector2(NaN, NaN) + viewRange = 0 - constructor(onTaskCompleted?: any) { - super([], onTaskCompleted) - } - - get viewRange() { - return getPatchId(new Vector2(this.viewDist), patchDimensions).x - } - - get viewPosId() { - return getPatchId(this.viewPos, patchDimensions) + constructor() { + super([]) } get queuePatchIndex() { @@ -53,10 +44,10 @@ export class ChunksBatch extends BatchProcess { return patchIndex } - get patchViewRange() { - const { viewPosId } = this - const bmin = viewPosId.clone().subScalar(this.viewRange) - const bmax = 
viewPosId.clone().addScalar(this.viewRange) + get viewPatchRange() { + const { viewCenter, viewRange } = this + const bmin = viewCenter.clone().subScalar(viewRange) + const bmax = viewCenter.clone().addScalar(viewRange) const patchViewRange = new Box2(bmin, bmax) return patchViewRange } @@ -68,7 +59,7 @@ export class ChunksBatch extends BatchProcess { genViewPatchIndex() { const patchIndex: Record = {} // const patchIds = [] - const { min, max } = this.patchViewRange + const { min, max } = this.viewPatchRange for (let { y } = min; y <= max.y; y++) { for (let { x } = min; x <= max.x; x++) { const patchId = new Vector2(x, y) @@ -81,26 +72,25 @@ export class ChunksBatch extends BatchProcess { } reorderProcessingQueue() { - const { viewPosId } = this + const { viewCenter } = this this.processingQueue.sort( - (e1, e2) => e1.distanceTo(viewPosId) - e2.distanceTo(viewPosId), + (e1, e2) => e1.distanceTo(viewCenter) - e2.distanceTo(viewCenter), ) } - isSyncNeeded(viewPos: Vector2, viewDist: number) { - const viewPosId = getPatchId(viewPos, patchDimensions) - return ( - this.viewPosId.distanceTo(viewPosId) > 0 || this.viewDist !== viewDist - ) + viewChanged(viewCenter: Vector2, viewRange: number) { + const viewChanged = + this.viewCenter.distanceTo(viewCenter) > 0 || this.viewRange !== viewRange + return viewChanged } /** - * called each time view pos or view dist change to regen chunks index + * called each time view center or range change to regen chunks index */ - async syncView(viewPos: Vector2, viewDist: number) { - if (this.isSyncNeeded(viewPos, viewDist)) { - this.viewPos = viewPos - this.viewDist = viewDist + async syncView(viewCenter: Vector2, viewRange: number) { + if (this.viewChanged(viewCenter, viewRange)) { + this.viewCenter = viewCenter + this.viewRange = viewRange // regen patch index from current view const viewPatchIndex = this.genViewPatchIndex() // purge queues from out of range elements @@ -112,7 +102,7 @@ export class ChunksBatch extends BatchProcess { // insert elements never processed before Object.keys(viewPatchIndex) .filter(patchKey => !queuePatchIndex[patchKey]) - .map(patchKey => new ChunkSet(patchKey)) + .map(patchKey => new ChunksProcessor(patchKey)) .forEach(chunkset => this.processingQueue.push(chunkset)) // reorder processing queue this.reorderProcessingQueue() @@ -168,7 +158,7 @@ export class ChunksBatch extends BatchProcess { /** * Will process lower chunks only within near dist */ -export class LowerChunksBatch extends ChunksBatch { +export class LowerChunksBatch extends ViewChunksBatch { override async processNextTask(onTaskCompleted: any, onBatchTerminated: any) { const { nextTask } = this if (nextTask) { @@ -193,7 +183,7 @@ export class LowerChunksBatch extends ChunksBatch { /** * Will process upper chunks at far dist */ -export class UpperChunksBatch extends ChunksBatch { +export class UpperChunksBatch extends ViewChunksBatch { override async processNextTask(onTaskCompleted: any, onBatchTerminated: any) { const { nextTask } = this if (nextTask) { diff --git a/src/processing/ChunksProcessing.ts b/src/processing/ChunksProcessing.ts index 5b4eb69..07b15b3 100644 --- a/src/processing/ChunksProcessing.ts +++ b/src/processing/ChunksProcessing.ts @@ -7,7 +7,7 @@ import { serializeChunkId, asPatchBounds, } from '../utils/convert' -import { PatchId, PatchKey } from '../utils/types' +import { ChunkIndex, PatchId, PatchKey } from '../utils/types' import { ChunkContainer, ChunkStub, @@ -16,10 +16,10 @@ import { import { CavesMask, EmptyChunk, GroundChunk } from 
'../factory/ChunksFactory' import { GroundPatch } from './GroundPatch' -import { ItemsChunkLayer } from './ItemsProcessing' import { ProcessingState, ProcessingTask } from './TaskProcessing' +import { ItemsBaker } from './ItemsProcessing' const chunksRange = WorldEnv.current.chunks.range -const patchDims = WorldEnv.current.patchDimensions +const { patchDimensions: patchDims } = WorldEnv.current enum ChunksGenSide { Lower, @@ -27,7 +27,8 @@ enum ChunksGenSide { } export type ChunksProcessingParams = { - skipChunksEncoding?: boolean + noDataEncoding?: boolean + skipEntities?: boolean genSide?: ChunksGenSide } @@ -47,9 +48,10 @@ export const upperChunksProcessingParams: ChunksProcessingParams = { * - underground chunks always have higher priority than surface chunks because * near chunks needs to be displayed before far chunks and underground chunks are closer to player */ -export class ChunkSet extends ProcessingTask { +export class ChunksProcessor extends ProcessingTask { // static history: Record = {} patchKey: PatchKey + chunksIndex: ChunkIndex = {} constructor(patchKey: PatchKey) { super() @@ -99,49 +101,56 @@ export class ChunkSet extends ProcessingTask { // } override reconcile(stubs: ChunkStub[]) { - // ChunkSet.history[this.patchKey] = ChunkSet.history[this.patchKey] || 0 - // ChunkSet.history[this.patchKey]++ + // ChunksProcessor.history[this.patchKey] = ChunksProcessor.history[this.patchKey] || 0 + // ChunksProcessor.history[this.patchKey]++ const chunks = stubs.map(stub => ChunkContainer.fromStub(stub)) return chunks } override async process(processingParams: ChunksProcessingParams) { this.processingState = ProcessingState.Pending - const { skipChunksEncoding, genSide } = processingParams + this.processingParams = processingParams + const { noDataEncoding, genSide } = processingParams const lowerGen = genSide === undefined || genSide === ChunksGenSide.Lower const upperGen = genSide === undefined || genSide === ChunksGenSide.Upper const lowerChunks = lowerGen - ? await this.lowerChunksGen(skipChunksEncoding) + ? await this.lowerChunksGen(noDataEncoding) + : [] + const upperChunks = upperGen + ? await this.upperChunksGen(noDataEncoding) : [] - const upperChunks = upperGen ? 
-    const upperChunks = upperGen ? await this.upperChunksGen() : []
     this.processingState = ProcessingState.Done
-    return [...lowerChunks, ...upperChunks]
+
+    const chunks = [...lowerChunks, ...upperChunks]
+    return chunks
   }
 
   /**
    * Chunks above ground surface including overground items & empty chunks
    */
-  async upperChunksGen() {
-    const itemsLayer = new ItemsChunkLayer(this.patchKey)
-    await itemsLayer.process()
-    const itemsMergedChunk = itemsLayer.mergeIndividualChunks()
+  async upperChunksGen(noDataEncoding = false) {
+    const { skipEntities } = this.processingParams as ChunksProcessingParams
+
     const groundLayer = new GroundPatch(this.patchKey)
     groundLayer.bake()
     const patchId = groundLayer.patchId as PatchId
     const upperChunks: ChunkContainer[] = []
     // compute chunk id range
-    const { patchDimensions } = WorldEnv.current
-    const yMin = Math.min(
-      itemsMergedChunk.bounds.min.y,
-      groundLayer.valueRange.min,
-    )
-    const yMax = Math.max(
-      itemsMergedChunk.bounds.max.y,
-      groundLayer.valueRange.max,
-    )
+    let yMin = groundLayer.valueRange.min
+    let yMax = groundLayer.valueRange.max
+
+    let mergedItemsChunk
+    if (!skipEntities) {
+      const itemsLayer = new ItemsBaker(this.patchKey)
+      mergedItemsChunk = await itemsLayer.mergeIndividualChunks()
+      // adjust chunks range accordingly
+      yMin = Math.min(mergedItemsChunk.bounds.min.y, yMin)
+      yMax = Math.max(mergedItemsChunk.bounds.max.y, yMax)
+    }
+
     const surfaceIds = {
-      yMinId: Math.floor(yMin / patchDimensions.y),
-      yMaxId: Math.floor(yMax / patchDimensions.y),
+      yMinId: Math.floor(yMin / patchDims.y),
+      yMaxId: Math.floor(yMax / patchDims.y),
     }
 
     // gen each surface chunk in range
@@ -150,10 +159,12 @@ export class ChunkSet extends ProcessingTask {
       const chunkKey = serializeChunkId(chunkId)
       const worldChunk = new ChunkContainer(chunkKey, 1)
       // copy items layer first to prevent overriding ground
-      ChunkContainer.copySourceToTarget(itemsMergedChunk, worldChunk)
+      mergedItemsChunk &&
+        ChunkContainer.copySourceToTarget(mergedItemsChunk, worldChunk)
       if (worldChunk.bounds.min.y < groundLayer.valueRange.max) {
         // bake ground and undeground separately
-        const groundSurfaceChunk = new GroundChunk(chunkKey, 1)
+        const customEncoder = noDataEncoding ? defaultDataEncoder : undefined
+        const groundSurfaceChunk = new GroundChunk(chunkKey, 1, customEncoder)
         const cavesMask = new CavesMask(chunkKey, 1)
         cavesMask.bake()
         await groundSurfaceChunk.bake(groundLayer, cavesMask)
@@ -161,19 +172,14 @@ export class ChunkSet extends ProcessingTask {
         ChunkContainer.copySourceToTarget(groundSurfaceChunk, worldChunk)
       }
       upperChunks.push(worldChunk)
-      // remaining chunks
-
-      // console.log(
-      //   `processed surface chunks: ${this.printChunkset(groundSurfaceChunks)}`,
-      // )
-      // empty chunks start 1 chunk above ground surface
-      for (let y = surfaceIds.yMaxId + 1; y <= chunksRange.topId; y++) {
-        const chunkId = asVect3(this.patchId, y)
-        const chunkKey = serializeChunkId(chunkId)
-        const emptyChunk = new EmptyChunk(chunkKey)
-        upperChunks.push(emptyChunk)
-      }
-      // console.log(`processed empty chunks: ${this.printChunkset(emptyChunks)}`)
+    }
+
+    // remaining chunks: empty chunks start 1 chunk above ground surface
+    for (let y = surfaceIds.yMaxId + 1; y <= chunksRange.topId; y++) {
+      const chunkId = asVect3(this.patchId, y)
+      const chunkKey = serializeChunkId(chunkId)
+      const emptyChunk = new EmptyChunk(chunkKey)
+      upperChunks.push(emptyChunk)
     }
     return upperChunks
   }
 
   /**
    * Chunks below ground surface
    */
-  async lowerChunksGen(skipEncoding = false) {
+  async lowerChunksGen(noDataEncoding = false) {
     // find upper chunkId
     const groundLayer = new GroundPatch(this.patchKey)
     groundLayer.bake()
-    const upperId = Math.floor(
-      groundLayer.valueRange.min / WorldEnv.current.patchDimensions.y,
-    ) // - 1
+    const upperId = Math.floor(groundLayer.valueRange.min / patchDims.y) - 1
     const lowerChunks = []
     // then iter until bottom is reached
     for (let yId = upperId; yId >= chunksRange.bottomId; yId--) {
       const chunkId = asVect3(this.patchId, yId)
       const chunkKey = serializeChunkId(chunkId)
       const currentChunk = new ChunkContainer(chunkKey, 1)
-      const customEncoder = skipEncoding ? defaultDataEncoder : undefined
+      const customEncoder = noDataEncoding ? defaultDataEncoder : undefined
       const groundSurfaceChunk = new GroundChunk(chunkKey, 1, customEncoder)
       const cavesMask = new CavesMask(chunkKey, 1)
       cavesMask.bake()
@@ -210,6 +214,10 @@ export class ChunkSet extends ProcessingTask {
     return lowerChunks
   }
 
+  // individualChunkGen(chunkKey: ChunkKey){
+
+  // }
+
   /**
    * Sequential chunk gen
    */
@@ -221,4 +229,4 @@ export class ChunkSet extends ProcessingTask {
   // }
 }
 
-ProcessingTask.registeredObjects[ChunkSet.name] = ChunkSet
+ProcessingTask.registeredObjects[ChunksProcessor.name] = ChunksProcessor
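The empty-chunk loop above relies on surfaceIds: the ground height range is mapped to vertical chunk ids by integer division with the patch height. A minimal standalone sketch of that mapping (the patch height of 64 is an assumed value for illustration only):

// Sketch: map a ground height range to vertical chunk ids, mirroring the
// surfaceIds computation in ChunksProcessor.upperChunksGen.
const patchHeight = 64 // assumed vertical patch dimension

const surfaceChunkIds = (yMin: number, yMax: number) => ({
  yMinId: Math.floor(yMin / patchHeight),
  yMaxId: Math.floor(yMax / patchHeight),
})

// ground spanning heights 35..130 touches surface chunks 0..2;
// empty chunks then start at yMaxId + 1 and run up to chunksRange.topId
console.log(surfaceChunkIds(35, 130)) // { yMinId: 0, yMaxId: 2 }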
diff --git a/src/processing/GroundPatch.ts b/src/processing/GroundPatch.ts
index 8248c71..76c9a9f 100644
--- a/src/processing/GroundPatch.ts
+++ b/src/processing/GroundPatch.ts
@@ -2,7 +2,7 @@ import { Box2, Vector2, Vector3 } from 'three'
 
 import {
   GroundBlock,
-  LandscapesConf,
+  BiomeLands,
   PatchBlock,
   PatchBoundId,
   PatchKey,
@@ -31,7 +31,8 @@ export type GroundBlockData = {
   // rawVal: number,
   level: number
   biome: BiomeType
-  landscapeIndex: number
+  landIndex: number
+  landId?: string
   flags: number
 }
 
@@ -44,7 +45,7 @@ export type GroundPatchStub = PatchStub & {
 const BitAllocation = {
   level: 9, // level values ranging from 0 to 512
   biome: 4, // 16 biomes
-  landscapeIndex: 5, // 32 landscapes per biome
+  landIndex: 5, // 32 landscapes per biome
   flags: 3, // 8 additional flags
 }
 
@@ -93,30 +94,28 @@ export class GroundPatch
   decodeBlockData(rawData: number) {
     const shift = BitAllocation
     const level =
-      (rawData >> (shift.biome + shift.landscapeIndex + shift.flags)) &
+      (rawData >> (shift.biome + shift.landIndex + shift.flags)) &
       ((1 << shift.level) - 1)
     const biomeNum =
-      (rawData >> (shift.landscapeIndex + shift.flags)) &
-      ((1 << shift.biome) - 1)
+      (rawData >> (shift.landIndex + shift.flags)) & ((1 << shift.biome) - 1)
     const biome = ReverseBiomeNumericType[biomeNum] || BiomeType.Temperate
-    const landscapeIndex =
-      (rawData >> shift.flags) & ((1 << shift.landscapeIndex) - 1)
+    const landIndex = (rawData >> shift.flags) & ((1 << shift.landIndex) - 1)
     const flags = rawData & ((1 << shift.flags) - 1)
     const blockData: GroundBlockData = {
       level,
       biome,
-      landscapeIndex,
+      landIndex,
       flags,
     }
     return blockData
   }
 
   encodeBlockData(groundData: GroundBlockData): number {
-    const { level, biome, landscapeIndex, flags } = groundData
+    const { level, biome, landIndex, flags } = groundData
     const shift = BitAllocation
     let blockRawVal = level
     blockRawVal = (blockRawVal << shift.biome) | BiomeNumericType[biome]
-    blockRawVal = (blockRawVal << shift.landscapeIndex) | landscapeIndex
+    blockRawVal = (blockRawVal << shift.landIndex) | landIndex
     blockRawVal = (blockRawVal << shift.flags) | (flags || BlockMode.REGULAR)
     return blockRawVal
   }
@@ -277,7 +276,7 @@ export class GroundPatch
     const nominalConf = Biome.instance.getBiomeConf(
       rawVal,
       biomeType,
-    ) as LandscapesConf
+    ) as BiomeLands
     // const confIndex = Biome.instance.getConfIndex(currLevelConf.key)
     // const confData = Biome.instance.indexedConf.get(confIndex)
     const level = Heightmap.instance.getGroundLevel(
@@ -320,7 +319,8 @@ export class GroundPatch
     const groundBlockData: GroundBlockData = {
       level,
       biome: biomeType,
-      landscapeIndex: usedConf.index,
+      landIndex: usedConf.index,
+      landId: usedConf.data.key,
       flags,
     }
     return groundBlockData
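The BitAllocation scheme above packs a ground block into a single number: 9 bits of level, 4 bits of biome, 5 bits of land index and 3 bits of flags, 21 bits in total. A standalone round-trip sketch of the same layout; unlike GroundPatch it treats biome as a plain number instead of mapping BiomeType strings through BiomeNumericType:

// [ level:9 | biome:4 | landIndex:5 | flags:3 ] packing, as in GroundPatch
const Bits = { level: 9, biome: 4, landIndex: 5, flags: 3 }

type RawGround = { level: number; biome: number; landIndex: number; flags: number }

const encode = ({ level, biome, landIndex, flags }: RawGround) =>
  (((((level << Bits.biome) | biome) << Bits.landIndex) | landIndex) <<
    Bits.flags) |
  flags

const decode = (raw: number): RawGround => ({
  level: (raw >> (Bits.biome + Bits.landIndex + Bits.flags)) & ((1 << Bits.level) - 1),
  biome: (raw >> (Bits.landIndex + Bits.flags)) & ((1 << Bits.biome) - 1),
  landIndex: (raw >> Bits.flags) & ((1 << Bits.landIndex) - 1),
  flags: raw & ((1 << Bits.flags) - 1),
})

// round-trips as long as each field stays within its bit budget
console.log(decode(encode({ level: 137, biome: 3, landIndex: 12, flags: 1 })))
// { level: 137, biome: 3, landIndex: 12, flags: 1 }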
diff --git a/src/processing/ItemsProcessing.ts b/src/processing/ItemsProcessing.ts
index 39dc233..ad86402 100644
--- a/src/processing/ItemsProcessing.ts
+++ b/src/processing/ItemsProcessing.ts
@@ -1,23 +1,15 @@
-import { Box2, Box3, Vector2, Vector3 } from 'three'
+import { Box2, Box3, Vector3 } from 'three'
 
-import { ChunkContainer } from '../datacontainers/ChunkContainer'
-import { PatchStub } from '../datacontainers/PatchBase'
+import { ChunkContainer, ChunkStub } from '../datacontainers/ChunkContainer'
 import {
   Biome,
-  BlocksBatch,
+  BlocksProcessing,
   DistributionProfile,
   ProcessingTask,
   PseudoDistributionMap,
 } from '../index'
 import { DistributionParams } from '../procgen/BlueNoisePattern'
-import {
-  asPatchBounds,
-  asBox3,
-  asBox2,
-  asVect2,
-  asVect3,
-  parsePatchKey,
-} from '../utils/convert'
+import { asPatchBounds, asVect2, asVect3 } from '../utils/convert'
 import { PatchKey } from '../utils/types'
 import { WorldEnv } from '../config/WorldEnv'
 import { ItemsInventory, ItemType, SpawnedItems } from '../factory/ItemsFactory'
@@ -40,193 +32,330 @@ type ItemsLayerStub = {
   individualChunks: ChunkContainer[]
 }
 
-export enum ItemsProcessMode {
-  NONE,
-  INDIVIDUAL,
-  MERGED,
+const getPatchBounds = (boundsOrPatchKey: Box2 | PatchKey) => {
+  const patchBounds =
+    boundsOrPatchKey instanceof Box2
+      ? boundsOrPatchKey.clone()
+      : asPatchBounds(boundsOrPatchKey, WorldEnv.current.patchDimensions)
+  // this.bounds = asBox3(patchBounds)
+  // this.patch.bounds = patchBounds
+  // if (typeof boundsOrPatchKey === 'string') {
+  //   this.patchKey = boundsOrPatchKey
+  // }
+  return patchBounds
 }
 
-export type ItemsProcessingParams = {
-  mode: ItemsProcessMode
+// takes an item chunk and adjusts its elevation to rest on the lowest ground
+// level beneath its footprint
+const adjustHeight = async (itemChunk: ChunkContainer) => {
+  const chunkBottomBlocks: Vector3[] = []
+  // iter slice blocks
+  for (const heightBuff of itemChunk.iterChunkSlice()) {
+    if (heightBuff.data[0]) chunkBottomBlocks.push(asVect3(heightBuff.pos, 0))
+  }
+  // compute blocks batch to find lowest element
+  const blocksBatch = new BlocksProcessing(chunkBottomBlocks)
+  const batchRes = await blocksBatch.process()
+  const [lowestBlock] = batchRes.sort((b1, b2) => b1.data.level - b2.data.level)
+  const lowestHeight = lowestBlock?.data.level || 0
+  const heightOffset = itemChunk.bounds.min.y - lowestHeight
+  // adjust chunk elevation according to lowest element
+  itemChunk.bounds.translate(new Vector3(0, -heightOffset, 0))
 }
 
-const defaultProcessingParams: ItemsProcessingParams = {
-  mode: ItemsProcessMode.INDIVIDUAL,
+/**
+ * retrieveOvergroundItems
+ * needs: patchBounds (externally provided)
+ * provides: spawned items
+ */
+export const retrieveOvergroundItems = (patchBounds: Box2) => {
+  const groundPatch = new GroundPatch(patchBounds)
+  groundPatch.preprocess()
+
+  // take approximate item dimension until item type is known
+  const spawnedItems: Record = {}
+  const spawnPlaces = defaultSpawnMap.querySpawnLocations(
+    patchBounds,
+    asVect2(defaultItemDims),
+  )
+  for (const pos of spawnPlaces) {
+    // console.log(pos)
+    const { level, biome, landId } = groundPatch.computeGroundBlock(
+      asVect3(pos),
+    )
+    const { floraItems } =
+      Biome.instance.getBiomeLandConf(biome, landId as string) || {}
+    if (floraItems && floraItems?.length > 0) {
+      const itemType = defaultSpawnMap.getSpawnedItem(
+        pos,
+        floraItems,
+      ) as ItemType
+      if (itemType) {
+        spawnedItems[itemType] = spawnedItems[itemType] || []
+        spawnedItems[itemType]?.push(asVect3(pos, level))
+      }
+    }
+  }
+  return spawnedItems
 }
 
 /**
- * Process all items found in given patch
- * rename as ItemsProcessing?
+ * BakeIndividualChunks
+ * needs: spawned items
+ * provides: individual chunks
  */
-export class ItemsChunkLayer extends ProcessingTask {
-  bounds: Box3
-  patch: PatchStub = {
-    bounds: new Box2(),
+export const bakeIndividualChunks = async (spawnedItems: SpawnedItems) => {
+  // request all items belonging to this patch
+  const individualChunks = []
+  let ymin = NaN
+  let ymax = NaN // compute y range
+  for await (const [itemType, spawnPlaces] of Object.entries(spawnedItems)) {
+    for await (const spawnOrigin of spawnPlaces) {
+      const itemChunk = await ItemsInventory.getInstancedChunk(
+        itemType,
+        spawnOrigin,
+      )
+      if (itemChunk) {
+        // ChunkContainer.copySourceToTarget(itemChunk, this)
+        const { min, max } = itemChunk.bounds
+        ymin = isNaN(ymin) ? min.y : Math.min(ymin, min.y)
+        ymax = isNaN(ymax) ? max.y : Math.max(ymax, max.y)
+        await adjustHeight(itemChunk)
+        individualChunks.push(itemChunk)
+      }
+    }
   }
+  // const itemsRange = {
+  //   ymin,
+  //   ymax
+  // }
+  // this.bounds.min.y = ymin
+  // this.bounds.max.y = ymax
+  return individualChunks
+}
 
-  spawnedItems: SpawnedItems = {}
-  individualChunks: ChunkContainer[] = []
+/**
+ * needs: individual items
+ * provides: merged items chunk
+ */
+export const mergeIndividualChunks = (individualChunks: ChunkContainer[]) => {
+  const mergeChunkBounds = new Box3()
+  for (const itemChunk of individualChunks) {
+    mergeChunkBounds.union(itemChunk?.bounds)
+  }
+  const mergeChunk = new ChunkContainer(mergeChunkBounds, 1)
+  for (const itemChunk of individualChunks) {
+    ChunkContainer.copySourceToTarget(itemChunk, mergeChunk)
+  }
+  return mergeChunk
+}
 
-  constructor(boundsOrPatchKey: Box2 | PatchKey) {
-    super()
-    const patchBounds =
-      boundsOrPatchKey instanceof Box2
-        ? boundsOrPatchKey.clone()
-        : asPatchBounds(boundsOrPatchKey, WorldEnv.current.patchDimensions)
-    this.bounds = asBox3(patchBounds)
-    this.patch.bounds = patchBounds
-    if (typeof boundsOrPatchKey === 'string') {
-      this.patchKey = boundsOrPatchKey
+/**
+ * needs: spawned items + requested pos (externally provided)
+ * provides: items blocks at requested pos
+ *
+ * note: several spawned overlapping objects may be found at queried position
+ */
+export const queryIndividualPos = async (
+  spawnedItems: SpawnedItems,
+  requestedPos: Vector3,
+) => {
+  const mergeBuffer: number[] = []
+  for await (const [itemType, spawnPlaces] of Object.entries(spawnedItems)) {
+    for await (const spawnOrigin of spawnPlaces) {
+      const templateChunk = await ItemsInventory.getTemplateChunk(itemType)
+      const shallowInstance = await ItemsInventory.getInstancedChunk(
+        itemType,
+        spawnOrigin,
+      )
+
+      if (templateChunk && shallowInstance) {
+        const localPos = shallowInstance.toLocalPos(requestedPos)
+        const yOffset = requestedPos.y - spawnOrigin.y
+        const sliceSectorData = templateChunk.readBuffer(asVect2(localPos))
+        const sourceOffset = Math.max(yOffset, 0)
+        const targetOffset = -Math.min(yOffset, 0)
+        sliceSectorData.slice(sourceOffset).forEach((val, i) => {
+          const index = i + targetOffset
+          while (mergeBuffer.length <= index) mergeBuffer.push(0)
+          mergeBuffer[i + targetOffset] = val
+        })
+        // const sliceSectors = templateChunk.iterChunkSlice(location)
+        // for (const sliceSector of sliceSectors) {
+        //   sliceSectorData = sliceSector.data
+        // }
+      }
     }
   }
+  return mergeBuffer
+}
 
-  get patchKey() {
-    return this.patch.key || ''
-  }
-
-  set patchKey(patchKey: string) {
-    this.patch.key = patchKey
-    this.patch.id = parsePatchKey(patchKey) as Vector2
+export enum ItemsProcessingRecipes {
+  SpawnedItems = 'SpawnedItems',
+  IndividualChunks = 'IndividualChunks',
+  MergeIndividualChunks = 'MergeIndividualChunks',
+  IsolatedPointQuery = 'IsolatedPointQuery',
+}
+
+const bakeRecipe = async (
+  recipe: ItemsProcessingRecipes,
+  boundsOrPatchKey: Box2 | PatchKey,
+  requestedPos?: Vector3,
+) => {
+  const patchBounds = getPatchBounds(boundsOrPatchKey)
+  const spawnedItems = retrieveOvergroundItems(patchBounds)
+  if (recipe === ItemsProcessingRecipes.SpawnedItems) {
+    return spawnedItems
+  } else if (
+    recipe === ItemsProcessingRecipes.IsolatedPointQuery &&
+    requestedPos
+  ) {
+    return await queryIndividualPos(spawnedItems, requestedPos)
+  } else {
+    const individualChunks = await bakeIndividualChunks(spawnedItems)
+    if (recipe === ItemsProcessingRecipes.IndividualChunks) {
+      return individualChunks
+    } else {
+      const mergedItems = await mergeIndividualChunks(individualChunks)
+      return mergedItems
+    }
   }
 }
 
-  get patchId() {
-    return this.patch.id
+export type ItemsProcessingParams = {
+  recipe: ItemsProcessingRecipes
+}
+
+const defaultProcessingParams: ItemsProcessingParams = {
+  recipe: ItemsProcessingRecipes.IndividualChunks,
+}
+
+const noParser = (stubs: any) => stubs
+const chunkStubParser = (chunkStub: ChunkStub) =>
+  new ChunkContainer().fromStub(chunkStub)
+const chunkStubsParser = (stubs: ChunkStub[]) => stubs.map(chunkStubParser)
+
+const stubsParsers: Record<ItemsProcessingRecipes, (stubs: any) => any> = {
+  [ItemsProcessingRecipes.SpawnedItems]: noParser,
+  [ItemsProcessingRecipes.IndividualChunks]: chunkStubsParser,
+  [ItemsProcessingRecipes.MergeIndividualChunks]: chunkStubParser,
+  [ItemsProcessingRecipes.IsolatedPointQuery]: noParser,
+}
+
+/**
+ * Process all items found in given patch
+ * rename as ItemsProcessing?
+ */
+export class ItemsBaker extends ProcessingTask {
+  // bounds: Box3
+  // patch: PatchStub = {
+  //   bounds: new Box2(),
+  // }
+  mandatoryInputs: any[]
+  optionalInputs: any[]
+  // spawnedItems: SpawnedItems = {}
+  // individualChunks: ChunkContainer[] = []
+
+  constructor(boundsOrPatchKey: Box2 | PatchKey, requestedPos?: Vector3) {
+    super()
+    this.mandatoryInputs = [boundsOrPatchKey]
+    this.optionalInputs = requestedPos ? [requestedPos] : []
+    // const patchBounds =
+    //   boundsOrPatchKey instanceof Box2
+    //     ? boundsOrPatchKey.clone()
+    //     : asPatchBounds(boundsOrPatchKey, WorldEnv.current.patchDimensions)
+    // this.bounds = asBox3(patchBounds)
+    // this.patch.bounds = patchBounds
+    // if (typeof boundsOrPatchKey === 'string') {
+    //   this.patchKey = boundsOrPatchKey
+    // }
   }
 
+  // get patchKey() {
+  //   return this.patch.key || ''
+  // }
+
+  // set patchKey(patchKey: string) {
+  //   this.patch.key = patchKey
+  //   this.patch.id = parsePatchKey(patchKey) as Vector2
+  // }
+
+  // get patchId() {
+  //   return this.patch.id
+  // }
+
   override get inputs() {
-    return [this.patch.key || this.patchBounds]
+    return [...this.mandatoryInputs, ...this.optionalInputs]
   }
 
   override reconcile(stubs: ItemsLayerStub) {
-    const { spawnedItems, individualChunks } = stubs
-    // fill object from worker's data
-    this.spawnedItems = spawnedItems
-    this.individualChunks = individualChunks || this.individualChunks
+    // const { spawnedItems, individualChunks } = stubs
+    // // fill object from worker's data
+    // this.spawnedItems = spawnedItems
+    // this.individualChunks = individualChunks || this.individualChunks
+
+    // parse stubs
+    const { recipe } = this.processingParams as ItemsProcessingParams
+    const stubsParser = stubsParsers[recipe]
+    return stubsParser(stubs)
   }
 
   override async process(processingParams = defaultProcessingParams) {
-    const { mode } = processingParams
-    this.retrieveOvergroundItems()
-    switch (mode) {
-      case ItemsProcessMode.INDIVIDUAL:
-        await this.bakeIndividualChunks()
-        break
-      case ItemsProcessMode.MERGED:
-        await this.bakeIndividualChunks()
-        await this.mergeIndividualChunks()
-        break
-      default:
-    }
+    const { recipe } = processingParams
+    const [input] = this.mandatoryInputs
+    return bakeRecipe(recipe, input)
   }
 
-  toStub() {
-    const { spawnedItems } = this
-    // return { spawnedItems, individualChunks }
-    return { spawnedItems }
+  async bakeIndividualChunks() {
+    const [mandatory] = this.mandatoryInputs
+    return (await bakeRecipe(
+      ItemsProcessingRecipes.IndividualChunks,
+      mandatory,
+    )) as ChunkContainer[]
   }
 
-  get patchBounds() {
-    return asBox2(this.bounds)
+  async mergeIndividualChunks() {
+    const [mandatory] = this.mandatoryInputs
+    return (await bakeRecipe(
+      ItemsProcessingRecipes.MergeIndividualChunks,
+      mandatory,
+    )) as ChunkContainer
   }
 
-  get spawnedLocs() {
-    const spawnedLocs = []
-    for (const [, spawnPlaces] of Object.entries(this.spawnedItems)) {
-      spawnedLocs.push(...spawnPlaces)
-    }
-    return spawnedLocs
+  async queryIsolatedPoint() {
+    const [mandatory] = this.mandatoryInputs
+    const [optional] = this.optionalInputs
+    return (await bakeRecipe(
+      ItemsProcessingRecipes.IsolatedPointQuery,
+      mandatory,
+      optional,
+    )) as number[]
   }
 
-  retrieveOvergroundItems() {
-    const groundPatch = new GroundPatch(this.patchBounds)
-    groundPatch.preprocess()
-
-    const spawnedItems: Record = {}
-    const spawnPlaces = defaultSpawnMap.querySpawnLocations(
-      this.patchBounds,
-      asVect2(defaultItemDims),
-    )
-    for (const pos of spawnPlaces) {
-      const { level, biome, landscapeIndex } = groundPatch.computeGroundBlock(
-        asVect3(pos),
-      )
-      const weightedItems =
-        Biome.instance.mappings[biome]?.nth(landscapeIndex)?.data?.flora
-      if (weightedItems) {
-        const spawnableTypes: ItemType[] = []
-        Object.entries(weightedItems).forEach(([itemType, spawnWeight]) => {
-          while (spawnWeight > 0) {
-            spawnableTypes.push(itemType)
-            spawnWeight--
-          }
-        })
-        const itemType = defaultSpawnMap.getSpawnedItem(
-          pos,
-          spawnableTypes,
-        ) as ItemType
-        if (itemType) {
-          spawnedItems[itemType] = spawnedItems[itemType] || []
-          spawnedItems[itemType]?.push(asVect3(pos, level))
-        }
-      }
-    }
-    this.spawnedItems = spawnedItems
-  }
+  // toStub() {
+  //   const { spawnedItems } = this
+  //   // return { spawnedItems, individualChunks }
+  //   return { spawnedItems }
+  // }
 
-  async bakeIndividualChunks() {
-    // request all items belonging to this patch
-    const individualChunks = []
-    let ymin = NaN
-    let ymax = NaN // compute y range
-    for await (const [itemType, spawnPlaces] of Object.entries(
-      this.spawnedItems,
-    )) {
-      for await (const spawnOrigin of spawnPlaces) {
-        const itemChunk = await ItemsInventory.getInstancedChunk(
-          itemType,
-          spawnOrigin,
-        )
-        if (itemChunk) {
-          // ChunkContainer.copySourceToTarget(itemChunk, this)
-          const { min, max } = itemChunk.bounds
-          ymin = isNaN(ymin) ? min.y : Math.min(ymin, min.y)
-          ymax = isNaN(ymax) ? max.y : Math.max(ymax, max.y)
-          const chunkBottomBlocks: Vector2[] = []
-          // iter slice blocks
-          for (const heightBuff of itemChunk.iterChunkSlice()) {
-            if (heightBuff.data[0]) chunkBottomBlocks.push(heightBuff.pos)
-          }
-          // compute blocks batch to find lowest element
-          const blocksBatch = new BlocksBatch(chunkBottomBlocks)
-          await blocksBatch.process()
-          const [lowestBlock] = blocksBatch.output.sort(
-            (b1, b2) => b1.data.level - b2.data.level,
-          )
-          const lowestLevel = lowestBlock?.data.level || 0
-          const yOffset = itemChunk.bounds.min.y - lowestLevel
-          const offset = new Vector3(0, -yOffset, 0)
-          // adjust chunk elevation according to lowest element
-          itemChunk.bounds.translate(offset)
-          individualChunks.push(itemChunk)
-        }
-      }
-    }
-    this.bounds.min.y = ymin
-    this.bounds.max.y = ymax
-    this.individualChunks = individualChunks
-  }
+  // get patchBounds() {
+  //   return asBox2(this.bounds)
+  // }
 
-  mergeIndividualChunks() {
-    const mergeChunkBounds = new Box3()
-    for (const itemChunk of this.individualChunks) {
-      mergeChunkBounds.union(itemChunk?.bounds)
-    }
-    const mergeChunk = new ChunkContainer(mergeChunkBounds, 1)
-    for (const itemChunk of this.individualChunks) {
-      ChunkContainer.copySourceToTarget(itemChunk, mergeChunk)
-    }
-    return mergeChunk
-  }
+  // get spawnedLocs() {
+  //   const spawnedLocs = []
+  //   for (const [, spawnPlaces] of Object.entries(this.spawnedItems)) {
+  //     spawnedLocs.push(...spawnPlaces)
+  //   }
+  //   return spawnedLocs
+  // }
 }
 
-ProcessingTask.registeredObjects[ItemsChunkLayer.name] = ItemsChunkLayer
+ProcessingTask.registeredObjects[ItemsBaker.name] = ItemsBaker
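The recipes build on one another: spawned items feed individual chunk baking, whose output feeds merging, which is exactly the fallthrough implemented by bakeRecipe. A hypothetical driver showing the same composition through the exported helpers (the module path and wiring are assumptions, not part of the patch's public API):

import { Box2 } from 'three'
import {
  retrieveOvergroundItems,
  bakeIndividualChunks,
  mergeIndividualChunks,
} from './ItemsProcessing'

// hypothetical: run the full recipe chain for one patch area
export const bakeMergedItems = async (patchBounds: Box2) => {
  const spawnedItems = retrieveOvergroundItems(patchBounds) // SpawnedItems step
  const chunks = await bakeIndividualChunks(spawnedItems) // IndividualChunks step
  return mergeIndividualChunks(chunks) // MergeIndividualChunks step
}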
diff --git a/src/processing/TaskProcessing.ts b/src/processing/TaskProcessing.ts
index e837ee8..56edf30 100644
--- a/src/processing/TaskProcessing.ts
+++ b/src/processing/TaskProcessing.ts
@@ -1,16 +1,21 @@
 import workerpool from 'workerpool'
 
-import { WorldEnv, WorldUtils } from '../index'
+import { WorldEnv } from '../index'
+import { parseThreeStub } from '../utils/convert'
 
 const toStubs = (res: any) =>
-  res instanceof Array ? res.map(item => item.toStub()) : res.toStub?.() || res
+  res instanceof Array
+    ? res.map(item => item.toStub?.() || item)
+    : res.toStub?.() || res
 
 const parseArgs = (...rawArgs: any) => {
-  // const args = rawArgs.map((arg: any) =>
-  const args =
-    rawArgs instanceof Array
-      ? rawArgs.map(arg => WorldUtils.convert.parseThreeStub(arg))
-      : WorldUtils.convert.parseThreeStub(rawArgs)
+  const args = rawArgs.map((rawArg: any) => {
+    const arg =
+      rawArg instanceof Array
+        ? rawArg.map(item => parseThreeStub(item))
+        : parseThreeStub(rawArg)
+    return arg
+  })
   return args
 }
 
@@ -33,9 +38,14 @@ type ProcessingTasksIndex = Record ProcessingTask>
  */
 export class ProcessingTask {
   static registeredObjects: ProcessingTasksIndex = {}
-
   static workerPool: any
   processingState: ProcessingState = ProcessingState.Waiting
+  processingParams: any = {}
+  result: any
+  deferredPromise
+  resolveDeferredPromise: any
+  scheduled = false
+
   // pendingTask: any
 
   // static instances: ProcessingTask[] = []
@@ -91,6 +101,13 @@ export class ProcessingTask {
     }
   }
 
+  constructor() {
+    const deferredPromise = new Promise(resolve => {
+      this.resolveDeferredPromise = resolve
+    })
+    this.deferredPromise = deferredPromise
+  }
+
   get awaitingProcessing() {
     return (
       this.processingState !== ProcessingState.Done &&
@@ -98,13 +115,20 @@ export class ProcessingTask {
     )
   }
 
+  // getDeferredPromise = () => {
+  //   this.deferredPromise = this.deferredPromise || new Promise(resolve => {
+  //     this.resolveDeferredPromise = resolve
+  //   })
+  //   return this.deferredPromise
+  // }
+
   /**
    * pass object's creation parameters to worker for replication
    * @param processingParams
    * @param processingUnit
    */
   async delegate(
-    processingParams = {},
+    processingParams = this.processingParams,
     processingUnit = ProcessingTask.workerPool,
   ) {
     if (this.processingState === ProcessingState.Done) return undefined
@@ -121,16 +145,34 @@ export class ProcessingTask {
         // throw e
       })
       const stubs = await pendingTask
-      const output = stubs ? this.reconcile(stubs) : null
+      const taskRes = stubs ? this.reconcile(stubs) : null
+      this.result = taskRes
       this.processingState =
        this.processingState === ProcessingState.Pending
          ? ProcessingState.Done
          : this.processingState
      // this.pendingTask = null
-      return output // this.reconcile(stubs)
+      // this.onTaskProcessed(taskRes)
+      this.resolveDeferredPromise(taskRes)
+      return taskRes // this.reconcile(stubs)
     }
   }
 
+  deferProcessing(delay = 0, onDeferredStart?: any) {
+    if (!this.scheduled) {
+      this.scheduled = true
+      // promise that will resolve when task processing begins
+      return new Promise(resolve => {
+        setTimeout(() => {
+          this.delegate()
+          onDeferredStart?.()
+          resolve(this)
+        }, delay)
+      })
+    }
+    return null
+  }
+
   // cancelPendingTask() {
   //   if (!this.pendingTask) {
   //     console.warn(`no pending task running`)
@@ -172,6 +214,10 @@ export class ProcessingTask {
     console.log(processingParams)
   }
 
+  // onTaskProcessed(taskRes: any) {
+
+  // }
+
   // toStub(): any {
   //   const { stubs } = this
   //   stubs instanceof Array
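The constructor added above wires up a deferred promise: the resolver is captured on the instance so delegate can settle it whenever the worker replies, letting any caller await a task's result before the task has even been scheduled. The same pattern in isolation:

// Standalone sketch of the deferred-promise pattern used by ProcessingTask
class Deferred<T> {
  promise: Promise<T>
  resolve!: (value: T) => void
  constructor() {
    // capture the resolver so completion can be triggered externally
    this.promise = new Promise<T>(res => (this.resolve = res))
  }
}

const taskDone = new Deferred<string>()
taskDone.promise.then(result => console.log('task finished:', result))
// later, e.g. once a worker replies (the timeout stands in for real work)
setTimeout(() => taskDone.resolve('chunks ready'), 10)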
diff --git a/src/procgen/Biome.ts b/src/procgen/Biome.ts
index 4c1d3da..c3ef354 100644
--- a/src/procgen/Biome.ts
+++ b/src/procgen/Biome.ts
@@ -4,13 +4,21 @@ import { smoothstep } from 'three/src/math/MathUtils'
 
 import { LinkedList } from '../datacontainers/LinkedList'
 import {
-  BiomeLandscapeKey,
+  BiomeLandKey,
   BiomesConf,
   BiomesRawConf,
-  LandscapeFields,
-  LandscapesConf,
+  LandConfigFields,
+  BiomeLands,
 } from '../utils/types'
-import { WorldEnv, WorldUtils } from '../index'
+import { WorldEnv } from '../index'
+import { clamp, roundToDec } from '../utils/math'
+import {
+  findMatchingRange,
+  MappingRangeSorter,
+  typesNumbering,
+} from '../utils/misc'
+import { asVect3 } from '../utils/convert'
+import { ItemType } from '../factory/ItemsFactory'
 
 import { ProcLayer } from './ProcLayer'
 
@@ -86,7 +94,7 @@ export const BiomeNumericType: Record = {
   [BiomeType.Grassland]: 0,
 }
 
-WorldUtils.misc.typesNumbering(BiomeNumericType)
+typesNumbering(BiomeNumericType)
 
 export const ReverseBiomeNumericType: Record = {}
 Object.keys(BiomeNumericType).forEach(
@@ -128,6 +136,29 @@ const BiomesMapping: Record> = {
   },
 }
 
+/**
+ * weightedFloraTypes: weighted item types expected to spawn
+ * at a given location
+ */
+const expandWeightedFloraTypes = (
+  weightedFloraTypes: Record<ItemType, number>,
+) => {
+  const floraTypes: ItemType[] = []
+  if (weightedFloraTypes) {
+    Object.entries(weightedFloraTypes).forEach(([itemType, typeWeight]) => {
+      while (typeWeight > 0) {
+        floraTypes.push(itemType)
+        typeWeight--
+      }
+    })
+  }
+  return floraTypes
+}
+
+type PreprocessedLandConf = {
+  floraItems: ItemType[]
+}
+
 /**
  * assign block types: water, sand, grass, mud, rock, snow, ..
  */
@@ -159,7 +190,7 @@ export class Biome {
     high: 0.7,
   }
 
-  indexedConf = new Map()
+  preprocessed = new Map()
 
   constructor() {
     this.heatmap = new ProcLayer('heatmap')
@@ -270,7 +301,7 @@ export class Biome {
     })
     Object.keys(biomeContribs).forEach(
       k =>
-        (biomeContribs[k as BiomeType] = WorldUtils.math.roundToDec(
+        (biomeContribs[k as BiomeType] = roundToDec(
           biomeContribs[k as BiomeType],
           2,
         )),
@@ -282,70 +313,80 @@ export class Biome {
     return biomeContribs
   }
 
+  preprocessLandConfig(
+    biomeType: BiomeType,
+    biomeConfig: LinkedList,
+  ) {
+    const configs = biomeConfig.first().forwardIter()
+    for (const conf of configs) {
+      const landConf = conf.data
+      const confKey = biomeType + '_' + landConf.key
+      // console.log(confKey)
+      const floraItems = landConf.flora
+        ? expandWeightedFloraTypes(landConf.flora)
+        : []
+      this.preprocessed.set(confKey, {
+        floraItems,
+      })
+      // this.indexedConf.set(conf.data.key, conf)
+    }
+  }
+
   parseBiomesConfig(biomesRawConf: BiomesRawConf) {
-    // Object.entries(biomeConfigs).forEach(([biomeType, biomeConf]) => {
     // complete missing data
-    for (const [biomeType, biomeConf] of Object.entries(biomesRawConf)) {
-      // for (const [landId, landConf] of Object.entries(biomeConf)) {
-      //   landConf.key = biomeType + '_' + landId
-      // }
-
-      const configItems = Object.values(biomeConf) as LandscapeFields[]
+    for (const [biomeType, biomeLands] of Object.entries(biomesRawConf)) {
+      for (const [landId, landConf] of Object.entries(biomeLands)) {
+        landConf.key = landId
+      }
+      const configItems = Object.values(biomeLands) as LandConfigFields[]
       const mappingRanges = LinkedList.fromArrayAfterSorting(
         configItems,
-        WorldUtils.misc.MappingRangeSorter,
+        MappingRangeSorter,
       )
       this.mappings[biomeType as BiomeType] = mappingRanges
-      // index configs
-      // const confIter = mappingRanges.first().forwardIter()
-      // for (const conf of confIter) {
-      //   this.indexedConf.set(conf.data.key, conf)
-      // }
+      this.preprocessLandConfig(biomeType as BiomeType, mappingRanges)
     }
-    // })
   }
 
   landscapeTransition = (
     groundPos: Vector2,
     baseHeight: number,
-    landscapeConf: LandscapesConf,
+    biomeLands: BiomeLands,
   ) => {
     const period = 0.005 * Math.pow(2, 2)
     const mapCoords = groundPos.clone().multiplyScalar(period)
-    const posRandomizerVal = this.posRandomizer.eval(
-      WorldUtils.convert.asVect3(mapCoords),
-    )
+    const posRandomizerVal = this.posRandomizer.eval(asVect3(mapCoords))
     // add some height variations to break painting monotony
-    const { amplitude }: any = landscapeConf.data
+    const { amplitude }: any = biomeLands.data
     const bounds = {
-      lower: landscapeConf.data.x,
-      upper: landscapeConf.next?.data.x || 1,
+      lower: biomeLands.data.x,
+      upper: biomeLands.next?.data.x || 1,
     }
     let blockType
     // randomize on lower side
     if (
-      landscapeConf.prev &&
+      biomeLands.prev &&
       baseHeight - bounds.lower <= bounds.upper - baseHeight &&
       baseHeight - amplitude.low < bounds.lower
     ) {
       const heightVariation = posRandomizerVal * amplitude.low
       const varyingHeight = baseHeight - heightVariation
       blockType =
-        varyingHeight < landscapeConf.data.x
-          ? landscapeConf.prev?.data.type
-          : landscapeConf.data.type
+        varyingHeight < biomeLands.data.x
+          ? biomeLands.prev?.data.type
+          : biomeLands.data.type
     }
     // randomize on upper side
-    else if (landscapeConf.next && baseHeight + amplitude.high > bounds.upper) {
+    else if (biomeLands.next && baseHeight + amplitude.high > bounds.upper) {
       // let heightVariation =
       //   Utils.clamp(this.paintingRandomness.eval(groundPos), 0.5, 1) * randomness.high
      // heightVariation = heightVariation > 0 ? (heightVariation - 0.5) * 2 : 0
       const heightVariation = posRandomizerVal * amplitude.high
       const varyingHeight = baseHeight + heightVariation
       blockType =
-        varyingHeight > landscapeConf.next.data.x
-          ? landscapeConf.next.data.type
-          : landscapeConf.data.type
+        varyingHeight > biomeLands.next.data.x
+          ? biomeLands.next.data.type
+          : biomeLands.data.type
     }
     return blockType
   }
@@ -357,12 +398,9 @@
   ) => {
     const { seaLevel } = WorldEnv.current.biomes
     rawVal = includeSea ? Math.max(rawVal, seaLevel) : rawVal
-    rawVal = WorldUtils.math.clamp(rawVal, 0, 1)
+    rawVal = clamp(rawVal, 0, 1)
     const firstItem = this.mappings[biomeType]
-    const confId = WorldUtils.misc.findMatchingRange(
-      rawVal as number,
-      firstItem,
-    )
+    const confId = findMatchingRange(rawVal as number, firstItem)
     const current = firstItem.nth(confId)
     const upper = current?.next || current
     const min = new Vector2(current.data.x, current.data.y)
@@ -385,12 +423,15 @@
     return blockLevel
   }
 
+  getBiomeLandConf = (biomeType: BiomeType, landId: string) => {
+    const confKey = biomeType + '_' + landId
+    const biomeConf = this.preprocessed.get(confKey)
+    return biomeConf
+  }
+
   getBiomeConf = (rawVal: number, biomeType: BiomeType) => {
     const firstItem = this.mappings[biomeType]
-    const confId = WorldUtils.misc.findMatchingRange(
-      rawVal as number,
-      firstItem,
-    )
+    const confId = findMatchingRange(rawVal as number, firstItem)
     let currentItem = firstItem.nth(confId)
     while (!currentItem?.data.type && currentItem?.prev) {
       currentItem = currentItem.prev
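expandWeightedFloraTypes turns spawn weights into plain repetition: an item with weight N occurs N times in the expanded list, so a uniform random pick over that list reproduces the configured odds. A worked example of the same expansion:

// weight expansion as performed during Biome.preprocessLandConfig
const expandWeighted = (weighted: Record<string, number>) => {
  const out: string[] = []
  for (const [itemType, weight] of Object.entries(weighted)) {
    for (let i = 0; i < weight; i++) out.push(itemType) // repeat N times
  }
  return out
}

console.log(expandWeighted({ oak: 3, birch: 1 }))
// ['oak', 'oak', 'oak', 'birch'], so oak is drawn three times as often as birch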
diff --git a/src/utils/convert.ts b/src/utils/convert.ts
index c7f2e1b..3fe2bc3 100644
--- a/src/utils/convert.ts
+++ b/src/utils/convert.ts
@@ -117,6 +117,16 @@ const asPatchBounds = (patchKey: string, patchDims: Vector2) => {
   return bbox
 }
 
+const getScalarId = (scalarValue: number, size: number) => {
+  const scalarId = Math.floor(scalarValue / size)
+  return scalarId
+}
+
+const getUpperScalarId = (scalarValue: number, size: number) => {
+  const scalarId = Math.ceil(scalarValue / size)
+  return scalarId
+}
+
 const getPatchId = (position: Vector2, patchSize: Vector2) => {
   const patchId = position.clone().divide(patchSize).floor()
   return patchId
@@ -191,6 +201,8 @@ export {
   asBox2,
   asBox3,
   parsePatchKey,
+  getScalarId,
+  getUpperScalarId,
   getPatchId,
   patchUpperId,
   serializePatchId,
diff --git a/src/utils/misc.ts b/src/utils/misc.ts
index 39a6896..6777c7a 100644
--- a/src/utils/misc.ts
+++ b/src/utils/misc.ts
@@ -1,9 +1,9 @@
-import { LandscapeFields, LandscapesConf } from './types'
+import { BiomeLands, LandConfigFields } from './types'
 
 // const MappingRangeFinder = (item: LinkedList, inputVal: number) => item.next && inputVal > (item.next.data as MappingData).x
 export const MappingRangeSorter = (
-  item1: LandscapeFields,
-  item2: LandscapeFields,
+  item1: LandConfigFields,
+  item2: LandConfigFields,
 ) => item1.x - item2.x
 
 /**
@@ -13,7 +13,7 @@ export const MappingRangeSorter = (
  */
 export const findMatchingRange = (
   inputVal: number,
-  noiseMappings: LandscapesConf,
+  noiseMappings: BiomeLands,
 ) => {
   let match = noiseMappings.first()
   let i = 1
diff --git a/src/utils/types.ts b/src/utils/types.ts
index c80bd2b..c0a9268 100644
--- a/src/utils/types.ts
+++ b/src/utils/types.ts
@@ -138,6 +138,9 @@ export type PatchId = Vector2
 export type ChunkKey = string
 export type ChunkId = Vector3
 
+export type PatchIndex = Record
+export type ChunkIndex = Record
+
 // export enum TerrainType {
 //   SEA,
 //   BEACH,
@@ -149,11 +152,11 @@ export type ChunkId = Vector3
 //   MOUNTAINS_TOP,
 // }
 
-export type LandscapeId = string // landscape id assigned to noise level
-export type BiomeLandscapeKey = string // combination of biomeType and LandscapeId
+export type LandConfigId = string // landscape id assigned to noise level
+export type BiomeLandKey = string // combination of BiomeType and LandId
 
-export type LandscapeFields = {
-  key: BiomeLandscapeKey
+export type LandConfigFields = {
+  key: BiomeLandKey
   x: number // noise value
   y: number // height noise mapping
   type: BlockType // ground surface
@@ -165,7 +168,7 @@ export type LandscapeFields = {
 }
 
 // Biome landscapes mappings
-export type LandscapesRawConf = Record>
-export type BiomesRawConf = Record
-export type LandscapesConf = LinkedList
-export type BiomesConf = Record
+export type BiomeLandsRawConf = Record>
+export type BiomesRawConf = Record
+export type BiomeLands = LinkedList
+export type BiomesConf = Record
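For context on how the renamed land-mapping types fit together: each land entry covers the noise interval from its own x threshold up to the next entry's x, entries are ordered by MappingRangeSorter, and findMatchingRange walks the linked list to select the land containing a raw noise value. A simplified standalone sketch using a plain array in place of the library's LinkedList (keys and thresholds are made up for illustration):

// Simplified land lookup: each land covers [x, nextLand.x) on the noise axis
type Land = { key: string; x: number }

const lands: Land[] = [
  { key: 'sea', x: 0 },
  { key: 'beach', x: 0.3 },
  { key: 'plains', x: 0.4 },
  { key: 'mountains', x: 0.8 },
].sort((l1, l2) => l1.x - l2.x) // MappingRangeSorter equivalent

// pick the land with the highest threshold not exceeding the noise value
const findMatchingLand = (noiseVal: number) =>
  [...lands].reverse().find(land => noiseVal >= land.x) ?? lands[0]

console.log(findMatchingLand(0.35)?.key) // 'beach'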