Fixed more failing tests to match nested sets

This commit is contained in:
ThaumRystra
2023-09-28 20:57:35 +02:00
parent 60c13643fb
commit 09c66aff0b
19 changed files with 198 additions and 78 deletions

View File

@@ -1,4 +1,3 @@
import findAncestorByType from '/imports/api/engine/computation/utility/findAncestorByType.js';
import { traverse } from '/imports/parser/resolve.js';
export default function linkCalculationDependencies(dependencyGraph, prop, { propsById }) {
@@ -57,3 +56,14 @@ function getAncestorProp(type, memo, prop, propsById) {
return ancestorProp;
}
}
function findAncestorByType(prop, type, propsById) {
  // Walk up the parent chain and return the nearest ancestor whose type
  // matches `type`. Returns undefined when `prop` is missing, has no parent,
  // or no ancestor of that type exists in `propsById`.
  if (!prop || !prop.parentId) return;
  for (let current = propsById[prop.parentId]; current; current = propsById[current.parentId]) {
    if (current.type === type) return current;
  }
}

View File

@@ -13,7 +13,8 @@ var testProperties = [
clean({
_id: 'slotId',
type: 'propertySlot',
ancestors: [{ id: 'charId' }],
left: 1,
right: 8,
}),
// Children
clean({
@@ -21,16 +22,22 @@ var testProperties = [
type: 'folder',
slotQuantityFilled: 3,
slotFillerType: 'item',
ancestors: [{ id: 'charId' }, { id: 'slotId' }],
parentId: 'slotId',
left: 2,
right: 3,
}),
clean({
_id: 'slotChildId',
type: 'item',
ancestors: [{ id: 'charId' }, { id: 'slotId' }],
parentId: 'slotId',
left: 4,
right: 7,
}),
clean({
_id: 'slotGrandchildId',
type: 'effect',
ancestors: [{ id: 'charId' }, { id: 'slotId' }, { id: 'slotChildId' }],
parentId: 'slotChildId',
left: 5,
right: 6,
}),
];

View File

@@ -2,7 +2,7 @@ import { buildComputationFromProps } from '/imports/api/engine/computation/build
import { assert } from 'chai';
import clean from '../../utility/cleanProp.testFn.js';
export default function(){
export default function () {
const computation = buildComputationFromProps(testProperties);
const hasLink = computation.dependencyGraph.hasLink;
const prop = (id) => computation.propsById[id];
@@ -32,7 +32,8 @@ var testProperties = [
clean({
_id: 'spellListId',
type: 'spellList',
ancestors: [{id: 'charId'}],
left: 1,
right: 6,
}),
clean({
_id: 'childId',
@@ -40,7 +41,9 @@ var testProperties = [
description: {
text: 'DC {#spellList.dc} save or suck'
},
ancestors: [{id: 'charId'}, {id: 'spellListId'}],
parentId: 'spellListId',
left: 2,
right: 5,
}),
clean({
_id: 'grandchildId',
@@ -48,7 +51,9 @@ var testProperties = [
dc: {
calculation: '#spellList.dc + strength + wisdom.modifier'
},
ancestors: [{id: 'charId'}, {id: 'spellListId'}, {id: 'childId'}],
parentId: 'childId',
left: 3,
right: 4,
}),
clean({
_id: 'strengthId',
@@ -57,6 +62,7 @@ var testProperties = [
baseValue: {
calculation: '15 + ',
},
ancestors: [{id: 'charId'}],
left: 7,
right: 8,
}),
];

View File

@@ -1,8 +1,9 @@
import { buildComputationFromProps } from '/imports/api/engine/computation/buildCreatureComputation.js';
import { assert } from 'chai';
import clean from '../../utility/cleanProp.testFn.js';
import { applyNestedSetProperties } from '/imports/api/parenting/parentingFunctions';
export default function(){
export default function () {
const computation = buildComputationFromProps(testProperties);
const hasLink = computation.dependencyGraph.hasLink;
@@ -62,28 +63,28 @@ var testProperties = [
type: 'item',
equipped: true,
attuned: true,
ancestors: [{id: 'charId'}],
}),
clean({
_id: 'containerId',
type: 'container',
carried: true,
ancestors: [{id: 'charId'}],
}),
clean({
_id: 'childContainerId',
type: 'container',
carried: true,
ancestors: [{id: 'charId'}, {id: 'containerId'}],
}),
clean({
_id: 'childItemId',
type: 'item',
ancestors: [{id: 'charId'}, {id: 'containerId'}],
parentId: 'containerId',
}),
clean({
_id: 'grandchildItemId',
type: 'item',
ancestors: [{id: 'charId'}, {id: 'containerId'}, {id: 'childContainerId'}],
parentId: 'childContainerId',
}),
clean({
_id: 'childItemId',
type: 'item',
parentId: 'containerId',
}),
];
applyNestedSetProperties(testProperties);

View File

@@ -32,9 +32,7 @@ function aggregateCalculationEffects(node, computation) {
operation: linkedNode.data.operation,
amount: linkedNode.data.amount && {
value: linkedNode.data.amount.value,
//parseNode: linkedNode.data.amount.parseNode,
},
// ancestors: linkedNode.data.ancestors,
});
},
true // enumerate only outbound links

View File

@@ -1,5 +1,5 @@
export default function aggregateDefinition({node, linkedNode, link}){
export default function aggregateDefinition({ node, linkedNode, link }) {
// Look at all definition links
if (link.data !== 'definition') return;
@@ -12,7 +12,7 @@ export default function aggregateDefinition({node, linkedNode, link}){
!definingProp ||
prop.type !== 'pointBuyRow' && (
definingProp.type === 'pointBuyRow' ||
prop.order > definingProp.order
prop.left > definingProp.left
)
) {
// override the current defining prop
@@ -50,12 +50,12 @@ export default function aggregateDefinition({node, linkedNode, link}){
type: prop.type,
});
}
if (node.data.baseValue === undefined || propBaseValue > node.data.baseValue){
if (node.data.baseValue === undefined || propBaseValue > node.data.baseValue) {
node.data.baseValue = propBaseValue;
}
}
function overrideProp(prop, node){
function overrideProp(prop, node) {
if (!prop) return;
prop.overridden = true;
if (!node.data.overriddenProps) node.data.overriddenProps = [];

View File

@@ -39,7 +39,6 @@ export default function aggregateEffect({ node, linkedNode, link }) {
amount: effectAmount,
type: linkedNode.data.type,
text: linkedNode.data.text,
// ancestors: linkedNode.data.ancestors,
});
// get a shorter reference to the aggregator document

View File

@@ -10,7 +10,7 @@ export default function aggregateEventDefinition({ node, linkedNode, link }) {
// Find the last defining event
if (
!definingEvent ||
prop.order > definingEvent.order
prop.left > definingEvent.left
) {
// override the current defining prop
if (definingEvent) definingEvent.overridden = true;

View File

@@ -3,7 +3,7 @@ import { assert } from 'chai';
import computeCreatureComputation from '../../computeCreatureComputation.js';
import clean from '../../utility/cleanProp.testFn.js';
export default function(){
export default function () {
const computation = buildComputationFromProps(testProperties);
computeCreatureComputation(computation);
@@ -33,7 +33,6 @@ var testProperties = [
clean({
_id: 'actionId',
type: 'action',
ancestors: [{id: 'charId'}],
summary: {
text: 'test summary {1 + 2} without referencing anything {3 + 4}',
},
@@ -61,14 +60,17 @@ var testProperties = [
calculation: 'nonExistantProperty + 7',
},
usesUsed: 5,
left: 1,
right: 2,
}),
clean({
_id: 'rolledDescriptionId',
type: 'action',
ancestors: [{id: 'charId'}],
summary: {
text: 'test roll gets compiled {4 + (2 + 2)} properly',
},
left: 3,
right: 4,
}),
clean({
_id: 'numItemsConumedId',
@@ -77,6 +79,8 @@ var testProperties = [
baseValue: {
calculation: '3',
},
left: 5,
right: 6,
}),
clean({
_id: 'numResourceConumedId',
@@ -85,6 +89,8 @@ var testProperties = [
baseValue: {
calculation: '4',
},
left: 7,
right: 8,
}),
clean({
_id: 'resourceVarId',
@@ -94,6 +100,8 @@ var testProperties = [
baseValue: {
calculation: '9',
},
left: 9,
right: 10,
}),
clean({
_id: 'inlineRefResourceId',
@@ -102,6 +110,8 @@ var testProperties = [
baseValue: {
calculation: '1 + 5',
},
left: 11,
right: 12,
}),
clean({
_id: 'arrowId',
@@ -110,5 +120,7 @@ var testProperties = [
quantity: 27,
icon: 'itemIcon',
color: 'itemColor',
left: 13,
right: 14,
}),
];

View File

@@ -3,7 +3,7 @@ import { assert } from 'chai';
import computeCreatureComputation from '../../computeCreatureComputation.js';
import clean from '../../utility/cleanProp.testFn.js';
export default function(){
export default function () {
const computation = buildComputationFromProps(testProperties);
computeCreatureComputation(computation);
const prop = id => computation.propsById[id];
@@ -27,6 +27,8 @@ var testProperties = [
_id: 'emptyId',
type: 'attribute',
attributeType: 'ability',
left: 1,
right: 2,
}),
clean({
_id: 'noVariableNameId',
@@ -35,6 +37,8 @@ var testProperties = [
baseValue: {
calculation: '8'
},
left: 3,
right: 4,
}),
clean({
_id: 'strengthId',
@@ -44,6 +48,8 @@ var testProperties = [
baseValue: {
calculation: '12'
},
left: 5,
right: 6,
}),
clean({
_id: 'overriddenDexId',
@@ -54,6 +60,8 @@ var testProperties = [
baseValue: {
calculation: '15'
},
left: 7,
right: 8,
}),
clean({
_id: 'dexterityId',
@@ -64,6 +72,8 @@ var testProperties = [
baseValue: {
calculation: '15'
},
left: 9,
right: 10,
}),
clean({
_id: 'constitutionId',
@@ -73,6 +83,8 @@ var testProperties = [
baseValue: {
calculation: '21'
},
left: 11,
right: 12,
}),
clean({
_id: 'referencesDexId',
@@ -81,6 +93,8 @@ var testProperties = [
baseValue: {
calculation: 'dexterity.modifier + 2'
},
left: 13,
right: 14,
}),
clean({
_id: 'hitDiceId',
@@ -91,6 +105,8 @@ var testProperties = [
baseValue: {
calculation: '4'
},
left: 15,
right: 16,
}),
clean({
_id: 'parseErrorId',
@@ -100,5 +116,7 @@ var testProperties = [
baseValue: {
calculation: '12 +'
},
left: 17,
right: 18,
}),
];

View File

@@ -2,8 +2,9 @@ import { buildComputationFromProps } from '/imports/api/engine/computation/build
import { assert } from 'chai';
import computeCreatureComputation from '../../computeCreatureComputation.js';
import clean from '../../utility/cleanProp.testFn.js';
import { applyNestedSetProperties, compareOrder } from '/imports/api/parenting/parentingFunctions';
export default function(){
export default function () {
const computation = buildComputationFromProps(testProperties);
computeCreatureComputation(computation);
const prop = id => computation.propsById[id];
@@ -13,9 +14,8 @@ export default function(){
assert.equal(scope('valueEquipment'), 3);
assert.equal(scope('itemsAttuned'), 1);
assert.equal(prop('childContainerId').carriedWeight, 69);
assert.equal(prop('childContainerId').contentsWeight, 69);
assert.equal(prop('childContainerId').carriedWeight, 69, 'Calculates container carried weight correctly');
assert.equal(prop('childContainerId').contentsWeight, 69, 'Calculates container contents weight correctly');
assert.equal(scope('weightCarried'), 104);
assert.equal(scope('valueCarried'), 129);
@@ -32,7 +32,6 @@ var testProperties = [
attuned: true,
weight: 2,
value: 3,
ancestors: [{id: 'charId'}],
}),
clean({
_id: 'containerId',
@@ -40,22 +39,13 @@ var testProperties = [
carried: true,
weight: 5,
value: 7,
ancestors: [{id: 'charId'}],
}),
clean({
_id: 'childContainerId',
type: 'container',
carried: true,
weight: 11,
value: 13,
ancestors: [{id: 'charId'}, {id: 'containerId'}],
}),
clean({
_id: 'childItemId',
type: 'item',
weight: 17,
value: 19,
ancestors: [{id: 'charId'}, {id: 'containerId'}],
parentId: 'containerId',
}),
clean({
_id: 'grandchildItemId',
@@ -63,6 +53,16 @@ var testProperties = [
weight: 23, // 69 total
value: 29, // 87 total
quantity: 3,
ancestors: [{id: 'charId'}, {id: 'containerId'}, {id: 'childContainerId'}],
parentId: 'childContainerId',
}),
clean({
_id: 'childContainerId',
type: 'container',
carried: true,
weight: 11,
value: 13,
parentId: 'containerId',
}),
];
applyNestedSetProperties(testProperties);
testProperties.sort(compareOrder);

View File

@@ -2,9 +2,11 @@ import { buildComputationFromProps } from '/imports/api/engine/computation/build
import { assert } from 'chai';
import computeCreatureComputation from '../../computeCreatureComputation.js';
import clean from '../../utility/cleanProp.testFn.js';
import { applyNestedSetProperties, compareOrder } from '/imports/api/parenting/parentingFunctions';
export default function () {
const computation = buildComputationFromProps(testProperties);
const hasLink = computation.dependencyGraph.hasLink;
computeCreatureComputation(computation);
const prop = id => computation.propsById[id];
assert.equal(
@@ -15,6 +17,10 @@ export default function () {
prop('strengthId').modifier, -1,
'The proficiency bonus should not change the strength modifier'
);
assert.isTrue(
!!hasLink('actionId.attackRoll', 'tagTargetedProficiency'),
'There should be a link from the proficiency to the attack roll'
);
assert.exists(prop('actionId').attackRoll.proficiencies, 'The proficiency aggregator should be here')
assert.exists(prop('actionId').attackRoll.proficiencies[0], 'The proficiency should be here')
// attack roll = strength.mod + proficiencyBonus/2 rounded down
@@ -62,3 +68,5 @@ var testProperties = [
targetTags: ['martial weapon']
}),
];
applyNestedSetProperties(testProperties);
testProperties.sort(compareOrder);

View File

@@ -1,6 +1,9 @@
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
// Clean a test property against its creature-property schema, first ensuring
// it is rooted in the standard test creature so schema validation passes.
export default function cleanProp(prop) {
  if (!prop.root) {
    prop.root = { collection: 'creatures', id: 'testCreature' };
  }
  return CreatureProperties.simpleSchema(prop).clean(prop);
}

View File

@@ -1,10 +0,0 @@
// Find the nearest ancestor of `prop` with the given type, searching the
// `ancestors` list from closest (last entry) to furthest (first entry).
// Returns undefined when `prop` is missing, has no ancestors, or none match.
export default function findAncestorByType(prop, type, propsById) {
  if (!prop?.ancestors) return;
  for (const { id } of [...prop.ancestors].reverse()) {
    const candidate = propsById[id];
    if (candidate?.type === type) return candidate;
  }
}

View File

@@ -21,7 +21,6 @@ export default function writeScope(creatureId, computation) {
// Remove large properties that aren't likely to be accessed
delete scope[key].parent;
delete scope[key].ancestors;
// Remove empty keys
for (const subKey in scope[key]) {

View File

@@ -74,6 +74,25 @@ describe('Parenting with nested sets', function () {
op('dbm', 8, 9),
]);
});
it('Can recalculate left and right for docs with set parents in random order', function () {
const docArray = [
doc('MongoDB', 71, 33, 'Databases'),
doc('Programming', 72, 33, 'Books'),
doc('Languages', 73, 33, 'Programming'),
doc('Books', 74, 33, undefined),
doc('Databases', 75, 33, 'Programming'),
doc('dbm', 76, 33, 'Databases'),
];
const ops = calculateNestedSetOperations(docArray);
assert.deepEqual(ops, [
op('Books', 1, 12),
op('Programming', 2, 11),
op('Languages', 3, 4),
op('Databases', 5, 10),
op('MongoDB', 6, 7),
op('dbm', 8, 9),
]);
});
});
describe('Document tree filters can fetch other documents based on their position in the tree', function () {

View File

@@ -381,18 +381,23 @@ export async function rebuildNestedSets(collection: Mongo.Collection<TreeDoc>, r
await writeBulkOperations(collection, operations);
}
/** Calculates the operations needed to make a tree of nested sets
* Warning: Will reverse the order of docs!
* Walk around the tree numbering left on the way down and right on the way up like so:
*
* 1 Books 12
* ┃
* 2 Programming 11
* ┏━━━━━━━━┻━━━━━━━━━┓
* 3 Languages 4 5 Databases 10
* ┏━━━━━━━┻━━━━━━━┓
* 6 MongoDB 7 8 dbm 9
*
*
* @param docs
* @returns
*/
export function calculateNestedSetOperations(docs: TreeDoc[]) {
// Walk around the tree numbering left on the way down and right on the way up like so:
/*
* 1 Books 12
* ┃
* 2 Programming 11
* ┏━━━━━━━━┻━━━━━━━━━┓
* 3 Languages 4 5 Databases 10
* ┏━━━━━━━┻━━━━━━━┓
* 6 MongoDB 7 8 dbm 9
*/
// Get the forest, but in reverse order so that the stack always has the first documents on top
const { forest: stack, orphanIds } = docsToForestByParentId(reverse(docs));
const removeMissingParentsOp = orphanIds.length ? {
updateMany: {
@@ -442,6 +447,51 @@ export function calculateNestedSetOperations(docs: TreeDoc[]) {
return operations;
}
/**
 * Same as calculateNestedSetOperations, but writes the computed left/right
 * values directly onto the given documents instead of returning operations.
 * Mostly used to create testing documents.
 * @param docs Tree documents to mutate in place (left/right set, orphan parentId removed)
 */
export function applyNestedSetProperties(docs: TreeDoc[]) {
  // Walk around the tree numbering left on the way down and right on the way up.
  // Reverse a copy (not the caller's array) so the stack pops docs in original order.
  const { forest: stack, orphanIds } = docsToForestByParentId(reverse([...docs]));
  // Set gives O(1) membership checks; Array.includes inside the loop was O(n·m)
  const orphanIdSet = new Set(orphanIds);
  const visitedNodes = new Set();
  const visitedChildren = new Set();
  let count = 1;
  while (stack.length) {
    const top = stack[stack.length - 1];
    if (orphanIdSet.has(top.doc._id)) {
      // Docs whose parent is missing are treated as roots
      delete top.doc.parentId;
    }
    if (visitedNodes.has(top)) {
      // We've arrived at this node again for some reason, this shouldn't happen
      console.warn('applyNestedSetProperties: node visited twice, possible parent cycle');
      stack.pop();
    } else if (visitedChildren.has(top)) {
      // We've arrived at this node after visiting the children,
      // we must be on the way up, mark the right number
      visitedNodes.add(top);
      stack.pop();
      top.doc.right = count;
      count += 1;
    } else {
      // We're arriving at this node for the first time
      // We must be on the way down, mark the left number and go visit the children
      visitedChildren.add(top);
      stack.push(...top.children);
      top.doc.left = count;
      count += 1;
    }
  }
}
/**
* Write some number of bulk operations to the collection, uses a bulk write on the server
* and iterates through regular updates on the client

View File

@@ -43,7 +43,7 @@ export function migrateProperty({ collection, reversed, prop }) {
let migratedProp = transformFields(prop, transforms, reversed);
const schema = collection.simpleSchema({ type: migratedProp.type });
// Only clean if the schema version matches our destination version
if (!reversed && SCHEMA_VERSION >= 1) {
if (!reversed && SCHEMA_VERSION == 1) {
try {
migratedProp = schema.clean(migratedProp);
schema.validate(migratedProp);
@@ -81,6 +81,7 @@ const transformsByPropType = {
...getComputedPropertyTransforms('rollBonus', 'attackRoll'),
//change type to action
{ from: 'type', to: 'type', up: () => 'action' },
{ from: 'results' },
],
'attribute': [
// from: baseValue must be first or else it will delete the field we need

View File

@@ -124,7 +124,6 @@ const expectedMigratedAttribute = {
damage: 3,
value: 17,
constitutionMod: 2,
dirty: true,
}
const exampleAttack = {
@@ -183,6 +182,7 @@ const expectedMigratedAttack = {
},
'attackRoll': {
calculation: 'dexterity.modifier + proficiencyBonus + 2 - hp.total + hp.value',
value: 6,
},
'type': 'action',
'name': 'Claws',
@@ -221,7 +221,6 @@ describe('migrateProperty', function () {
prop: newAction,
reversed: true,
});
delete reversedAction.dirty;
assert.deepEqual(action, exampleAction, 'action should not be bashed');
assert.deepEqual(exampleAction, reversedAction, 'operation should be reversible');
});
@@ -237,14 +236,14 @@ describe('migrateProperty', function () {
'Attribute should match the expected result');
});
it('Migrates attacks as expected', function () {
const attribute = {
const attack = {
...exampleAttack
};
const newAttribute = migrateProperty({
const newAttack = migrateProperty({
collection: LibraryNodes,
prop: attribute
prop: attack
});
assert.deepEqual(newAttribute, expectedMigratedAttack,
'Attribute should match the expected result');
assert.deepEqual(newAttack, expectedMigratedAttack,
'Attack should match the expected result');
});
});