Compare commits
1 Commits
| Author | SHA1 | Date |
|---|---|---|
|
|
8b3f11603a |
14
.codio
Normal file
14
.codio
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
// Configure your Run and Preview buttons here.
|
||||
|
||||
// Run button configuration
|
||||
"commands": {
|
||||
"Run Meteor": "cd rpg-docs \n ROOT_URL=http://period-sheriff.codio.io:3000 meteor"
|
||||
},
|
||||
|
||||
// Preview button configuration
|
||||
"preview": {
|
||||
"Preview": "http://period-sheriff-3000.codio.io",
|
||||
"Ungit": "https://period-sheriff-9501.codio.io/#/repository?path=/home/codio/workspace"
|
||||
}
|
||||
}
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1 +0,0 @@
|
||||
build
|
||||
3
.gitmodules
vendored
3
.gitmodules
vendored
@@ -1,3 +0,0 @@
|
||||
[submodule "app/packages/redis-oplog"]
|
||||
path = app/packages/redis-oplog
|
||||
url = https://github.com/ramezrafla/redis-oplog.git
|
||||
13
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
13
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
@@ -0,0 +1,13 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Project Default" />
|
||||
<inspection_tool class="HtmlUnknownAttribute" enabled="false" level="WARNING" enabled_by_default="false">
|
||||
<option name="myValues">
|
||||
<value>
|
||||
<list size="0" />
|
||||
</value>
|
||||
</option>
|
||||
<option name="myCustomValuesEnabled" value="true" />
|
||||
</inspection_tool>
|
||||
</profile>
|
||||
</component>
|
||||
7
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
7
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,7 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<settings>
|
||||
<option name="PROJECT_PROFILE" value="Project Default" />
|
||||
<option name="USE_PROJECT_PROFILE" value="true" />
|
||||
<version value="1.0" />
|
||||
</settings>
|
||||
</component>
|
||||
56
.jscsrc
Normal file
56
.jscsrc
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"requireOperatorBeforeLineBreak": true,
|
||||
"requireCamelCaseOrUpperCaseIdentifiers": true,
|
||||
"maximumLineLength": {
|
||||
"value": 80,
|
||||
"allowComments": true,
|
||||
"allowRegex": true
|
||||
},
|
||||
"validateIndentation": "\t",
|
||||
"validateQuoteMarks": "\"",
|
||||
|
||||
"disallowMultipleLineStrings": true,
|
||||
"disallowMixedSpacesAndTabs": "smart",
|
||||
"disallowTrailingWhitespace": true,
|
||||
"disallowSpaceAfterPrefixUnaryOperators": true,
|
||||
"disallowMultipleVarDecl": true,
|
||||
"disallowNewlineBeforeBlockStatements": true,
|
||||
"disallowKeywordsOnNewLine": ["else"],
|
||||
|
||||
"requireSpaceAfterKeywords": [
|
||||
"if",
|
||||
"else",
|
||||
"for",
|
||||
"while",
|
||||
"do",
|
||||
"switch",
|
||||
"return",
|
||||
"try",
|
||||
"catch"
|
||||
],
|
||||
"requireSpaceBeforeBinaryOperators": [
|
||||
"=", "+=", "-=", "*=", "/=", "%=", "<<=", ">>=", ">>>=",
|
||||
"&=", "|=", "^=",
|
||||
|
||||
"+", "-", "*", "/", "%", "<<", ">>", ">>>", "&",
|
||||
"|", "^", "&&", "||", "===", "==", ">=",
|
||||
"<=", "<", ">", "!=", "!=="
|
||||
],
|
||||
"requireSpaceAfterBinaryOperators": true,
|
||||
"requireSpacesInConditionalExpression": true,
|
||||
"requireSpacesInForStatement": true,
|
||||
"requireTrailingComma": {
|
||||
"ignoreSingleValue": true,
|
||||
"ignoreSingleLine": true
|
||||
},
|
||||
"requireLineFeedAtFileEnd": true,
|
||||
"disallowSpacesInAnonymousFunctionExpression": {
|
||||
"beforeOpeningRoundBrace": true
|
||||
},
|
||||
"disallowSpacesInsideObjectBrackets": "all",
|
||||
"disallowSpacesInsideArrayBrackets": "all",
|
||||
"disallowSpacesInsideParentheses": true,
|
||||
|
||||
"disallowMultipleLineBreaks": true,
|
||||
"disallowNewlineBeforeBlockStatements": true
|
||||
}
|
||||
17
.vscode/launch.json
vendored
17
.vscode/launch.json
vendored
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Meteor: Test",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"cwd": "${workspaceFolder}/app",
|
||||
"runtimeExecutable": "npm",
|
||||
"runtimeArgs": [
|
||||
"run-script",
|
||||
"test"
|
||||
],
|
||||
"outputCapture": "std",
|
||||
}
|
||||
]
|
||||
}
|
||||
50
.vscode/settings.json
vendored
50
.vscode/settings.json
vendored
@@ -1,50 +0,0 @@
|
||||
{
|
||||
"cSpell.words": [
|
||||
"alea",
|
||||
"armor",
|
||||
"autorun",
|
||||
"blackbox",
|
||||
"cantrip",
|
||||
"Cantrips",
|
||||
"Crit",
|
||||
"Crits",
|
||||
"cyrb",
|
||||
"denormalize",
|
||||
"denormalized",
|
||||
"EJSON",
|
||||
"healthbar",
|
||||
"healthbars",
|
||||
"Hitpoints",
|
||||
"jank",
|
||||
"meteortesting",
|
||||
"multigraph",
|
||||
"nearley",
|
||||
"ngraph",
|
||||
"nonreactive",
|
||||
"ostrio",
|
||||
"pather",
|
||||
"recomputation",
|
||||
"Ruleset",
|
||||
"snackbars",
|
||||
"Spellcasting",
|
||||
"Subheaders",
|
||||
"thumbhash",
|
||||
"uncomputed",
|
||||
"untarget",
|
||||
"vars",
|
||||
"vuedraggable",
|
||||
"vuetify",
|
||||
"Vuex",
|
||||
"walkdown"
|
||||
],
|
||||
"javascript.preferences.importModuleSpecifier": "non-relative",
|
||||
"javascript.preferences.importModuleSpecifierEnding": "minimal",
|
||||
"javascript.preferences.organizeImports": {
|
||||
"enabled": true,
|
||||
},
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||
"typescript.preferences.importModuleSpecifierEnding": "minimal",
|
||||
"typescript.preferences.organizeImports": {
|
||||
"enabled": true,
|
||||
}
|
||||
}
|
||||
29
Dockerfile
29
Dockerfile
@@ -1,29 +0,0 @@
|
||||
FROM ubuntu:jammy
|
||||
|
||||
USER root
|
||||
RUN adduser --system mt
|
||||
|
||||
RUN apt-get update && apt-get install -y ca-certificates curl gnupg git
|
||||
RUN mkdir -p /etc/apt/keyrings
|
||||
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \
|
||||
| gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
|
||||
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" \
|
||||
> /etc/apt/sources.list.d/nodesource.list
|
||||
RUN apt-get update && apt-get install -y nodejs
|
||||
|
||||
USER mt
|
||||
|
||||
RUN curl https://install.meteor.com/ | sh
|
||||
|
||||
WORKDIR /home/mt
|
||||
RUN git clone https://github.com/ThaumRystra/DiceCloud dicecloud
|
||||
WORKDIR /home/mt/dicecloud/app
|
||||
RUN npm install --production
|
||||
ENV PATH=$PATH:/home/mt/.meteor
|
||||
RUN meteor build --directory ~/dc/ --architecture os.linux.x86_64
|
||||
WORKDIR /home/mt/dc/bundle/programs/server
|
||||
RUN npm install
|
||||
WORKDIR /home/mt/dc/bundle
|
||||
RUN rm -r /home/mt/dicecloud
|
||||
|
||||
ENTRYPOINT node main.js
|
||||
674
License.md
674
License.md
@@ -1,674 +0,0 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
{one line to give the program's name and a brief idea of what it does.}
|
||||
Copyright (C) {year} {name of author}
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
{project} Copyright (C) {year} {fullname}
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
|
||||
89
README.md
89
README.md
@@ -1,89 +1,4 @@
|
||||
DiceCloud
|
||||
RPG Docs
|
||||
========
|
||||
|
||||
This is the repo for [DiceCloud](https://dicecloud.com).
|
||||
|
||||
DiceCloud is a free, auditable, real-time character sheet for D&D 5e.
|
||||
|
||||
Philosophy
|
||||
----------
|
||||
|
||||
Setting up your character on DiceCloud takes a little longer than
|
||||
just filling it in on a paper character sheet would. The goal of using an
|
||||
online sheet is to make actually playing the game more streamlined, and
|
||||
ultimately more fun. So putting a little extra effort into setting up a
|
||||
character now pays off over and over again once you're playing.
|
||||
|
||||
The idea is to track where each number comes from, and allow you to easily make
|
||||
changes on the fly. Let's look at a hypothetical example.
|
||||
|
||||
> You need to swim through a sunken section of dungeon to fetch the quest's Thing.
|
||||
> You'll need to take off your magical Plate Armor of +1 Constitution to swim
|
||||
> without sinking, of course.
|
||||
>
|
||||
> Taking it off will take away that disadvantage on
|
||||
> stealth checks, change your armor class, your speed and your constitution, and
|
||||
> which in turn changes your hit points and your constitution saving throw.
|
||||
> Working out all those changes in the middle of a game will drag the game to a
|
||||
> halt.
|
||||
>
|
||||
> Fortunately you have DiceCloud, so it's a matter of dragging
|
||||
> your Plate Armor +1 Con from your "equipment" box to your "backpack" box and
|
||||
> you're done. Your hitpoints change correctly, your saving throws are up to date,
|
||||
> your armor class goes back to reflecting the fact that you have natural armor
|
||||
> from being a dragonborn. Your character sheet keeps up and you
|
||||
> ultimately get more time to play the game. Huzzah!
|
||||
|
||||
Getting started
|
||||
---------------
|
||||
|
||||
Running DiceCloud locally, either to host it yourself away from an internet
|
||||
connection, or to contribute to developing it further, is fairly
|
||||
straightforward and it should work on Linux, Windows, and Mac.
|
||||
|
||||
You'll need to have installed:
|
||||
|
||||
- [git](https://www.atlassian.com/git/tutorials/install-git)
|
||||
- [Meteor](https://www.meteor.com/install)
|
||||
|
||||
Then, it's just a matter of cloning this repository into a folder, and running
|
||||
`meteor` in the app directory.
|
||||
|
||||
`git clone https://github.com/ThaumRystra/DiceCloud dicecloud`
|
||||
`cd dicecloud`
|
||||
`cd app`
|
||||
`meteor npm install`
|
||||
`meteor`
|
||||
|
||||
You should see this:
|
||||
|
||||
```
|
||||
=> Started proxy.
|
||||
=> [HMR] Dev server listening on port 3003.
|
||||
=> Started MongoDB.
|
||||
=> Started your app.
|
||||
|
||||
=> App running at: http://localhost:3000/
|
||||
```
|
||||
|
||||
Environmental Variables
|
||||
-----------------------
|
||||
|
||||
```
|
||||
MAIL_URL=smtp://<your smtp mail url>
|
||||
METEOR_SETTINGS={ "public": { "environment": "production", "patreon": { "clientId": "<your patreon client ID>", "campaignId": "<your campaign id>" } }, "patreon": { "clientSecret": "<your client secret>", "creatorAccessToken": "<your creator access token>" } }
|
||||
MONGO_OPLOG_URL=mongodb+srv://<your url for the oplog account of your mongo database>
|
||||
MONGO_URL=mongodb+srv://<your url for the read/write account of your mongo database>
|
||||
NPM_CONFIG_PRODUCTION=true
|
||||
PROJECT_DIR=app
|
||||
ROOT_URL=https://<url of your DiceCloud instance>
|
||||
DEFAULT_LIBRARIES=<comma separated list of library ids that will be subscribed by default: "abc123,def456">
|
||||
```
|
||||
|
||||
To disable Patreon features and unlock all paid restrictions for all users of your deployment, replace
|
||||
`"patreon": { "clientId": ... }"` with `"disablePatreon": true` in the public key of the METEOR_SETTINGS environment variable.
|
||||
|
||||
Alternatively run `meteor run --settings exampleMeteorSettings.json` to start the app with the example settings that disable Patreon by default.
|
||||
|
||||
Now, visiting [](http://localhost:3000/) should show you an empty instance of
|
||||
DiceCloud running.
|
||||
This is the repo for [DiceCloud](dicecloud.com). The currently deployed version should always be the head of the master branch.
|
||||
|
||||
17
app/.gitignore
vendored
17
app/.gitignore
vendored
@@ -1,17 +0,0 @@
|
||||
.meteor/local
|
||||
.meteor/meteorite
|
||||
.demeteorized
|
||||
.cache
|
||||
.vscode
|
||||
.coverage
|
||||
.nyc_output
|
||||
.DS_Store
|
||||
fileStorage
|
||||
settings.json
|
||||
public/components
|
||||
public/_imports.html
|
||||
private/oldClient
|
||||
nohup.out
|
||||
node_modules
|
||||
dump
|
||||
*.crt
|
||||
@@ -1,55 +0,0 @@
|
||||
# Meteor packages used by this project, one per line.
|
||||
#
|
||||
# 'meteor add' and 'meteor remove' will edit this file for you,
|
||||
# but you can also edit it by hand.
|
||||
|
||||
accounts-password@2.4.0
|
||||
random@1.2.1
|
||||
underscore@1.6.1
|
||||
dburles:mongo-collection-instances
|
||||
accounts-google@1.4.0
|
||||
email@2.2.6
|
||||
meteor-base@1.5.1
|
||||
mobile-experience@1.1.1
|
||||
mongo@1.16.10
|
||||
session@1.2.1
|
||||
tracker@1.3.3
|
||||
logging@1.3.4
|
||||
reload@1.3.1
|
||||
ejson@1.1.3
|
||||
check@1.4.1
|
||||
standard-minifier-js@2.8.1
|
||||
shell-server@0.5.0
|
||||
service-configuration@1.3.4
|
||||
dynamic-import@0.7.3
|
||||
ddp-rate-limiter@1.2.1
|
||||
rate-limit@1.1.1
|
||||
mdg:validated-method
|
||||
static-html@1.3.2
|
||||
aldeed:collection2
|
||||
aldeed:schema-index
|
||||
accounts-patreon
|
||||
bozhao:link-accounts
|
||||
peerlibrary:reactive-publish
|
||||
simple:rest
|
||||
simple:rest-method-mixin
|
||||
mikowals:batch-insert
|
||||
peerlibrary:subscription-data
|
||||
zer0th:meteor-vuetify-loader
|
||||
akryum:vue-component@0.15.2
|
||||
akryum:vue-router2
|
||||
percolate:migrations
|
||||
meteortesting:mocha
|
||||
ostrio:files
|
||||
simple:rest-bearer-token-parser
|
||||
simple:rest-json-error-handler
|
||||
littledata:synced-cron
|
||||
#mdg:meteor-apm-agent
|
||||
seba:minifiers-autoprefixer
|
||||
#mixmax:smart-disconnect
|
||||
zodern:types
|
||||
zodern:fix-async-stubs
|
||||
typescript@4.9.5
|
||||
ecmascript@0.16.8
|
||||
lmieulet:meteor-legacy-coverage
|
||||
lmieulet:meteor-coverage
|
||||
@@ -1 +0,0 @@
|
||||
METEOR@2.16
|
||||
@@ -1,130 +0,0 @@
|
||||
accounts-base@2.2.11
|
||||
accounts-google@1.4.0
|
||||
accounts-oauth@1.4.4
|
||||
accounts-password@2.4.0
|
||||
accounts-patreon@0.1.0
|
||||
akryum:npm-check@0.1.2
|
||||
akryum:vue-component@0.16.0
|
||||
akryum:vue-component-dev-client@0.4.7
|
||||
akryum:vue-component-dev-server@0.1.4
|
||||
akryum:vue-router2@0.2.3
|
||||
aldeed:collection2@3.5.0
|
||||
aldeed:schema-index@3.1.0
|
||||
allow-deny@1.1.1
|
||||
autoupdate@1.8.0
|
||||
babel-compiler@7.10.5
|
||||
babel-runtime@1.5.1
|
||||
base64@1.0.12
|
||||
binary-heap@1.0.11
|
||||
blaze-tools@1.1.4
|
||||
boilerplate-generator@1.7.2
|
||||
bozhao:link-accounts@2.8.0
|
||||
caching-compiler@1.2.2
|
||||
caching-html-compiler@1.2.2
|
||||
callback-hook@1.5.1
|
||||
check@1.4.1
|
||||
coffeescript@2.7.0
|
||||
coffeescript-compiler@2.4.1
|
||||
dburles:mongo-collection-instances@0.4.0
|
||||
ddp@1.4.1
|
||||
ddp-client@2.6.2
|
||||
ddp-common@1.4.1
|
||||
ddp-rate-limiter@1.2.1
|
||||
ddp-server@2.7.1
|
||||
diff-sequence@1.1.2
|
||||
dynamic-import@0.7.3
|
||||
ecmascript@0.16.8
|
||||
ecmascript-runtime@0.8.1
|
||||
ecmascript-runtime-client@0.12.1
|
||||
ecmascript-runtime-server@0.11.0
|
||||
ejson@1.1.3
|
||||
email@2.2.6
|
||||
es5-shim@4.8.0
|
||||
fetch@0.1.4
|
||||
geojson-utils@1.0.11
|
||||
google-oauth@1.4.4
|
||||
hot-code-push@1.0.4
|
||||
html-tools@1.1.4
|
||||
htmljs@1.2.1
|
||||
http@2.0.0
|
||||
id-map@1.1.1
|
||||
inter-process-messaging@0.1.1
|
||||
lai:collection-extensions@0.4.0
|
||||
launch-screen@2.0.0
|
||||
littledata:synced-cron@1.5.1
|
||||
lmieulet:meteor-coverage@4.1.0
|
||||
lmieulet:meteor-legacy-coverage@0.2.0
|
||||
localstorage@1.2.0
|
||||
logging@1.3.4
|
||||
mdg:validated-method@1.3.0
|
||||
meteor@1.11.5
|
||||
meteor-base@1.5.1
|
||||
meteortesting:browser-tests@1.5.3
|
||||
meteortesting:mocha@2.1.0
|
||||
meteortesting:mocha-core@8.1.2
|
||||
mikowals:batch-insert@1.3.0
|
||||
minifier-css@1.6.4
|
||||
minifier-js@2.8.0
|
||||
minimongo@1.9.4
|
||||
mobile-experience@1.1.1
|
||||
mobile-status-bar@1.1.0
|
||||
modern-browsers@0.1.10
|
||||
modules@0.20.0
|
||||
modules-runtime@0.13.1
|
||||
mongo@1.16.10
|
||||
mongo-decimal@0.1.3
|
||||
mongo-dev-server@1.1.0
|
||||
mongo-id@1.0.8
|
||||
npm-mongo@4.17.2
|
||||
oauth@2.2.1
|
||||
oauth2@1.3.2
|
||||
ordered-dict@1.1.0
|
||||
ostrio:cookies@2.8.1
|
||||
ostrio:files@2.3.3
|
||||
patreon-oauth@0.1.0
|
||||
peerlibrary:assert@0.3.0
|
||||
peerlibrary:check-extension@0.7.0
|
||||
peerlibrary:computed-field@0.10.0
|
||||
peerlibrary:data-lookup@0.3.0
|
||||
peerlibrary:extend-publish@0.6.0
|
||||
peerlibrary:fiber-utils@0.10.0
|
||||
peerlibrary:reactive-mongo@0.4.1
|
||||
peerlibrary:reactive-publish@0.10.0
|
||||
peerlibrary:server-autorun@0.8.0
|
||||
peerlibrary:subscription-data@0.8.0
|
||||
percolate:migrations@1.1.1
|
||||
promise@0.12.2
|
||||
raix:eventemitter@1.0.0
|
||||
random@1.2.1
|
||||
rate-limit@1.1.1
|
||||
react-fast-refresh@0.2.8
|
||||
reactive-dict@1.3.1
|
||||
reactive-var@1.0.12
|
||||
reload@1.3.1
|
||||
retry@1.1.0
|
||||
routepolicy@1.1.1
|
||||
seba:minifiers-autoprefixer@2.0.1
|
||||
service-configuration@1.3.4
|
||||
session@1.2.1
|
||||
sha@1.0.9
|
||||
shell-server@0.5.0
|
||||
simple:json-routes@2.3.1
|
||||
simple:rest@1.2.1
|
||||
simple:rest-bearer-token-parser@1.1.1
|
||||
simple:rest-json-error-handler@1.1.3
|
||||
simple:rest-method-mixin@1.1.0
|
||||
socket-stream-client@0.5.2
|
||||
spacebars-compiler@1.3.2
|
||||
standard-minifier-js@2.8.1
|
||||
static-html@1.3.2
|
||||
templating-tools@1.2.3
|
||||
tmeasday:check-npm-versions@1.0.2
|
||||
tracker@1.3.3
|
||||
typescript@4.9.5
|
||||
underscore@1.6.1
|
||||
url@1.3.2
|
||||
webapp@1.13.8
|
||||
webapp-hashing@1.1.1
|
||||
zer0th:meteor-vuetify-loader@0.1.41
|
||||
zodern:fix-async-stubs@1.0.2
|
||||
zodern:types@1.0.13
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,39 +0,0 @@
|
||||
<head>
|
||||
<link href="https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900" rel="stylesheet">
|
||||
<link href="https://cdn.jsdelivr.net/npm/@mdi/font@5.x/css/materialdesignicons.min.css" rel="stylesheet">
|
||||
<meta name="viewport" content="width=device-width initial-scale=1.0, user-scalable=no">
|
||||
<meta name="mobile-web-app-capable" content="yes">
|
||||
<meta name="apple-mobile-web-app-capable" content="yes">
|
||||
<link rel="apple-touch-icon" sizes="57x57" href="/apple-touch-icon-57x57.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="60x60" href="/apple-touch-icon-60x60.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="72x72" href="/apple-touch-icon-72x72.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="/apple-touch-icon-76x76.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="114x114" href="/apple-touch-icon-114x114.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="/apple-touch-icon-120x120.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="144x144" href="/apple-touch-icon-144x144.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="152x152" href="/apple-touch-icon-152x152.png?v=lk6WXp6Pmj">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon-180x180.png?v=lk6WXp6Pmj">
|
||||
<link rel="icon" type="image/png" href="/favicon-32x32.png?v=lk6WXp6Pmj" sizes="32x32">
|
||||
<link rel="icon" type="image/png" href="/favicon-194x194.png?v=lk6WXp6Pmj" sizes="194x194">
|
||||
<link rel="icon" type="image/png" href="/favicon-96x96.png?v=lk6WXp6Pmj" sizes="96x96">
|
||||
<link rel="icon" type="image/png" href="/android-chrome-192x192.png?v=lk6WXp6Pmj" sizes="192x192">
|
||||
<link rel="icon" type="image/png" href="/favicon-16x16.png?v=lk6WXp6Pmj" sizes="16x16">
|
||||
<link rel="manifest" href="/manifest.json?v=lk6WXp6Pmj">
|
||||
<link rel="shortcut icon" href="/favicon.ico?v=lk6WXp6Pmj">
|
||||
<meta name="msapplication-TileColor" content="#b91d1d">
|
||||
<meta name="msapplication-TileImage" content="/mstile-144x144.png?v=lk6WXp6Pmj">
|
||||
<meta name="theme-color" content="#d12929">
|
||||
|
||||
<style type="text/css" media="print">
|
||||
@page {
|
||||
margin: 0mm;
|
||||
}
|
||||
html {
|
||||
margin: 0px;
|
||||
}
|
||||
* {
|
||||
-webkit-transition: none !important;
|
||||
transition: none !important;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
@@ -1,3 +0,0 @@
|
||||
<body>
|
||||
<div id="app"></div>
|
||||
</body>
|
||||
@@ -1,7 +0,0 @@
|
||||
import '/imports/api/simpleSchemaConfig';
|
||||
import '/imports/client/ui/vueSetup';
|
||||
import '/imports/client/ui/styles/stylesIndex';
|
||||
import '/imports/client/config';
|
||||
import '/imports/client/serviceWorker';
|
||||
|
||||
import 'ngraph.graph';
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"public": {
|
||||
"environment": "production",
|
||||
"disablePatreon": true,
|
||||
"disallowCreatureApiImport": false
|
||||
}
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
declare module 'ddp-rate-limiter-mixin' {
|
||||
export const RateLimiterMixin: <T>(options: T) => T;
|
||||
}
|
||||
3
app/imports/@types/ddp.d.ts
vendored
3
app/imports/@types/ddp.d.ts
vendored
@@ -1,3 +0,0 @@
|
||||
declare namespace DDP {
|
||||
function randomStream(seed: string): typeof Random;
|
||||
}
|
||||
244
app/imports/@types/meteor-ostrio-files.d.ts
vendored
244
app/imports/@types/meteor-ostrio-files.d.ts
vendored
@@ -1,244 +0,0 @@
|
||||
declare module 'meteor/ostrio:files' {
|
||||
import { Meteor } from 'meteor/meteor';
|
||||
import { Mongo } from 'meteor/mongo';
|
||||
import { ReactiveVar } from 'meteor/reactive-var';
|
||||
import { SimpleSchemaDefinition } from 'simpl-schema';
|
||||
import * as http from 'http';
|
||||
import { IncomingMessage } from 'connect';
|
||||
|
||||
interface Params {
|
||||
_id: string;
|
||||
query: { [key: string]: string };
|
||||
name: string;
|
||||
version: string;
|
||||
}
|
||||
|
||||
interface ContextHTTP {
|
||||
request: IncomingMessage;
|
||||
response: http.ServerResponse;
|
||||
params: Params;
|
||||
}
|
||||
|
||||
interface ContextUser {
|
||||
userId: string;
|
||||
user: () => Meteor.User;
|
||||
}
|
||||
|
||||
interface ContextUpload {
|
||||
file: object;
|
||||
/** On server only. */
|
||||
chunkId?: number;
|
||||
/** On server only. */
|
||||
eof?: boolean;
|
||||
}
|
||||
|
||||
interface Version<MetadataType> {
|
||||
extension: string;
|
||||
meta: MetadataType;
|
||||
path: string;
|
||||
size: number;
|
||||
type: string;
|
||||
}
|
||||
|
||||
class FileObj<MetadataType> {
|
||||
_id: string;
|
||||
size: number;
|
||||
name: string;
|
||||
type: string;
|
||||
path: string;
|
||||
isVideo: boolean;
|
||||
isAudio: boolean;
|
||||
isImage: boolean;
|
||||
isText: boolean;
|
||||
isJSON: boolean;
|
||||
isPDF: boolean;
|
||||
ext?: string;
|
||||
extension?: string;
|
||||
extensionWithDot: string;
|
||||
_storagePath: string;
|
||||
_downloadRoute: string;
|
||||
_collectionName: string;
|
||||
public?: boolean;
|
||||
meta?: MetadataType;
|
||||
userId?: string;
|
||||
updatedAt?: Date;
|
||||
versions: {
|
||||
[propName: string]: Version<MetadataType>;
|
||||
};
|
||||
mime: string;
|
||||
'mime-type': string;
|
||||
}
|
||||
|
||||
class FileRef<MetadataType> extends FileObj<MetadataType> {
|
||||
remove: (callback?: (error: Meteor.Error) => void) => void;
|
||||
link: (version?: string, location?: string) => string;
|
||||
get: (property?: string) => any;
|
||||
fetch: () => Array<FileObj<MetadataType>>;
|
||||
with: () => FileCursor<MetadataType>;
|
||||
}
|
||||
|
||||
interface FileData<MetadataType> {
|
||||
size: number;
|
||||
type: string;
|
||||
mime: string;
|
||||
'mime-type': string;
|
||||
ext: string;
|
||||
extension: string;
|
||||
name: string;
|
||||
meta: MetadataType;
|
||||
}
|
||||
|
||||
interface FilesCollectionConfig<MetadataType> {
|
||||
storagePath?: string | ((fileObj: FileObj<MetadataType>) => string);
|
||||
collection?: Mongo.Collection<FileObj<MetadataType>>;
|
||||
collectionName?: string;
|
||||
continueUploadTTL?: string;
|
||||
ddp?: object;
|
||||
cacheControl?: string;
|
||||
responseHeaders?: { [x: string]: string } | ((responseCode?: string, fileRef?: FileRef<MetadataType>, versionRef?: Version<MetadataType>, version?: string) => { [x: string]: string });
|
||||
throttle?: number | boolean;
|
||||
downloadRoute?: string;
|
||||
schema?: SimpleSchemaDefinition;
|
||||
chunkSize?: number;
|
||||
namingFunction?: (fileObj: FileObj<MetadataType>) => string;
|
||||
permissions?: number;
|
||||
parentDirPermissions?: number;
|
||||
integrityCheck?: boolean;
|
||||
strict?: boolean;
|
||||
downloadCallback?: (this: ContextHTTP & ContextUser, fileObj: FileObj<MetadataType>) => boolean;
|
||||
protected?: boolean | ((this: ContextHTTP & ContextUser, fileObj: FileObj<MetadataType>) => boolean | number);
|
||||
public?: boolean;
|
||||
onBeforeUpload?: (this: ContextUpload & ContextUser, fileData: FileData<MetadataType>) => boolean | string;
|
||||
onBeforeRemove?: (this: ContextUser, cursor: Mongo.Cursor<FileObj<MetadataType>>) => boolean;
|
||||
onInitiateUpload?: (this: ContextUpload & ContextUser, fileData: FileData<MetadataType>) => void;
|
||||
onAfterUpload?: (fileRef: FileRef<MetadataType>) => any;
|
||||
onAfterRemove?: (files: ReadonlyArray<FileObj<MetadataType>>) => any;
|
||||
onbeforeunloadMessage?: string | (() => string);
|
||||
allowClientCode?: boolean;
|
||||
debug?: boolean;
|
||||
interceptDownload?: (http: object, fileRef: FileRef<MetadataType>, version: string) => boolean;
|
||||
}
|
||||
|
||||
interface SearchOptions<MetadataType, TransformAdditions> {
|
||||
sort?: Mongo.SortSpecifier;
|
||||
skip?: number;
|
||||
limit?: number;
|
||||
fields?: Mongo.FieldSpecifier;
|
||||
reactive?: boolean;
|
||||
transform?: (fileObj: FileObj<MetadataType>) => FileObj<MetadataType> & TransformAdditions;
|
||||
}
|
||||
|
||||
interface InsertOptions<MetadataType> {
|
||||
file: File | object | string;
|
||||
fileId?: string;
|
||||
fileName?: string;
|
||||
isBase64?: boolean;
|
||||
meta?: MetadataType;
|
||||
transport?: 'ddp' | 'http';
|
||||
ddp?: object;
|
||||
onStart?: (error: Meteor.Error, fileData: FileData<MetadataType>) => any;
|
||||
onUploaded?: (error: Meteor.Error, fileRef: FileRef<MetadataType>) => any;
|
||||
onAbort?: (fileData: FileData<MetadataType>) => any;
|
||||
onError?: (error: Meteor.Error, fileData: FileData<MetadataType>) => any;
|
||||
onProgress?: (progress: number, fileData: FileData<MetadataType>) => any;
|
||||
onBeforeUpload?: (fileData: FileData<MetadataType>) => any;
|
||||
chunkSize?: number | 'dynamic';
|
||||
allowWebWorkers?: boolean;
|
||||
type?: string;
|
||||
}
|
||||
|
||||
interface LoadOptions<MetadataType> {
|
||||
fileName: string;
|
||||
meta?: MetadataType;
|
||||
type?: string;
|
||||
size?: number;
|
||||
userId?: string;
|
||||
fileId?: string;
|
||||
}
|
||||
|
||||
class FileUpload {
|
||||
file: File;
|
||||
onPause: ReactiveVar<boolean>;
|
||||
progress: ReactiveVar<number>;
|
||||
estimateTime: ReactiveVar<number>;
|
||||
estimateSpeed: ReactiveVar<number>;
|
||||
state: ReactiveVar<'active' | 'paused' | 'aborted' | 'completed'>;
|
||||
pause(): void;
|
||||
continue(): void;
|
||||
toggle(): void;
|
||||
pipe(): void;
|
||||
start(): void;
|
||||
on(event: string, callback: () => void): void;
|
||||
}
|
||||
|
||||
class FileCursor<MetadataType> extends FileRef<MetadataType> { }
|
||||
|
||||
class FilesCursor<MetadataType, TransformAdditions> extends Mongo.Cursor<FileObj<MetadataType>> {
|
||||
cursor: Mongo.Cursor<FileObj<MetadataType>>; // Refers to base cursor? Why is this existing?
|
||||
|
||||
get(): Array<FileCursor<MetadataType> & TransformAdditions>;
|
||||
hasNext(): boolean;
|
||||
next(): FileCursor<MetadataType> & TransformAdditions;
|
||||
hasPrevious(): boolean;
|
||||
previous(): FileCursor<MetadataType> & TransformAdditions;
|
||||
first(): FileCursor<MetadataType> & TransformAdditions;
|
||||
last(): FileCursor<MetadataType> & TransformAdditions;
|
||||
remove(callback?: (err: object) => void): void;
|
||||
each(callback: (cursor: FileCursor<MetadataType> & TransformAdditions) => void): void;
|
||||
current(): object | undefined;
|
||||
}
|
||||
|
||||
class FilesCollection<MetadataType = { [x: string]: any }> {
|
||||
collection: Mongo.Collection<FileObj<MetadataType>>;
|
||||
schema: SimpleSchemaDefinition;
|
||||
|
||||
constructor(config: FilesCollectionConfig<MetadataType>)
|
||||
|
||||
/**
|
||||
* Find and return Cursor for matching documents.
|
||||
*
|
||||
* @param selector [[http://docs.meteor.com/api/collections.html#selectors | Mongo-Style selector]]
|
||||
* @param options [[http://docs.meteor.com/api/collections.html#sortspecifiers | Mongo-Style selector Options]]
|
||||
*
|
||||
* @template TransformAdditions Additional properties provided by transforming a document with options.tranform().
|
||||
* Note that removing fields with a transform function is not currently supported as this may break
|
||||
* functions defined on a FileRef or FileCursor.
|
||||
*/
|
||||
find<TransformAdditions = {}>(
|
||||
selector?: Mongo.Selector<Partial<FileObj<MetadataType>>>,
|
||||
options?: SearchOptions<MetadataType, TransformAdditions>
|
||||
): FilesCursor<MetadataType, TransformAdditions>;
|
||||
|
||||
/**
|
||||
* Finds the first document that matches the selector, as ordered by sort and skip options.
|
||||
*
|
||||
* @param selector [[http://docs.meteor.com/api/collections.html#selectors | Mongo-Style selector]]
|
||||
* @param options [[http://docs.meteor.com/api/collections.html#sortspecifiers | Mongo-Style selector Options]]
|
||||
*
|
||||
* @template TransformAdditions Additional properties provided by transforming a document with options.tranform().
|
||||
* Note that removing fields with a transform function is not currently supported as this may break
|
||||
* functions defined on a FileRef or FileCursor.
|
||||
*/
|
||||
findOne<TransformAdditions = {}>(
|
||||
selector?: Mongo.Selector<Partial<FileObj<MetadataType>>> | string,
|
||||
options?: SearchOptions<MetadataType, TransformAdditions>
|
||||
): FileCursor<MetadataType> & TransformAdditions;
|
||||
|
||||
insert(settings: InsertOptions<MetadataType>, autoStart?: boolean): FileUpload;
|
||||
remove(select: Mongo.Selector<FileObj<MetadataType>> | string, callback?: (error: Meteor.Error) => void): FilesCollection<MetadataType>;
|
||||
update(select: Mongo.Selector<FileObj<MetadataType>> | string, modifier: Mongo.Modifier<FileObj<MetadataType>>, options?: {
|
||||
multi?: boolean;
|
||||
upsert?: boolean;
|
||||
arrayFilters?: Array<{ [identifier: string]: any }>;
|
||||
}, callback?: (error: Meteor.Error, insertedCount: number) => void): FilesCollection<MetadataType>;
|
||||
link(fileRef: FileRef<MetadataType>, version?: string): string;
|
||||
allow(options: Mongo.AllowDenyOptions): void;
|
||||
deny(options: Mongo.AllowDenyOptions): void;
|
||||
denyClient(): void;
|
||||
on(event: string, callback: (fileRef: FileRef<MetadataType>) => void): void;
|
||||
unlink(fileRef: FileRef<MetadataType>, version?: string): FilesCollection<MetadataType>;
|
||||
addFile(path: string, opts: LoadOptions<MetadataType>, callback?: (err: any, fileRef: FileRef<MetadataType>) => any, proceedAfterUpload?: boolean): FilesCollection<MetadataType>;
|
||||
load(url: string, opts: LoadOptions<MetadataType>, callback?: (err: object, fileRef: FileRef<MetadataType>) => any, proceedAfterUpload?: boolean): FilesCollection<MetadataType>;
|
||||
write(buffer: Buffer, opts: LoadOptions<MetadataType>, callback?: (err: object, fileRef: FileRef<MetadataType>) => any, proceedAfterUpload?: boolean): FilesCollection<MetadataType>;
|
||||
}
|
||||
}
|
||||
5
app/imports/@types/meteor.d.ts
vendored
5
app/imports/@types/meteor.d.ts
vendored
@@ -1,5 +0,0 @@
|
||||
declare namespace Meteor {
|
||||
interface User {
|
||||
roles?: string[];
|
||||
}
|
||||
}
|
||||
49
app/imports/@types/mongo.d.ts
vendored
49
app/imports/@types/mongo.d.ts
vendored
@@ -1,49 +0,0 @@
|
||||
type SimpleSchema = import('simpl-schema').default;
|
||||
type TypedSimpleSchema<T> = import('imports/api/utility/TypedSimpleSchema').TypedSimpleSchema<T>;
|
||||
|
||||
declare namespace Mongo {
|
||||
interface CollectionStatic {
|
||||
get: <T>(
|
||||
collectionName: string, options?: { connection: Meteor.Connection }
|
||||
) => Mongo.Collection<T>;
|
||||
}
|
||||
type SchemaOptions = {
|
||||
/**
|
||||
* Set to `true` if your document must be passed through the collection's transform to properly validate
|
||||
*/
|
||||
transform?: boolean,
|
||||
/**
|
||||
* Set to `true` to replace any existing schema instead of combining
|
||||
*/
|
||||
replace?: boolean
|
||||
selector?: any;
|
||||
}
|
||||
|
||||
interface Collection<T> {
|
||||
schema: TypedSimpleSchema<T>;
|
||||
simpleSchema<U extends Partial<T>>(selector?: U): TypedSimpleSchema<T & U>;
|
||||
/**
|
||||
* Use this method to attach a schema to a collection created by another package,
|
||||
* such as Meteor.users. It is most likely unsafe to call this method more than
|
||||
* once for a single collection, or to call this for a collection that had a
|
||||
* schema object passed to its constructor.
|
||||
* @param ss SimpleSchema instance or a schema definition object from which to create a new SimpleSchema instance
|
||||
* @param options Options
|
||||
*
|
||||
*/
|
||||
attachSchema(ss: SimpleSchema | TypedSimpleSchema<T>, options?: SchemaOptions): void;
|
||||
update(
|
||||
selector: Selector<T> | ObjectID | string,
|
||||
modifier: Modifier<T>,
|
||||
options?: {
|
||||
multi?: boolean | undefined;
|
||||
upsert?: boolean | undefined;
|
||||
arrayFilters?: Array<{ [identifier: string]: any }> | undefined;
|
||||
// Add Collection2 options
|
||||
selector?: Record<string, any>;
|
||||
getAutoValues?: boolean;
|
||||
},
|
||||
callback?: FunctionConstructor,
|
||||
): number;
|
||||
}
|
||||
}
|
||||
27
app/imports/@types/validated-method.d.ts
vendored
27
app/imports/@types/validated-method.d.ts
vendored
@@ -1,27 +0,0 @@
|
||||
declare module 'meteor/mdg:validated-method' {
|
||||
interface ValidatedMethodOptionsMixinFields<TRunArg, TRunReturn> {
|
||||
rateLimit: {
|
||||
numRequests: number,
|
||||
timeInterval: number,
|
||||
};
|
||||
}
|
||||
type Return<TFunc> = TFunc extends (...args: any[]) => infer TReturn ? TReturn : never;
|
||||
type Argument<TFunc> = TFunc extends (...args: infer TArgs) => any ? TArgs extends [infer TArg] ? TArg
|
||||
: NoArguments
|
||||
: never;
|
||||
interface ValidatedMethod<TName extends string, TRun extends (...args: any[]) => any> {
|
||||
callAsync: Argument<TRun> extends NoArguments
|
||||
// methods with no argument can be called with () or just a callback
|
||||
?
|
||||
& ((unusedArg: any, callback?: (error: Meteor.Error, result: Return<TRun>) => void) => void)
|
||||
& ((callback?: (error: Meteor.Error | undefined, result: Return<TRun>) => void) => void)
|
||||
& (() => Return<TRun>)
|
||||
// methods with arguments require those arguments to be called
|
||||
:
|
||||
& ((
|
||||
arg: Argument<TRun>,
|
||||
callback?: (error: Meteor.Error | undefined, result: Return<TRun>) => void,
|
||||
) => void)
|
||||
& ((arg: Argument<TRun>) => Return<TRun>);
|
||||
}
|
||||
}
|
||||
15
app/imports/@types/vue-meteor.d.ts
vendored
15
app/imports/@types/vue-meteor.d.ts
vendored
@@ -1,15 +0,0 @@
|
||||
import Vue from 'vue';
|
||||
|
||||
declare module 'vue/types/options' {
|
||||
interface ComponentOptions<V extends Vue> {
|
||||
meteor?: any;
|
||||
}
|
||||
}
|
||||
|
||||
declare module 'vue/types/vue' {
|
||||
interface Vue {
|
||||
$subscribe: (name: string, params: any[]) => void;
|
||||
$autorun: (fn: () => void) => number;
|
||||
$subReady: Record<string, boolean>;
|
||||
}
|
||||
}
|
||||
@@ -1,62 +0,0 @@
|
||||
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { incrementFileStorageUsed } from '/imports/api/users/methods/updateFileStorageUsed';
|
||||
import { CreaturePropertySchema } from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { CreatureSchema } from '/imports/api/creature/creatures/Creatures';
|
||||
import assertUserHasFileSpace from '/imports/api/files/assertUserHasFileSpace';
|
||||
let createS3FilesCollection;
|
||||
if (Meteor.isServer) {
|
||||
createS3FilesCollection = require('/imports/api/files/server/s3FileStorage').createS3FilesCollection
|
||||
} else {
|
||||
createS3FilesCollection = require('/imports/api/files/client/s3FileStorage').createS3FilesCollection
|
||||
}
|
||||
|
||||
const ArchiveCreatureFiles = createS3FilesCollection({
|
||||
collectionName: 'archiveCreatureFiles',
|
||||
storagePath: Meteor.isDevelopment ? '../../../../../fileStorage/archiveCreatures' : 'assets/app/archiveCreatures',
|
||||
onBeforeUpload(file) {
|
||||
// Allow upload files under 10MB, and only in json format
|
||||
if (file.size > 10485760) {
|
||||
return 'Please upload with size equal or less than 10MB';
|
||||
}
|
||||
// Make sure the user has enough space
|
||||
assertUserHasFileSpace(Meteor.userId(), file.size);
|
||||
// Only accept JSON
|
||||
if (!/json/i.test(file.extension)) {
|
||||
return 'Please upload only a JSON file';
|
||||
}
|
||||
return true;
|
||||
},
|
||||
onAfterUpload(file) {
|
||||
if (Meteor.isServer) incrementFileStorageUsed(file.userId, file.size);
|
||||
}
|
||||
});
|
||||
|
||||
let archiveSchema = new SimpleSchema({
|
||||
meta: {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
},
|
||||
creature: CreatureSchema,
|
||||
properties: {
|
||||
type: Array,
|
||||
},
|
||||
'properties.$': CreaturePropertySchema,
|
||||
experiences: {
|
||||
type: Array,
|
||||
},
|
||||
'experiences.$': {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
},
|
||||
logs: {
|
||||
type: Array,
|
||||
},
|
||||
'logs.$': {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
},
|
||||
});
|
||||
|
||||
export default ArchiveCreatureFiles;
|
||||
export { archiveSchema };
|
||||
@@ -1,100 +0,0 @@
|
||||
import { Meteor } from 'meteor/meteor';
|
||||
import SCHEMA_VERSION from '/imports/constants/SCHEMA_VERSION';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import { assertOwnership } from '/imports/api/creature/creatures/creaturePermissions';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import CreatureLogs from '/imports/api/creature/log/CreatureLogs';
|
||||
import Experiences from '/imports/api/creature/experience/Experiences';
|
||||
import { removeCreatureWork } from '/imports/api/creature/creatures/methods/removeCreature';
|
||||
import ArchiveCreatureFiles from '/imports/api/creature/archive/ArchiveCreatureFiles';
|
||||
import { getFilter } from '/imports/api/parenting/parentingFunctions';
|
||||
|
||||
export function getArchiveObj(creatureId) {
|
||||
// Build the archive document
|
||||
const creature = Creatures.findOne(creatureId);
|
||||
if (!creature) throw new Meteor.Error('creature-not-found', 'Creature not found');
|
||||
const properties = CreatureProperties.find({ ...getFilter.descendantsOfRoot(creatureId) }).fetch();
|
||||
const experiences = Experiences.find({ creatureId }).fetch();
|
||||
const logs = CreatureLogs.find({ creatureId }).fetch();
|
||||
let archiveCreature = {
|
||||
meta: {
|
||||
type: 'DiceCloud V2 Creature Archive',
|
||||
schemaVersion: SCHEMA_VERSION,
|
||||
archiveDate: new Date(),
|
||||
},
|
||||
creature,
|
||||
properties,
|
||||
experiences,
|
||||
logs,
|
||||
};
|
||||
|
||||
return archiveCreature;
|
||||
}
|
||||
|
||||
export const archiveCreature = Meteor.wrapAsync(function archiveCreatureFn(creatureId, callback) {
|
||||
const archive = getArchiveObj(creatureId);
|
||||
const buffer = Buffer.from(JSON.stringify(archive, null, 2));
|
||||
ArchiveCreatureFiles.write(buffer, {
|
||||
fileName: `${archive.creature.name || archive.creature._id}.json`,
|
||||
type: 'application/json',
|
||||
userId: archive.creature.owner,
|
||||
meta: {
|
||||
schemaVersion: SCHEMA_VERSION,
|
||||
creatureId: archive.creature._id,
|
||||
creatureName: archive.creature.name,
|
||||
},
|
||||
}, (error, fileRef) => {
|
||||
if (error) {
|
||||
// If there is an error already, just call the callback
|
||||
callback(error);
|
||||
} else if (!Meteor.settings.useS3) {
|
||||
// If we aren't using s3, remove the creature and call the callback
|
||||
removeCreatureWork(creatureId);
|
||||
callback();
|
||||
} else {
|
||||
// Wait for s3Result event that occurs when the s3 attempt to write ends.
|
||||
// If it's successful, remove the creature, otherwise callback with error
|
||||
const resultHandler = (s3Error, resultRef) => {
|
||||
// This event is for a different file, ignore it
|
||||
if (resultRef._id !== fileRef._id) return;
|
||||
// Remove this handler, we are only running it once for this fileId
|
||||
ArchiveCreatureFiles.off('s3Result', resultHandler);
|
||||
// Remove the creature if there was no error
|
||||
if (!s3Error) {
|
||||
removeCreatureWork(creatureId);
|
||||
}
|
||||
// Alert the callback that we're done
|
||||
callback(s3Error);
|
||||
}
|
||||
ArchiveCreatureFiles.on('s3Result', resultHandler);
|
||||
}
|
||||
}, true);
|
||||
});
|
||||
|
||||
const archiveCreatureToFile = new ValidatedMethod({
|
||||
name: 'Creatures.methods.archiveCreatureToFile',
|
||||
validate: new SimpleSchema({
|
||||
'creatureId': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 10,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
async run({ creatureId }) {
|
||||
assertOwnership(creatureId, this.userId);
|
||||
if (Meteor.isServer) {
|
||||
archiveCreature(creatureId);
|
||||
} else {
|
||||
removeCreatureWork(creatureId);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
export default archiveCreatureToFile;
|
||||
@@ -1,3 +0,0 @@
|
||||
import '/imports/api/creature/archive/methods/archiveCreatureToFile';
|
||||
import '/imports/api/creature/archive/methods/restoreCreatureFromFile';
|
||||
import '/imports/api/creature/archive/methods/removeArchiveCreature';
|
||||
@@ -1,40 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import ArchiveCreatureFiles from '/imports/api/creature/archive/ArchiveCreatureFiles';
|
||||
import { incrementFileStorageUsed } from '/imports/api/users/methods/updateFileStorageUsed';
|
||||
|
||||
const removeArchiveCreature = new ValidatedMethod({
|
||||
name: 'ArchiveCreatureFiles.methods.removeArchiveCreature',
|
||||
validate: new SimpleSchema({
|
||||
'fileId': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
async run({ fileId }) {
|
||||
// fetch the file
|
||||
const file = ArchiveCreatureFiles.findOne({ _id: fileId }).get();
|
||||
if (!file) {
|
||||
throw new Meteor.Error('File not found',
|
||||
'The requested creature archive does not exist');
|
||||
}
|
||||
// Assert ownership
|
||||
const userId = file?.userId;
|
||||
if (!userId || userId !== this.userId) {
|
||||
throw new Meteor.Error('Permission denied',
|
||||
'You can only restore creatures you own');
|
||||
}
|
||||
//Remove the archive once the restore succeeded
|
||||
ArchiveCreatureFiles.remove({ _id: fileId });
|
||||
// Update the user's file storage limits
|
||||
incrementFileStorageUsed(userId, -file.size);
|
||||
},
|
||||
});
|
||||
|
||||
export default removeArchiveCreature;
|
||||
@@ -1,104 +0,0 @@
|
||||
import SCHEMA_VERSION from '/imports/constants/SCHEMA_VERSION';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import CreatureLogs from '/imports/api/creature/log/CreatureLogs';
|
||||
import Experiences from '/imports/api/creature/experience/Experiences';
|
||||
import { removeCreatureWork } from '/imports/api/creature/creatures/methods/removeCreature';
|
||||
import ArchiveCreatureFiles from '/imports/api/creature/archive/ArchiveCreatureFiles';
|
||||
import assertHasCharactersSlots from '/imports/api/creature/creatures/methods/assertHasCharacterSlots';
|
||||
import { incrementFileStorageUsed } from '/imports/api/users/methods/updateFileStorageUsed';
|
||||
import verifyArchiveSafety from '/imports/api/creature/archive/methods/verifyArchiveSafety';
|
||||
|
||||
let migrateArchive;
|
||||
if (Meteor.isServer) {
|
||||
migrateArchive = require('/imports/migrations/archive/migrateArchive').default;
|
||||
}
|
||||
|
||||
function restoreCreature(archive, userId) {
|
||||
if (SCHEMA_VERSION < archive.meta.schemaVersion) {
|
||||
throw new Meteor.Error('Incompatible',
|
||||
'The archive file is from a newer version. Update required to read.')
|
||||
}
|
||||
|
||||
// Migrate and verify the archive meets the current schema
|
||||
migrateArchive(archive);
|
||||
|
||||
// Asset that the archive is safe
|
||||
verifyArchiveSafety(archive);
|
||||
|
||||
// Don't upload creatures twice
|
||||
const existingCreature = Creatures.findOne(archive.creature._id, {
|
||||
fields: { _id: 1 }
|
||||
});
|
||||
if (existingCreature) throw new Meteor.Error('Already exists',
|
||||
'The creature you are trying to restore already exists.')
|
||||
|
||||
// Ensure the user owns the restored creature
|
||||
archive.creature.owner = userId;
|
||||
|
||||
// Insert the creature sub documents
|
||||
// They still have their original _id's
|
||||
Creatures.insert(archive.creature);
|
||||
try {
|
||||
// Add all the properties
|
||||
if (archive.properties && archive.properties.length) {
|
||||
CreatureProperties.batchInsert(archive.properties);
|
||||
}
|
||||
if (archive.experiences && archive.experiences.length) {
|
||||
Experiences.batchInsert(archive.experiences);
|
||||
}
|
||||
if (archive.logs && archive.logs.length) {
|
||||
CreatureLogs.batchInsert(archive.logs);
|
||||
}
|
||||
} catch (e) {
|
||||
// If the above fails, delete the inserted creature
|
||||
removeCreatureWork(archive.creature._id);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
const restoreCreaturefromFile = new ValidatedMethod({
|
||||
name: 'Creatures.methods.restoreCreaturefromFile',
|
||||
validate: new SimpleSchema({
|
||||
'fileId': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 10,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
async run({ fileId }) {
|
||||
// fetch the file
|
||||
const file = ArchiveCreatureFiles.findOne({ _id: fileId }).get();
|
||||
if (!file) {
|
||||
throw new Meteor.Error('File not found',
|
||||
'The requested creature archive does not exist');
|
||||
}
|
||||
// Assert ownership
|
||||
const userId = file?.userId;
|
||||
if (!userId || userId !== this.userId) {
|
||||
throw new Meteor.Error('Permission denied',
|
||||
'You can only restore creatures you own');
|
||||
}
|
||||
|
||||
assertHasCharactersSlots(this.userId);
|
||||
|
||||
if (Meteor.isServer) {
|
||||
// Read the file data
|
||||
const archive = await ArchiveCreatureFiles.readJSONFile(file);
|
||||
restoreCreature(archive, this.userId);
|
||||
}
|
||||
//Remove the archive once the restore succeeded
|
||||
ArchiveCreatureFiles.remove({ _id: fileId });
|
||||
// Update the user's file storage limits
|
||||
incrementFileStorageUsed(userId, -file.size);
|
||||
},
|
||||
});
|
||||
|
||||
export default restoreCreaturefromFile;
|
||||
@@ -1,28 +0,0 @@
|
||||
import { slice } from 'lodash';
|
||||
import { PER_CREATURE_LOG_LIMIT } from '/imports/api/creature/log/CreatureLogs';
|
||||
|
||||
export default function verifyArchiveSafety({ creature, properties, experiences, logs }) {
|
||||
const creatureId = creature._id;
|
||||
|
||||
// Check lengths of arrays
|
||||
if (logs.length > PER_CREATURE_LOG_LIMIT) {
|
||||
logs = slice(logs, 0, PER_CREATURE_LOG_LIMIT);
|
||||
}
|
||||
|
||||
// Check that everything belongs to the right creature
|
||||
logs.forEach(log => {
|
||||
if (log.creatureId !== creatureId) {
|
||||
throw new Meteor.Error('Malicious log', 'Log contains an entry for the wrong creature');
|
||||
}
|
||||
});
|
||||
experiences.forEach(experience => {
|
||||
if (experience.creatureId !== creatureId) {
|
||||
throw new Meteor.Error('Malicious experience', 'Experiences contains an entry for the wrong creature');
|
||||
}
|
||||
});
|
||||
properties.forEach(prop => {
|
||||
if (prop.root?.id !== creatureId) {
|
||||
throw new Meteor.Error('Malicious prop', 'Properties contains an entry for the wrong creature');
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
|
||||
let CreatureFolders = new Mongo.Collection('creatureFolders');
|
||||
|
||||
let creatureFolderSchema = new SimpleSchema({
|
||||
name: {
|
||||
type: String,
|
||||
trim: false,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
creatures: {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
},
|
||||
'creatures.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
owner: {
|
||||
type: String,
|
||||
max: 32,
|
||||
index: 1,
|
||||
},
|
||||
archived: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
order: {
|
||||
type: Number,
|
||||
defaultValue: 0,
|
||||
},
|
||||
});
|
||||
|
||||
CreatureFolders.attachSchema(creatureFolderSchema);
|
||||
|
||||
import '/imports/api/creature/creatureFolders/methods.js/index';
|
||||
export default CreatureFolders;
|
||||
@@ -1,4 +0,0 @@
|
||||
import '/imports/api/creature/creatureFolders/methods.js/insertCreatureFolder';
|
||||
import '/imports/api/creature/creatureFolders/methods.js/updateCreatureFolderName';
|
||||
import '/imports/api/creature/creatureFolders/methods.js/removeCreatureFolder';
|
||||
import '/imports/api/creature/creatureFolders/methods.js/moveCreatureToFolder';
|
||||
@@ -1,46 +0,0 @@
|
||||
import CreatureFolders from '/imports/api/creature/creatureFolders/CreatureFolders';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
|
||||
const insertCreatureFolder = new ValidatedMethod({
|
||||
name: 'creatureFolders.methods.insert',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run() {
|
||||
// Ensure logged in
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.insert.denied',
|
||||
'You need to be logged in to insert a folder');
|
||||
}
|
||||
// Limit folders to 50 per user
|
||||
let existingFolders = CreatureFolders.find({
|
||||
owner: userId
|
||||
}, {
|
||||
fields: { order: 1 },
|
||||
sort: { left: -1 }
|
||||
});
|
||||
if (existingFolders.count() >= 50) {
|
||||
throw new Meteor.Error('creatureFolders.methods.insert.denied',
|
||||
'You can not have more than 50 folders');
|
||||
}
|
||||
// Make the new folder the last in the order
|
||||
let order = 0;
|
||||
let lastFolder = existingFolders.fetch()[0];
|
||||
if (lastFolder) {
|
||||
order = (lastFolder.order || 0) + 1;
|
||||
}
|
||||
// Insert
|
||||
return CreatureFolders.insert({
|
||||
name: 'Folder',
|
||||
owner: userId,
|
||||
order,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default insertCreatureFolder;
|
||||
@@ -1,45 +0,0 @@
|
||||
import CreatureFolders from '/imports/api/creature/creatureFolders/CreatureFolders';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
|
||||
const moveCreatureToFolder = new ValidatedMethod({
|
||||
name: 'creatureFolders.methods.moveCreatureToFolder',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ creatureId, folderId }) {
|
||||
// Ensure logged in
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'You need to be logged in to remove a folder');
|
||||
}
|
||||
// Check that this folder is owned by the user
|
||||
if (folderId) {
|
||||
let existingFolder = CreatureFolders.findOne(folderId);
|
||||
if (existingFolder.owner !== userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'This folder does not belong to you');
|
||||
}
|
||||
}
|
||||
// Remove from other folders
|
||||
CreatureFolders.update({
|
||||
owner: userId
|
||||
}, {
|
||||
$pull: { creatures: creatureId },
|
||||
}, {
|
||||
multi: true,
|
||||
});
|
||||
if (folderId) {
|
||||
// Add to this folder
|
||||
CreatureFolders.update(folderId, {
|
||||
$addToSet: { creatures: creatureId },
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
export default moveCreatureToFolder;
|
||||
@@ -1,31 +0,0 @@
|
||||
import CreatureFolders from '/imports/api/creature/creatureFolders/CreatureFolders';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
|
||||
const removeCreatureFolder = new ValidatedMethod({
|
||||
name: 'creatureFolders.methods.remove',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id }) {
|
||||
// Ensure logged in
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'You need to be logged in to remove a folder');
|
||||
}
|
||||
// Check that this folder is owned by the user
|
||||
let existingFolder = CreatureFolders.findOne(_id);
|
||||
if (existingFolder.owner !== userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'This folder does not belong to you');
|
||||
}
|
||||
// Remove
|
||||
return CreatureFolders.remove(_id);
|
||||
},
|
||||
});
|
||||
|
||||
export default removeCreatureFolder;
|
||||
@@ -1,43 +0,0 @@
|
||||
import CreatureFolders from '/imports/api/creature/creatureFolders/CreatureFolders';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
|
||||
const reorderCreatureFolder = new ValidatedMethod({
|
||||
name: 'creatureFolders.methods.reorder',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, order }) {
|
||||
// Ensure logged in
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.reorder.denied',
|
||||
'You need to be logged in to reorder a folder');
|
||||
}
|
||||
// Check that this folder is owned by the user
|
||||
let existingFolder = CreatureFolders.findOne(_id);
|
||||
if (existingFolder.owner !== userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.reorder.denied',
|
||||
'This folder does not belong to you');
|
||||
}
|
||||
// First give it the new order, it should end in 0.5 putting it between two other docs
|
||||
CreatureFolders.update(_id, { $set: { order } });
|
||||
this.unblock();
|
||||
// Reorder all the folders with integer numbers in this new order
|
||||
CreatureFolders.find({
|
||||
owner: userId
|
||||
}, {
|
||||
fields: { order: 1, },
|
||||
sort: { order: 1 }
|
||||
}).forEach((folder, index) => {
|
||||
if (folder.order !== index) {
|
||||
CreatureFolders.update(_id, { $set: { order: index } })
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default reorderCreatureFolder;
|
||||
@@ -1,31 +0,0 @@
|
||||
import CreatureFolders from '/imports/api/creature/creatureFolders/CreatureFolders';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
|
||||
const updateCreatureFolderName = new ValidatedMethod({
|
||||
name: 'creatureFolders.methods.updateName',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, name }) {
|
||||
// Ensure logged in
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'You need to be logged in to update a folder');
|
||||
}
|
||||
// Check that this folder is owned by the user
|
||||
let existingFolder = CreatureFolders.findOne(_id);
|
||||
if (existingFolder.owner !== userId) {
|
||||
throw new Meteor.Error('creatureFolders.methods.updateName.denied',
|
||||
'This folder does not belong to you');
|
||||
}
|
||||
// Update
|
||||
return CreatureFolders.update(_id, { $set: { name } });
|
||||
},
|
||||
});
|
||||
|
||||
export default updateCreatureFolderName;
|
||||
@@ -1,174 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import ColorSchema from '/imports/api/properties/subSchemas/ColorSchema';
|
||||
import ChildSchema from '/imports/api/parenting/ChildSchema';
|
||||
import SoftRemovableSchema from '/imports/api/parenting/SoftRemovableSchema';
|
||||
import propertySchemasIndex from '/imports/api/properties/computedPropertySchemasIndex';
|
||||
import { storedIconsSchema } from '/imports/api/icons/Icons';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
import { ConvertToUnion, InferType, TypedSimpleSchema } from '/imports/api/utility/TypedSimpleSchema';
|
||||
import { Simplify } from 'type-fest';
|
||||
import type { PropertyType } from '/imports/api/properties/PropertyType.type';
|
||||
|
||||
const PreComputeCreaturePropertySchema = TypedSimpleSchema.from({
|
||||
_id: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
_migrationError: {
|
||||
type: String,
|
||||
optional: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
allowedValues: Object.keys(propertySchemasIndex),
|
||||
},
|
||||
tags: {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
maxCount: STORAGE_LIMITS.tagCount,
|
||||
},
|
||||
'tags.$': {
|
||||
type: String,
|
||||
max: STORAGE_LIMITS.tagLength,
|
||||
},
|
||||
disabled: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
icon: {
|
||||
type: storedIconsSchema,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.icon,
|
||||
},
|
||||
// Reference to the library node that this property was copied from
|
||||
libraryNodeId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
optional: true,
|
||||
},
|
||||
// Fill more than one quantity in a slot, like feats and ability score
|
||||
// improvements, filtered out of UI if there isn't space in quantityExpected
|
||||
slotQuantityFilled: {
|
||||
type: SimpleSchema.Integer,
|
||||
optional: true, // Undefined implies 1
|
||||
},
|
||||
});
|
||||
|
||||
const DenormalisedOnlyCreaturePropertySchema = TypedSimpleSchema.from({
|
||||
// Denormalised flag if this property is inactive on the sheet for any reason
|
||||
// Including being disabled, or a descendant of a disabled property
|
||||
inactive: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
// Denormalised flag if this property was made inactive by an inactive
|
||||
// ancestor. True if this property has an inactive ancestor even if this
|
||||
// property is itself inactive
|
||||
deactivatedByAncestor: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
// Denormalised flag if this property was made inactive because of its own
|
||||
// state
|
||||
deactivatedBySelf: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
// Denormalised flag if this property was made inactive because of a toggle
|
||||
// calculation. Either an ancestor toggle calculation or its own.
|
||||
deactivatedByToggle: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
deactivatingToggleId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
// Triggers that fire when this property is applied
|
||||
'triggerIds': {
|
||||
type: Object,
|
||||
optional: true,
|
||||
removeBeforeCompute: true,
|
||||
},
|
||||
'triggerIds.before': {
|
||||
type: Array,
|
||||
optional: true,
|
||||
},
|
||||
'triggerIds.before.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
'triggerIds.after': {
|
||||
type: Array,
|
||||
optional: true,
|
||||
},
|
||||
'triggerIds.after.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
'triggerIds.afterChildren': {
|
||||
type: Array,
|
||||
optional: true,
|
||||
},
|
||||
'triggerIds.afterChildren.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
// When this is true on any property, the creature needs to be recomputed
|
||||
dirty: {
|
||||
type: Boolean,
|
||||
// Default to true because new properties cause a recomputation
|
||||
defaultValue: true,
|
||||
optional: true,
|
||||
},
|
||||
});
|
||||
|
||||
const CreaturePropertySchema = PreComputeCreaturePropertySchema.extend(DenormalisedOnlyCreaturePropertySchema);
|
||||
|
||||
export type CreaturePropertyTypes = {
|
||||
[T in PropertyType]: Simplify<
|
||||
{ type: T }
|
||||
& InferType<typeof propertySchemasIndex[T]>
|
||||
> & Simplify<
|
||||
Exclude<InferType<typeof CreaturePropertySchema>, 'type'>
|
||||
& InferType<typeof ColorSchema>
|
||||
& InferType<typeof ChildSchema>
|
||||
& InferType<typeof SoftRemovableSchema>
|
||||
>
|
||||
}
|
||||
|
||||
export type CreatureProperty = Simplify<ConvertToUnion<CreaturePropertyTypes>>;
|
||||
|
||||
const CreatureProperties = new Mongo.Collection<CreatureProperty>('creatureProperties');
|
||||
|
||||
const genericCreaturePropertySchema = TypedSimpleSchema.from({})
|
||||
.extend(CreaturePropertySchema)
|
||||
.extend(ColorSchema)
|
||||
.extend(ChildSchema)
|
||||
.extend(SoftRemovableSchema);
|
||||
|
||||
// Attach the default schema
|
||||
CreatureProperties.attachSchema(genericCreaturePropertySchema);
|
||||
|
||||
// Attach the schemas for each type
|
||||
let key: keyof typeof propertySchemasIndex;
|
||||
for (key in propertySchemasIndex) {
|
||||
const schema = TypedSimpleSchema.from({})
|
||||
.extend(propertySchemasIndex[key])
|
||||
.extend(genericCreaturePropertySchema)
|
||||
CreatureProperties.attachSchema(schema, {
|
||||
selector: { type: key }
|
||||
});
|
||||
}
|
||||
|
||||
export default CreatureProperties;
|
||||
export {
|
||||
DenormalisedOnlyCreaturePropertySchema,
|
||||
CreaturePropertySchema,
|
||||
};
|
||||
@@ -1,5 +0,0 @@
|
||||
import { getCreature } from '/imports/api/engine/loadCreatures';
|
||||
|
||||
export default function getRootCreatureAncestor(property) {
|
||||
return getCreature(property.root.id);
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
|
||||
const adjustQuantity = new ValidatedMethod({
|
||||
name: 'creatureProperties.adjustQuantity',
|
||||
validate: new SimpleSchema({
|
||||
_id: SimpleSchema.RegEx.Id,
|
||||
operation: {
|
||||
type: String,
|
||||
allowedValues: ['set', 'increment']
|
||||
},
|
||||
value: Number,
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, operation, value }) {
|
||||
// Permissions
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Do work
|
||||
adjustQuantityWork({ property, operation, value });
|
||||
},
|
||||
});
|
||||
|
||||
export function adjustQuantityWork({ property, operation, value }) {
|
||||
// Check if property has quantity
|
||||
let schema = CreatureProperties.simpleSchema(property);
|
||||
if (!schema.allowsKey('quantity')) {
|
||||
throw new Meteor.Error(
|
||||
'Adjust quantity failed',
|
||||
`Property of type "${property.type}" doesn't have a quantity`
|
||||
);
|
||||
}
|
||||
if (operation === 'set') {
|
||||
CreatureProperties.update(property._id, {
|
||||
$set: { quantity: value, dirty: true }
|
||||
}, {
|
||||
selector: property
|
||||
});
|
||||
} else if (operation === 'increment') {
|
||||
// value here is 'damage'
|
||||
value = -value;
|
||||
let currentQuantity = property.quantity;
|
||||
if (currentQuantity + value < 0) value = -currentQuantity;
|
||||
CreatureProperties.update(property._id, {
|
||||
$inc: { quantity: value },
|
||||
$set: { dirty: true }
|
||||
}, {
|
||||
selector: property
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default adjustQuantity;
|
||||
@@ -1,182 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import LibraryNodes from '/imports/api/library/LibraryNodes';
|
||||
import { RefSchema } from '/imports/api/parenting/ChildSchema';
|
||||
import {
|
||||
assertEditPermission,
|
||||
assertDocEditPermission,
|
||||
assertCopyPermission
|
||||
} from '/imports/api/sharing/sharingPermissions';
|
||||
import {
|
||||
fetchDocByRef,
|
||||
getFilter,
|
||||
renewDocIds
|
||||
} from '/imports/api/parenting/parentingFunctions';
|
||||
import { rebuildNestedSets } from '/imports/api/parenting/parentingFunctions';
|
||||
import Libraries from '/imports/api/library/Libraries';
|
||||
const DUPLICATE_CHILDREN_LIMIT = 500;
|
||||
|
||||
const copyPropertyToLibrary = new ValidatedMethod({
|
||||
name: 'creatureProperties.copyPropertyToLibrary',
|
||||
validate: new SimpleSchema({
|
||||
propId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
parentRef: {
|
||||
type: RefSchema,
|
||||
},
|
||||
order: {
|
||||
type: Number,
|
||||
optional: true,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 1,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ propId, parentRef, order }) {
|
||||
// get the new ancestry for the properties
|
||||
const parentDoc = fetchDocByRef(parentRef);
|
||||
|
||||
// Check permission to edit the destination
|
||||
let rootLibrary;
|
||||
if (parentRef.collection === 'libraries') {
|
||||
rootLibrary = parentDoc;
|
||||
} else if (parentRef.collection === 'libraryNodes') {
|
||||
rootLibrary = Libraries.findOne(parentDoc.root.id)
|
||||
} else {
|
||||
throw `${parentRef.collection} is not a valid parent collection`
|
||||
}
|
||||
assertEditPermission(rootLibrary, this.userId);
|
||||
|
||||
const insertedRootNode = insertNodeFromProperty(propId, order, this);
|
||||
|
||||
// Tree structure changed by inserts, reorder the tree
|
||||
rebuildNestedSets(LibraryNodes, rootLibrary._id);
|
||||
|
||||
// Return the docId of the inserted root property
|
||||
return insertedRootNode?._id;
|
||||
},
|
||||
});
|
||||
|
||||
function insertNodeFromProperty(propId, order, method) {
|
||||
// Fetch the property and its descendants, provided they have not been
|
||||
// removed
|
||||
let prop = CreatureProperties.findOne({
|
||||
_id: propId,
|
||||
removed: { $ne: true },
|
||||
});
|
||||
if (!prop) {
|
||||
if (Meteor.isClient) return;
|
||||
else {
|
||||
throw new Meteor.Error(
|
||||
'Insert property from library failed',
|
||||
`No property with id '${propId}' was found`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure we can edit this property
|
||||
assertDocEditPermission(prop, method.userId);
|
||||
|
||||
let oldParentId = prop.parentId;
|
||||
const propCursor = CreatureProperties.find({
|
||||
...getFilter.descendants(prop),
|
||||
removed: { $ne: true },
|
||||
});
|
||||
|
||||
// Make sure there aren't too many descendants
|
||||
if (propCursor.count() > DUPLICATE_CHILDREN_LIMIT) {
|
||||
throw new Meteor.Error('Copy children limit',
|
||||
`The property has over ${DUPLICATE_CHILDREN_LIMIT} descendants and cannot be copied`);
|
||||
}
|
||||
|
||||
let props = propCursor.fetch();
|
||||
|
||||
// The root prop is first in the array of props
|
||||
// It must get the first generated ID to prevent flickering
|
||||
props = [prop, ...props];
|
||||
|
||||
// If the docs came from a library, that library must consent to this user copying their
|
||||
// properties
|
||||
assertSourceLibraryCopyPermission(props, method);
|
||||
|
||||
// Give the docs new IDs without breaking internal references
|
||||
renewDocIds({
|
||||
docArray: props,
|
||||
collectionMap: { 'creatureProperties': 'libraryNodes' }
|
||||
});
|
||||
|
||||
// Order the root node
|
||||
prop.left = Number.MAX_SAFE_INTEGER - 1;
|
||||
prop.right = Number.MAX_SAFE_INTEGER;
|
||||
|
||||
// Clean the props
|
||||
props = cleanProps(props);
|
||||
|
||||
// Insert the props as library nodes
|
||||
LibraryNodes.batchInsert(props);
|
||||
return prop;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param props The properties to check
|
||||
* @param userId The userId trying to copy these properties to a library
|
||||
* Checks that every property can be copied out of the library that originated it by this user
|
||||
*/
|
||||
function assertSourceLibraryCopyPermission(props, method) {
|
||||
// Skip on the client
|
||||
if (method.isSimulation) return;
|
||||
|
||||
// Get all the library node ids that are sources for these properties
|
||||
const libraryNodeIds = [];
|
||||
props.forEach(prop => {
|
||||
if (prop.libraryNodeId) libraryNodeIds.push(prop.libraryNodeId);
|
||||
});
|
||||
if (!libraryNodeIds.length) return;
|
||||
|
||||
// Get the actual library Ids that each of these source nodes came from
|
||||
const sourceLibIds = new Set();
|
||||
LibraryNodes.find({
|
||||
_id: { $in: libraryNodeIds }
|
||||
}, {
|
||||
fields: { root: 1 }
|
||||
}).forEach(node => {
|
||||
sourceLibIds.add(node.root.id);
|
||||
});
|
||||
|
||||
// Assert copy permission on each of those libraries
|
||||
Libraries.find({
|
||||
_id: { $in: Array.from(sourceLibIds) }
|
||||
}, {
|
||||
fields: {
|
||||
name: 1,
|
||||
owner: 1,
|
||||
readers: 1,
|
||||
writers: 1,
|
||||
public: 1,
|
||||
readersCanCopy: 1,
|
||||
}
|
||||
}).forEach(lib => {
|
||||
try {
|
||||
assertCopyPermission(lib, method.userId);
|
||||
} catch (e) {
|
||||
throw new Meteor.Error('Copy permission denied',
|
||||
`One of the properties you are copying comes from ${lib.name}, which you do not have permission to copy from`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function cleanProps(props) {
|
||||
return props.map(prop => {
|
||||
let schema = LibraryNodes.simpleSchema(prop);
|
||||
return schema.clean(prop);
|
||||
});
|
||||
}
|
||||
|
||||
export default copyPropertyToLibrary;
|
||||
@@ -1,98 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import {
|
||||
getFilter,
|
||||
renewDocIds
|
||||
} from '/imports/api/parenting/parentingFunctions';
|
||||
import { rebuildNestedSets } from '/imports/api/parenting/parentingFunctions';
|
||||
var snackbar;
|
||||
if (Meteor.isClient) {
|
||||
snackbar = require(
|
||||
'/imports/client/ui/components/snackbars/SnackbarQueue'
|
||||
).snackbar
|
||||
}
|
||||
|
||||
const DUPLICATE_CHILDREN_LIMIT = 50;
|
||||
|
||||
const duplicateProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.duplicate',
|
||||
validate: new SimpleSchema({
|
||||
_id: {
|
||||
type: String,
|
||||
max: 32,
|
||||
}
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id }) {
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
if (!property) throw new Meteor.Error('not-found', 'The source property was not found');
|
||||
|
||||
const creature = getRootCreatureAncestor(property);
|
||||
|
||||
assertEditPermission(creature, this.userId);
|
||||
|
||||
// Renew the doc ID
|
||||
const randomSrc = DDP.randomStream('duplicateProperty');
|
||||
const propertyId = randomSrc.id();
|
||||
property._id = propertyId;
|
||||
|
||||
// Change the variableName so it isn't immediately overridden
|
||||
if (property.variableName) {
|
||||
property.variableName += 'Copy'
|
||||
}
|
||||
|
||||
// Get all the descendants
|
||||
const nodes = CreatureProperties.find({
|
||||
...getFilter.descendants(property),
|
||||
removed: { $ne: true },
|
||||
}, {
|
||||
limit: DUPLICATE_CHILDREN_LIMIT + 1,
|
||||
sort: { left: 1 },
|
||||
}).fetch();
|
||||
|
||||
// Alert the user if the limit was hit
|
||||
if (nodes.length > DUPLICATE_CHILDREN_LIMIT) {
|
||||
nodes.pop();
|
||||
if (Meteor.isClient) {
|
||||
snackbar({
|
||||
text: `Only the first ${DUPLICATE_CHILDREN_LIMIT} children were duplicated`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Give the docs new IDs without breaking internal references
|
||||
const allNodes = [property, ...nodes];
|
||||
renewDocIds({
|
||||
docArray: allNodes,
|
||||
idMap: {
|
||||
[_id]: propertyId,
|
||||
[propertyId]: propertyId,
|
||||
},
|
||||
});
|
||||
|
||||
// Order the root node
|
||||
property.left = Number.MAX_SAFE_INTEGER - 1;
|
||||
property.right = Number.MAX_SAFE_INTEGER;
|
||||
|
||||
// Mark the sheet as needing recompute
|
||||
property.dirty = true;
|
||||
|
||||
// Insert the properties
|
||||
CreatureProperties.batchInsert(allNodes);
|
||||
|
||||
// Tree structure changed by inserts, reorder the tree
|
||||
rebuildNestedSets(CreatureProperties, property.root.id);
|
||||
|
||||
return propertyId;
|
||||
},
|
||||
});
|
||||
|
||||
export default duplicateProperty;
|
||||
@@ -1,51 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import { organizeDoc } from '/imports/api/parenting/organizeMethods';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import BUILT_IN_TAGS from '/imports/constants/BUILT_IN_TAGS';
|
||||
import getParentRefByTag from './getParentByTag';
|
||||
|
||||
// Equipping or unequipping an item will also change its parent
|
||||
const equipItem = new ValidatedMethod({
|
||||
name: 'creatureProperties.equip',
|
||||
validate({ _id, equipped }) {
|
||||
if (!_id) throw new Meteor.Error('No _id', '_id is required');
|
||||
if (equipped !== true && equipped !== false) {
|
||||
throw new Meteor.Error('No equipped', 'equipped is required to be true or false');
|
||||
}
|
||||
},
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, equipped }) {
|
||||
let item = CreatureProperties.findOne(_id);
|
||||
if (item.type !== 'item') throw new Meteor.Error('wrong type',
|
||||
'Equip and unequip can only be performed on items');
|
||||
let creature = getRootCreatureAncestor(item);
|
||||
assertEditPermission(creature, this.userId);
|
||||
CreatureProperties.update(_id, {
|
||||
$set: { equipped, dirty: true },
|
||||
}, {
|
||||
selector: { type: 'item' },
|
||||
});
|
||||
let tag = equipped ? BUILT_IN_TAGS.equipment : BUILT_IN_TAGS.carried;
|
||||
let parentRef = getParentRefByTag(creature._id, tag);
|
||||
if (!parentRef) parentRef = { id: creature._id, collection: 'creatures' };
|
||||
|
||||
organizeDoc.callAsync({
|
||||
docRef: {
|
||||
id: _id,
|
||||
collection: 'creatureProperties',
|
||||
},
|
||||
parentRef,
|
||||
order: Number.MAX_SAFE_INTEGER,
|
||||
skipRecompute: true,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default equipItem;
|
||||
@@ -1,47 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
|
||||
const flipToggle = new ValidatedMethod({
|
||||
name: 'creatureProperties.flipToggle',
|
||||
validate({ _id }) {
|
||||
if (!_id) throw new Meteor.Error('No _id', '_id is required');
|
||||
},
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id }) {
|
||||
// Permission
|
||||
let property = CreatureProperties.findOne(_id, {
|
||||
fields: { type: 1, root: 1, enabled: 1, disabled: 1 }
|
||||
});
|
||||
if (property.type !== 'toggle') {
|
||||
throw new Meteor.Error('wrong property',
|
||||
'This method can only be applied to toggles');
|
||||
}
|
||||
if (!property.enabled && !property.disabled) {
|
||||
throw new Meteor.Error('Computed toggle',
|
||||
'Can\'t flip a toggle that is computed')
|
||||
}
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Invert the current value, disabled is the canonical store of value
|
||||
const currentValue = !property.disabled;
|
||||
CreatureProperties.update(_id, {
|
||||
$set: {
|
||||
enabled: !currentValue,
|
||||
disabled: currentValue,
|
||||
dirty: true,
|
||||
}
|
||||
}, {
|
||||
selector: { type: 'toggle' },
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default flipToggle;
|
||||
@@ -1,13 +0,0 @@
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { getFilter } from '/imports/api/parenting/parentingFunctions';
|
||||
|
||||
export default function getParentByTag(creatureId, tag) {
|
||||
return CreatureProperties.findOne({
|
||||
...getFilter.descendantsOfRoot(creatureId),
|
||||
removed: { $ne: true },
|
||||
inactive: { $ne: true },
|
||||
tags: tag,
|
||||
}, {
|
||||
sort: { left: 1 },
|
||||
});
|
||||
}
|
||||
@@ -1,79 +0,0 @@
|
||||
import { getFilter } from '/imports/api/parenting/parentingFunctions';
|
||||
|
||||
export default function getSlotFillFilter({ slot, libraryIds }) {
|
||||
|
||||
if (!slot) throw 'Slot is required for getSlotFillFilter';
|
||||
if (!libraryIds) throw 'LibraryIds is required for getSlotFillFilter';
|
||||
|
||||
let filter = {
|
||||
fillSlots: true,
|
||||
removed: { $ne: true },
|
||||
$and: [],
|
||||
};
|
||||
if (libraryIds.length) {
|
||||
Object.assign(
|
||||
filter,
|
||||
getFilter.descendantsOfAllRoots(libraryIds)
|
||||
);
|
||||
}
|
||||
if (slot.slotType) {
|
||||
filter.$and.push({
|
||||
$or: [{
|
||||
type: slot.slotType
|
||||
}, {
|
||||
slotFillerType: slot.slotType,
|
||||
}]
|
||||
});
|
||||
} else if (slot.type === 'class') {
|
||||
const classLevelFilter = {
|
||||
type: 'classLevel',
|
||||
};
|
||||
const slotFillerFilter = {
|
||||
slotFillerType: 'classLevel',
|
||||
};
|
||||
|
||||
// Match variable name or tags
|
||||
if (slot.variableName) {
|
||||
classLevelFilter.variableName = slot.variableName;
|
||||
slotFillerFilter.libraryTags = slot.variableName;
|
||||
}
|
||||
|
||||
// Only search for levels the class needs
|
||||
if (slot.missingLevels && slot.missingLevels.length) {
|
||||
classLevelFilter.level = { $in: slot.missingLevels };
|
||||
slotFillerFilter['cache.node.level'] = { $in: slot.missingLevels };
|
||||
} else {
|
||||
classLevelFilter.level = { $gt: slot.level || 0 };
|
||||
slotFillerFilter['cache.node.level'] = { $gt: slot.level || 0 };
|
||||
}
|
||||
|
||||
filter.$and.push({
|
||||
$or: [classLevelFilter, slotFillerFilter]
|
||||
});
|
||||
}
|
||||
let tagsOr = [];
|
||||
let tagsNin = [];
|
||||
if (slot.slotTags && slot.slotTags.length) {
|
||||
tagsOr.push({ libraryTags: { $all: slot.slotTags } });
|
||||
}
|
||||
if (slot.extraTags && slot.extraTags.length) {
|
||||
slot.extraTags.forEach(extra => {
|
||||
if (!extra.tags || !extra.tags.length) return;
|
||||
if (extra.operation === 'OR') {
|
||||
tagsOr.push({ libraryTags: { $all: extra.tags } });
|
||||
} else if (extra.operation === 'NOT') {
|
||||
tagsNin.push(...extra.tags);
|
||||
}
|
||||
});
|
||||
}
|
||||
if (tagsOr.length) {
|
||||
filter.$or = tagsOr;
|
||||
}
|
||||
if (tagsNin.length) {
|
||||
filter.$and.push({ libraryTags: { $nin: tagsNin } });
|
||||
}
|
||||
if (!filter.$and.length) {
|
||||
delete filter.$and;
|
||||
}
|
||||
return filter;
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import getSlotFillFilter from '/imports/api/creature/creatureProperties/methods/getSlotFillFilter';
|
||||
|
||||
describe('Slot fill filter', function () {
|
||||
|
||||
it('Gives error if arguments aren\'t provided', function () {
|
||||
assert.throws(
|
||||
() => getSlotFillFilter(undefined),
|
||||
null, null, 'Passing undefined should give an error'
|
||||
);
|
||||
assert.throws(
|
||||
() => getSlotFillFilter({
|
||||
slot: { slotTags: ['tag1'] },
|
||||
}),
|
||||
null, null, 'Passing no libraryIds should give an error'
|
||||
);
|
||||
assert.throws(
|
||||
() => getSlotFillFilter({
|
||||
libraryIds: ['libraryId1'],
|
||||
}),
|
||||
null, null, 'Passing no slot should give an error'
|
||||
);
|
||||
});
|
||||
|
||||
it('filters using basic slot tags', function () {
|
||||
const filter = getSlotFillFilter({
|
||||
slot: {
|
||||
slotTags: ['tag1', 'tag2']
|
||||
},
|
||||
libraryIds: ['libraryId1', 'libraryId2'],
|
||||
});
|
||||
assert.deepStrictEqual(filter, {
|
||||
$or: [{
|
||||
libraryTags: { $all: ['tag1', 'tag2'] }
|
||||
}],
|
||||
'root.id': { $in: ['libraryId1', 'libraryId2'] },
|
||||
removed: { $ne: true },
|
||||
fillSlots: true,
|
||||
});
|
||||
});
|
||||
|
||||
it('filters using slot type', function () {
|
||||
const filter = getSlotFillFilter({
|
||||
slot: {
|
||||
slotTags: ['tag1', 'tag2'],
|
||||
slotType: 'feature',
|
||||
},
|
||||
libraryIds: ['libraryId1', 'libraryId2']
|
||||
});
|
||||
assert.deepStrictEqual(filter.$and, [{
|
||||
$or: [{
|
||||
type: 'feature'
|
||||
}, {
|
||||
slotFillerType: 'feature',
|
||||
}],
|
||||
}]);
|
||||
});
|
||||
|
||||
it('filters using extra tags', function () {
|
||||
const filter = getSlotFillFilter({
|
||||
slot: {
|
||||
slotTags: ['tag1', 'tag2'],
|
||||
extraTags: [
|
||||
{ operation: 'OR', tags: ['tag3', 'tag4'] },
|
||||
{ operation: 'NOT', tags: ['tag5', 'tag6'] },
|
||||
{ operation: 'NOT', tags: ['tag7', 'tag8'] },
|
||||
],
|
||||
},
|
||||
libraryIds: ['libraryId1', 'libraryId2'],
|
||||
});
|
||||
assert.deepStrictEqual(filter, {
|
||||
$or: [
|
||||
{ libraryTags: { $all: ['tag1', 'tag2'] } },
|
||||
{ libraryTags: { $all: ['tag3', 'tag4'] } },
|
||||
],
|
||||
$and: [
|
||||
{ libraryTags: { $nin: ['tag5', 'tag6', 'tag7', 'tag8'] } },
|
||||
],
|
||||
'root.id': { $in: ['libraryId1', 'libraryId2'] },
|
||||
removed: { $ne: true },
|
||||
fillSlots: true,
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
@@ -1,13 +0,0 @@
|
||||
import '/imports/api/creature/creatureProperties/methods/adjustQuantity';
|
||||
import '/imports/api/creature/creatureProperties/methods/copyPropertyToLibrary';
|
||||
import '/imports/api/creature/creatureProperties/methods/duplicateProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/equipItem';
|
||||
import '/imports/api/creature/creatureProperties/methods/insertProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/insertPropertyFromLibraryNode';
|
||||
import '/imports/api/creature/creatureProperties/methods/pullFromProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/pushToProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/restoreProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/selectAmmoItem';
|
||||
import '/imports/api/creature/creatureProperties/methods/softRemoveProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/updateCreatureProperty';
|
||||
import '/imports/api/creature/creatureProperties/methods/flipToggle';
|
||||
@@ -1,126 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import { fetchDocByRef, rebuildNestedSets } from '/imports/api/parenting/parentingFunctions';
|
||||
import getParentRefByTag from './getParentByTag';
|
||||
import { RefSchema } from '/imports/api/parenting/ChildSchema';
|
||||
|
||||
const insertProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.insert',
|
||||
validate: new SimpleSchema({
|
||||
creatureProperty: {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
},
|
||||
parentRef: RefSchema,
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ creatureProperty, parentRef }) {
|
||||
let rootCreature;
|
||||
const parentDoc = fetchDocByRef(parentRef);
|
||||
|
||||
// Check permission to edit
|
||||
if (parentRef.collection === 'creatures') {
|
||||
rootCreature = parentDoc;
|
||||
} else if (parentRef.collection === 'creatureProperties') {
|
||||
rootCreature = getRootCreatureAncestor(parentDoc);
|
||||
creatureProperty.parentId = parentDoc._id;
|
||||
} else {
|
||||
throw `${parentRef.collection} is not a valid parent collection`
|
||||
}
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
creatureProperty.root = { collection: 'creatures', id: rootCreature._id };
|
||||
|
||||
return insertPropertyWork(creatureProperty);
|
||||
},
|
||||
});
|
||||
|
||||
const insertPropertyAsChildOfTag = new ValidatedMethod({
|
||||
name: 'creatureProperties.insertAsChildOfTag',
|
||||
validate: new SimpleSchema({
|
||||
creatureProperty: {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
},
|
||||
creatureId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
tag: {
|
||||
type: String,
|
||||
max: 20,
|
||||
},
|
||||
tagDefaultName: {
|
||||
type: String,
|
||||
max: 20,
|
||||
optional: true,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ creatureProperty, creatureId, tag, tagDefaultName }) {
|
||||
let parentRef = getParentRefByTag(creatureId, tag);
|
||||
let insertFolderFirst = false;
|
||||
|
||||
if (!parentRef) {
|
||||
// Use the creature as the parent and mark that we need to insert the folder first later
|
||||
insertFolderFirst = true;
|
||||
parentRef = { id: creatureId, collection: 'creatures' };
|
||||
}
|
||||
|
||||
// Check permission to edit
|
||||
let rootCreature;
|
||||
const parentDoc = fetchDocByRef(parentRef);
|
||||
if (parentRef.collection === 'creatures') {
|
||||
rootCreature = parentDoc;
|
||||
} else if (parentRef.collection === 'creatureProperties') {
|
||||
rootCreature = getRootCreatureAncestor(parentDoc);
|
||||
} else {
|
||||
throw `${parentRef.collection} is not a valid parent collection`
|
||||
}
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
const root = { collection: 'creatures', id: rootCreature._id };
|
||||
|
||||
// Add the folder first if we need to
|
||||
if (insertFolderFirst) {
|
||||
let id = CreatureProperties.insert({
|
||||
type: 'folder',
|
||||
name: tagDefaultName || (tag.charAt(0).toUpperCase() + tag.slice(1)),
|
||||
tags: [tag],
|
||||
// parentId: undefined,
|
||||
root,
|
||||
});
|
||||
// Make the folder our new parent
|
||||
parentRef = { id, collection: 'creatureProperties' };
|
||||
}
|
||||
|
||||
creatureProperty.root = root;
|
||||
creatureProperty.parentId = parentRef.id;
|
||||
|
||||
return insertPropertyWork(creatureProperty);
|
||||
},
|
||||
});
|
||||
|
||||
export function insertPropertyWork(property) {
|
||||
delete property._id;
|
||||
property.dirty = true;
|
||||
let _id = CreatureProperties.insert(property);
|
||||
// Tree structure changed by insert, reorder the tree
|
||||
rebuildNestedSets(CreatureProperties, property.root.id);
|
||||
return _id;
|
||||
}
|
||||
|
||||
export default insertProperty;
|
||||
export { insertPropertyAsChildOfTag };
|
||||
@@ -1,209 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import LibraryNodes from '/imports/api/library/LibraryNodes';
|
||||
import { RefSchema } from '/imports/api/parenting/ChildSchema';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import {
|
||||
renewDocIds,
|
||||
fetchDocByRef,
|
||||
rebuildNestedSets,
|
||||
getFilter
|
||||
} from '/imports/api/parenting/parentingFunctions';
|
||||
import { union } from 'lodash';
|
||||
|
||||
const insertPropertyFromLibraryNode = new ValidatedMethod({
|
||||
name: 'creatureProperties.insertPropertyFromLibraryNode',
|
||||
validate: new SimpleSchema({
|
||||
nodeIds: {
|
||||
type: Array,
|
||||
max: 20,
|
||||
},
|
||||
'nodeIds.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
parentRef: {
|
||||
type: RefSchema,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ nodeIds, parentRef }) {
|
||||
// get the new ancestry for the properties
|
||||
const parentDoc = fetchDocByRef(parentRef);
|
||||
|
||||
// Check permission to edit
|
||||
let rootCreature;
|
||||
if (parentRef.collection === 'creatures') {
|
||||
rootCreature = parentDoc;
|
||||
} else if (parentRef.collection === 'creatureProperties') {
|
||||
rootCreature = getRootCreatureAncestor(parentDoc);
|
||||
} else {
|
||||
throw `${parentRef.collection} is not a valid parent collection`
|
||||
}
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
const root = { collection: 'creatures', id: rootCreature._id };
|
||||
const parentId = parentRef.id;
|
||||
|
||||
let node;
|
||||
nodeIds.forEach(nodeId => {
|
||||
node = insertPropertyFromNode(nodeId, root, parentId);
|
||||
});
|
||||
|
||||
// Tree structure changed by inserts, reorder the tree
|
||||
rebuildNestedSets(CreatureProperties, rootCreature._id);
|
||||
|
||||
// get one of the root inserted docs
|
||||
const lastInsertedId = node?._id;
|
||||
return lastInsertedId;
|
||||
},
|
||||
});
|
||||
|
||||
function insertPropertyFromNode(nodeId, root, parentId) {
|
||||
// Fetch the library node and its descendants, provided they have not been
|
||||
// removed
|
||||
let node = LibraryNodes.findOne({
|
||||
_id: nodeId,
|
||||
removed: { $ne: true },
|
||||
});
|
||||
if (!node) {
|
||||
if (Meteor.isClient) return {};
|
||||
else {
|
||||
throw new Meteor.Error(
|
||||
'Insert property from library failed',
|
||||
`No library document with id '${nodeId}' was found`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let nodes = LibraryNodes.find({
|
||||
...getFilter.descendants(node),
|
||||
removed: { $ne: true },
|
||||
}).fetch();
|
||||
|
||||
// The root node is first in the array of nodes
|
||||
// It must get the first generated ID to prevent flickering
|
||||
nodes = [node, ...nodes];
|
||||
|
||||
// Convert all references into actual nodes
|
||||
nodes = reifyNodeReferences(nodes);
|
||||
// Refetch the root node, it might have been reified
|
||||
node = nodes[0] || node;
|
||||
|
||||
// set libraryNodeIds
|
||||
storeLibraryNodeReferences(nodes);
|
||||
|
||||
// Give the docs new IDs without breaking internal references
|
||||
renewDocIds({
|
||||
docArray: nodes,
|
||||
collectionMap: { 'libraryNodes': 'creatureProperties' }
|
||||
});
|
||||
|
||||
// Mark root node as dirty
|
||||
node.dirty = true;
|
||||
|
||||
// Move the root node to the end of the order
|
||||
node.left = Number.MAX_SAFE_INTEGER;
|
||||
|
||||
// Insert the creature properties
|
||||
CreatureProperties.batchInsert(nodes);
|
||||
return node;
|
||||
}
|
||||
|
||||
export function storeLibraryNodeReferences(nodes) {
|
||||
nodes.forEach(node => {
|
||||
if (node.libraryNodeId) return;
|
||||
node.libraryNodeId = node._id;
|
||||
});
|
||||
}
|
||||
|
||||
// Covert node references into actual nodes
|
||||
// TODO: check permissions for each library a reference node references
|
||||
export function reifyNodeReferences(nodes, visitedRefs = new Set(), depth = 0) {
|
||||
depth += 1;
|
||||
// New nodes added this function
|
||||
let newNodes = [];
|
||||
|
||||
// Filter out the reference nodes we replace
|
||||
let resultingNodes = nodes.filter(node => {
|
||||
// This isn't a reference node, continue as normal
|
||||
if (node.type !== 'reference') return true;
|
||||
|
||||
// We have gone too deep, keep the reference node as an error
|
||||
if (depth >= 10) {
|
||||
if (Meteor.isClient) console.warn('Reference depth limit exceeded');
|
||||
node.cache = { error: 'Reference depth limit exceeded' };
|
||||
return true;
|
||||
}
|
||||
|
||||
let referencedNode
|
||||
try {
|
||||
referencedNode = fetchDocByRef(node.ref);
|
||||
referencedNode.tags = union(node.tags, referencedNode.tags);
|
||||
// We are definitely replacing this node, so add it to the list
|
||||
visitedRefs.add(node._id);
|
||||
} catch (e) {
|
||||
node.cache = { error: e.reason || e.message || e.toString() };
|
||||
return true;
|
||||
}
|
||||
|
||||
// Get all the descendants of the referenced node
|
||||
let descendants = LibraryNodes.find({
|
||||
...getFilter.descendants(referencedNode),
|
||||
removed: { $ne: true },
|
||||
}, {
|
||||
sort: { left: 1 },
|
||||
}).fetch();
|
||||
|
||||
// We are adding the referenced node and its descendants
|
||||
let addedNodes = [referencedNode, ...descendants];
|
||||
|
||||
// Filter all the looped references
|
||||
addedNodes = addedNodes.filter(addedNode => {
|
||||
// Add all non-reference nodes
|
||||
if (addedNode.type !== 'reference') {
|
||||
return true;
|
||||
}
|
||||
// If this exact reference has already been resolved before, filter it out
|
||||
if (visitedRefs.has(addedNode._id)) {
|
||||
return false;
|
||||
} else {
|
||||
// Otherwise mark it as visited, and keep it
|
||||
visitedRefs.add(addedNode._id);
|
||||
return true;
|
||||
}
|
||||
});
|
||||
|
||||
// Before renewing Ids make sure the library node reference is stored
|
||||
storeLibraryNodeReferences(addedNodes);
|
||||
|
||||
// Give the new referenced sub-tree new ids
|
||||
// The referenced node must get the id of the ref node so that the
|
||||
// descendants of the ref node keep their ancestry intact
|
||||
renewDocIds({
|
||||
docArray: addedNodes,
|
||||
idMap: { [referencedNode._id]: node._id },
|
||||
});
|
||||
|
||||
// Reify the subtree as well with recursion
|
||||
addedNodes = reifyNodeReferences(addedNodes, visitedRefs, depth);
|
||||
|
||||
// Store the new nodes from this inner loop without altering the array
|
||||
// we are looping over
|
||||
newNodes.push(...addedNodes);
|
||||
});
|
||||
|
||||
// We are done filtering the array, we can add the new nodes to it
|
||||
resultingNodes.push(...newNodes);
|
||||
|
||||
return resultingNodes;
|
||||
}
|
||||
|
||||
export default insertPropertyFromLibraryNode;
|
||||
@@ -1,32 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
|
||||
const pullFromProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.pull',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, itemId }) {
|
||||
// Permissions
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Do work
|
||||
CreatureProperties.update(_id, {
|
||||
$pull: { [path.join('.')]: { _id: itemId } },
|
||||
$set: { dirty: true }
|
||||
}, {
|
||||
selector: { type: property.type },
|
||||
getAutoValues: false,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default pullFromProperty;
|
||||
@@ -1,49 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import { get } from 'lodash';
|
||||
|
||||
const pushToProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.push',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, value }) {
|
||||
// Permissions
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
let joinedPath = path.join('.');
|
||||
|
||||
// Respect maxCount
|
||||
let schema = CreatureProperties.simpleSchema(property);
|
||||
let maxCount = schema.get(joinedPath, 'maxCount');
|
||||
|
||||
if (Number.isFinite(maxCount)) {
|
||||
let array = get(property, path);
|
||||
let currentCount = array ? array.length : 0;
|
||||
if (currentCount >= maxCount) {
|
||||
throw new Meteor.Error(
|
||||
'Array is full',
|
||||
`Cannot have more than ${maxCount} values`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Do work
|
||||
CreatureProperties.update(_id, {
|
||||
$push: { [joinedPath]: value },
|
||||
$set: { dirty: true },
|
||||
}, {
|
||||
selector: { type: property.type },
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export default pushToProperty;
|
||||
@@ -1,30 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import { restore } from '/imports/api/parenting/softRemove';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
|
||||
const restoreProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.restore',
|
||||
validate: new SimpleSchema({
|
||||
_id: SimpleSchema.RegEx.Id
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id }) {
|
||||
// Permissions
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Do work
|
||||
restore(CreatureProperties, property, { $set: { dirty: true } });
|
||||
}
|
||||
});
|
||||
|
||||
export default restoreProperty;
|
||||
@@ -1,46 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
|
||||
const selectAmmoItem = new ValidatedMethod({
|
||||
name: 'creatureProperties.selectAmmoItem',
|
||||
validate: new SimpleSchema({
|
||||
actionId: SimpleSchema.RegEx.Id,
|
||||
itemId: SimpleSchema.RegEx.Id,
|
||||
itemConsumedIndex: Number,
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ actionId, itemId, itemConsumedIndex }) {
|
||||
// Permissions
|
||||
let action = CreatureProperties.findOne(actionId);
|
||||
let rootCreature = getRootCreatureAncestor(action);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Check that this index has a document to edit
|
||||
let itemConsumed = action.resources.itemsConsumed[itemConsumedIndex];
|
||||
if (!itemConsumed) {
|
||||
throw new Meteor.Error('Resouce not found',
|
||||
'Could not set ammo, because the ammo document was not found');
|
||||
}
|
||||
let itemToLink = CreatureProperties.findOne(itemId);
|
||||
if (!itemToLink) {
|
||||
throw new Meteor.Error('Item not found',
|
||||
'Could not set ammo: the item was not found');
|
||||
}
|
||||
let path = `resources.itemsConsumed.${itemConsumedIndex}.itemId`;
|
||||
CreatureProperties.update(actionId, {
|
||||
$set: { [path]: itemId, dirty: true }
|
||||
}, {
|
||||
selector: action,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default selectAmmoItem;
|
||||
@@ -1,30 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import { softRemove } from '/imports/api/parenting/softRemove';
|
||||
import getRootCreatureAncestor from '/imports/api/creature/creatureProperties/getRootCreatureAncestor';
|
||||
|
||||
const softRemoveProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.softRemove',
|
||||
validate: new SimpleSchema({
|
||||
_id: SimpleSchema.RegEx.Id
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id }) {
|
||||
// Permissions
|
||||
let property = CreatureProperties.findOne(_id);
|
||||
let rootCreature = getRootCreatureAncestor(property);
|
||||
assertEditPermission(rootCreature, this.userId);
|
||||
|
||||
// Do work
|
||||
softRemove(CreatureProperties, property);
|
||||
}
|
||||
});
|
||||
|
||||
export default softRemoveProperty;
|
||||
@@ -1,47 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import { assertDocEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
|
||||
const updateCreatureProperty = new ValidatedMethod({
|
||||
name: 'creatureProperties.update',
|
||||
validate({ _id, path }) {
|
||||
if (!_id) throw new Meteor.Error('No _id', '_id is required');
|
||||
// We cannot change these fields with a simple update
|
||||
switch (path[0]) {
|
||||
case 'type':
|
||||
case 'root':
|
||||
case 'left':
|
||||
case 'right':
|
||||
case 'parentId':
|
||||
throw new Meteor.Error('Permission denied',
|
||||
'This property can\'t be updated directly');
|
||||
}
|
||||
},
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 12,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, value }) {
|
||||
// Permission
|
||||
const property = CreatureProperties.findOne(_id, {
|
||||
fields: { type: 1, root: 1 }
|
||||
});
|
||||
assertDocEditPermission(property, this.userId);
|
||||
|
||||
const pathString = path.join('.');
|
||||
let modifier;
|
||||
// unset empty values
|
||||
if (value === null || value === undefined) {
|
||||
modifier = { $unset: { [pathString]: 1 }, $set: { dirty: true } };
|
||||
} else {
|
||||
modifier = { $set: { [pathString]: value, dirty: true } };
|
||||
}
|
||||
CreatureProperties.update(_id, modifier, {
|
||||
selector: { type: property.type },
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default updateCreatureProperty;
|
||||
@@ -1,103 +0,0 @@
|
||||
import { getSingleProperty } from '/imports/api/engine/loadCreatures';
|
||||
import ParseNode from '/imports/parser/parseTree/ParseNode';
|
||||
import array from '/imports/parser/parseTree/array';
|
||||
import constant, { isFiniteNode } from '/imports/parser/parseTree/constant';
|
||||
|
||||
//set up the collection for creature variables
|
||||
const CreatureVariables = new Mongo.Collection('creatureVariables');
|
||||
|
||||
// Unique index on _creatureId
|
||||
if (Meteor.isServer) {
|
||||
CreatureVariables._ensureIndex({ _creatureId: 1 }, { unique: true })
|
||||
}
|
||||
|
||||
/** No schema because the structure isn't known until compute time
|
||||
* Expect documents to look like:
|
||||
* {
|
||||
* _id: "nE8Ngd6K4L4jSxLY2",
|
||||
* _creatureId: "nE8Ngd6K4L4jSxLY2", // indexed reference to the creature
|
||||
* explicitlyDefinedVariableName: {...some creatureProperty},
|
||||
* // Must be found in CreatureProperties before using:
|
||||
* linkedProperty: { _propId: "nE8Ngd6K1234SxLY2" }
|
||||
* implicitVariableName: {value: 10},
|
||||
* undefinedVariableName: {},
|
||||
* }
|
||||
* Where top level fields that don't start with `_` are variables on the sheet
|
||||
**/
|
||||
|
||||
/**
|
||||
* Get the property from the given scope, respecting properties that are just a link to the actual
|
||||
* property document
|
||||
*/
|
||||
export function getFromScope(name: string, scope) {
|
||||
let value = scope?.[name];
|
||||
if (value?._propId) {
|
||||
const [propId, rowIdentifier, rowNumber] = value._propId.split('_');
|
||||
value = getSingleProperty(scope._creatureId, propId);
|
||||
if (rowIdentifier === 'row' && value?.type === 'pointBuy') {
|
||||
value = value.values[rowNumber];
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function getNumberFromScope(name, scope) {
|
||||
const parseNode = getParseNodeFromScope(name, scope);
|
||||
if (!parseNode || !isFiniteNode(parseNode)) {
|
||||
return undefined;
|
||||
}
|
||||
return parseNode.value;
|
||||
}
|
||||
|
||||
export async function getConstantValueFromScope(
|
||||
name, scope
|
||||
) {
|
||||
const parseNode = getParseNodeFromScope(name, scope);
|
||||
if (!parseNode) return;
|
||||
if (parseNode.parseType !== 'constant') return;
|
||||
return parseNode.value;
|
||||
}
|
||||
|
||||
export function getParseNodeFromScope(name, scope): ParseNode | undefined {
|
||||
let value = getFromScope(name, scope);
|
||||
if (!value) return;
|
||||
let valueType = getType(value);
|
||||
// Iterate into object.values
|
||||
while (valueType === 'object') {
|
||||
// Prefer the valueNode over the value
|
||||
if (value.valueNode) {
|
||||
value = value.valueNode;
|
||||
} else {
|
||||
value = value.value;
|
||||
}
|
||||
valueType = getType(value);
|
||||
}
|
||||
// Return a discovered parse node
|
||||
if (valueType === 'parseNode') {
|
||||
return value;
|
||||
}
|
||||
// Return a parse node based on the constant type returned
|
||||
if (valueType === 'string' || valueType === 'number' || valueType === 'boolean') {
|
||||
return constant.create({ value });
|
||||
}
|
||||
// Return a parser array
|
||||
if (valueType === 'array') {
|
||||
// If the first value is a parse node, assume all the values are
|
||||
if (getType(value[0]) === 'parseNode') {
|
||||
return array.create({
|
||||
values: value,
|
||||
});
|
||||
}
|
||||
// Create the array from js primitives instead
|
||||
return array.fromConstantArray(value);
|
||||
}
|
||||
}
|
||||
|
||||
function getType(val) {
|
||||
if (!val) return typeof val;
|
||||
if (Array.isArray(val)) return 'array';
|
||||
if (val.parseType) return 'parseNode';
|
||||
return typeof val;
|
||||
}
|
||||
|
||||
export default CreatureVariables;
|
||||
@@ -1,194 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import ColorSchema from '/imports/api/properties/subSchemas/ColorSchema';
|
||||
import SharingSchema from '/imports/api/sharing/SharingSchema';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
import { InferType, TypedSimpleSchema } from '/imports/api/utility/TypedSimpleSchema';
|
||||
import type { Simplify } from 'type-fest';
|
||||
|
||||
const CreatureSettingsSchema = TypedSimpleSchema.from({
|
||||
//slowed down by carrying too much?
|
||||
useVariantEncumbrance: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
//hide spellcasting tab
|
||||
hideSpellcasting: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
//hide rest buttons
|
||||
hideRestButtons: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Swap around the modifier and stat
|
||||
swapStatAndModifier: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Hide all the unused stats
|
||||
hideUnusedStats: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Show the tree tab
|
||||
showTreeTab: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Hide the spells tab
|
||||
hideSpellsTab: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Hide calculation errors
|
||||
hideCalculationErrors: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// How much each hitDice resets on a long rest
|
||||
hitDiceResetMultiplier: {
|
||||
type: Number,
|
||||
optional: true,
|
||||
min: 0,
|
||||
max: 1,
|
||||
},
|
||||
discordWebhook: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.url,
|
||||
},
|
||||
});
|
||||
|
||||
const CreatureSchema = TypedSimpleSchema.from({
|
||||
// Strings
|
||||
name: {
|
||||
type: String,
|
||||
defaultValue: '',
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
alignment: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
gender: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
picture: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.url,
|
||||
},
|
||||
avatarPicture: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.url,
|
||||
},
|
||||
|
||||
// Libraries
|
||||
allowedLibraries: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
maxCount: 100,
|
||||
},
|
||||
'allowedLibraries.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
allowedLibraryCollections: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
maxCount: 100,
|
||||
},
|
||||
'allowedLibraryCollections.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
|
||||
// Stats that are computed and denormalised outside of recomputation
|
||||
denormalizedStats: {
|
||||
type: Object,
|
||||
defaultValue: {},
|
||||
},
|
||||
// Sum of all XP gained by this character
|
||||
'denormalizedStats.xp': {
|
||||
type: SimpleSchema.Integer,
|
||||
defaultValue: 0,
|
||||
},
|
||||
// Sum of all levels granted by milestone XP
|
||||
'denormalizedStats.milestoneLevels': {
|
||||
type: SimpleSchema.Integer,
|
||||
defaultValue: 0,
|
||||
},
|
||||
propCount: {
|
||||
type: SimpleSchema.Integer,
|
||||
defaultValue: 0,
|
||||
},
|
||||
// Does the character need a recompute?
|
||||
dirty: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// Version of computation engine that was last used to compute this creature
|
||||
computeVersion: {
|
||||
type: String,
|
||||
optional: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
defaultValue: 'pc',
|
||||
allowedValues: ['pc', 'npc', 'monster'],
|
||||
},
|
||||
computeErrors: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
},
|
||||
'computeErrors.$': {
|
||||
type: Object,
|
||||
},
|
||||
'computeErrors.$.type': {
|
||||
type: String,
|
||||
},
|
||||
'computeErrors.$.details': {
|
||||
type: Object,
|
||||
blackbox: true,
|
||||
optional: true,
|
||||
},
|
||||
lastComputedAt: {
|
||||
type: Date,
|
||||
optional: true,
|
||||
},
|
||||
|
||||
// Tabletop
|
||||
tabletopId: {
|
||||
index: 1,
|
||||
type: String,
|
||||
max: 32,
|
||||
optional: true,
|
||||
},
|
||||
initiativeRoll: {
|
||||
type: SimpleSchema.Integer,
|
||||
optional: true,
|
||||
},
|
||||
|
||||
// Settings
|
||||
settings: {
|
||||
type: CreatureSettingsSchema,
|
||||
defaultValue: {},
|
||||
},
|
||||
})
|
||||
.extend(ColorSchema)
|
||||
.extend(SharingSchema);
|
||||
|
||||
export type Creature = Simplify<{ _id: string } & InferType<typeof CreatureSchema>>;
|
||||
|
||||
//set up the collection for creatures
|
||||
const Creatures = new Mongo.Collection<Creature>('creatures');
|
||||
Creatures.attachSchema(CreatureSchema);
|
||||
|
||||
export default Creatures;
|
||||
export { CreatureSchema };
|
||||
@@ -1,29 +0,0 @@
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import {
|
||||
assertEditPermission as editPermission,
|
||||
assertViewPermission as viewPermission,
|
||||
assertOwnership as ownership
|
||||
} from '/imports/api/sharing/sharingPermissions';
|
||||
|
||||
function getCreature(creature, fields) {
|
||||
if (typeof creature === 'string') {
|
||||
return Creatures.findOne(creature, { fields });
|
||||
} else {
|
||||
return creature;
|
||||
}
|
||||
}
|
||||
|
||||
export function assertOwnership(creature, userId) {
|
||||
creature = getCreature(creature, { owner: 1 });
|
||||
ownership(creature, userId);
|
||||
}
|
||||
|
||||
export function assertEditPermission(creature, userId) {
|
||||
creature = getCreature(creature, { owner: 1, writers: 1 });
|
||||
editPermission(creature, userId);
|
||||
}
|
||||
|
||||
export function assertViewPermission(creature, userId) {
|
||||
creature = getCreature(creature, { owner: 1, readers: 1, writers: 1, public: 1 });
|
||||
viewPermission(creature, userId);
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
import BUILT_IN_TAGS from '/imports/constants/BUILT_IN_TAGS';
|
||||
|
||||
export default function defaultCharacterProperties(creatureId) {
|
||||
if (!creatureId) throw 'creatureId is required';
|
||||
const creatureRef = { collection: 'creatures', id: creatureId };
|
||||
let randomSrc = DDP.randomStream('defaultProperties');
|
||||
const inventoryId = randomSrc.id();
|
||||
return [
|
||||
{
|
||||
type: 'propertySlot',
|
||||
name: 'Ruleset',
|
||||
description: { text: 'Choose a starting point for your character, this will define the basic setup of your character sheet. Without a base ruleset, your sheet will be empty.' },
|
||||
slotTags: ['base'],
|
||||
tags: [],
|
||||
quantityExpected: { calculation: '1' },
|
||||
hideWhenFull: true,
|
||||
spaceLeft: 1,
|
||||
totalFilled: 0,
|
||||
left: 1,
|
||||
right: 2,
|
||||
parentId: creatureId,
|
||||
root: creatureRef,
|
||||
}, {
|
||||
_id: inventoryId,
|
||||
type: 'folder',
|
||||
name: 'Inventory',
|
||||
tags: [BUILT_IN_TAGS.inventory],
|
||||
left: 3,
|
||||
right: 8,
|
||||
parentId: creatureId,
|
||||
root: creatureRef,
|
||||
}, {
|
||||
type: 'folder',
|
||||
name: 'Equipment',
|
||||
tags: [BUILT_IN_TAGS.equipment],
|
||||
left: 4,
|
||||
right: 5,
|
||||
parentId: inventoryId,
|
||||
root: creatureRef,
|
||||
}, {
|
||||
type: 'folder',
|
||||
name: 'Carried',
|
||||
tags: [BUILT_IN_TAGS.carried],
|
||||
left: 6,
|
||||
right: 7,
|
||||
parent: inventoryId,
|
||||
root: creatureRef,
|
||||
},
|
||||
];
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
import getSlug from 'speakingurl';
|
||||
|
||||
export default function getCreatureUrlName({name}){
|
||||
return getSlug(name, {maintainCase: true}) || '-';
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import { getUserTier } from '/imports/api/users/patreon/tiers';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
|
||||
export default function assertHasCharactersSlots(userId) {
|
||||
if (characterSlotsRemaining(userId) <= 0) {
|
||||
throw new Meteor.Error('characterSlotLimit',
|
||||
'No character slots left')
|
||||
}
|
||||
}
|
||||
|
||||
export function characterSlotsRemaining(userId) {
|
||||
let tier = getUserTier(userId);
|
||||
const currentCharacterCount = Creatures.find({
|
||||
owner: userId,
|
||||
}, {
|
||||
fields: { _id: 1 },
|
||||
}).count();
|
||||
if (tier.characterSlots === -1) {
|
||||
return Number.POSITIVE_INFINITY;
|
||||
}
|
||||
return tier.characterSlots - currentCharacterCount;
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import simpleSchemaMixin from '/imports/api/creature/mixins/simpleSchemaMixin';
|
||||
|
||||
const changeAllowedLibraries = new ValidatedMethod({
|
||||
name: 'creatures.changeAllowedLibraries',
|
||||
mixins: [RateLimiterMixin, simpleSchemaMixin],
|
||||
schema: new SimpleSchema({
|
||||
_id: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
allowedLibraries: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
maxCount: 100,
|
||||
},
|
||||
'allowedLibraries.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
allowedLibraryCollections: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
maxCount: 100,
|
||||
},
|
||||
'allowedLibraryCollections.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}),
|
||||
rateLimit: {
|
||||
numRequests: 10,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, allowedLibraries, allowedLibraryCollections }) {
|
||||
let creature = Creatures.findOne(_id);
|
||||
assertEditPermission(creature, this.userId);
|
||||
let $set;
|
||||
if (allowedLibraries) {
|
||||
$set = { allowedLibraries }
|
||||
}
|
||||
if (allowedLibraryCollections) {
|
||||
if (!$set) $set = {};
|
||||
$set.allowedLibraryCollections = allowedLibraryCollections;
|
||||
}
|
||||
if (!$set) return;
|
||||
Creatures.update(_id, { $set });
|
||||
},
|
||||
});
|
||||
|
||||
const toggleAllUserLibraries = new ValidatedMethod({
|
||||
name: 'creatures.removeLibraryLimits',
|
||||
mixins: [RateLimiterMixin, simpleSchemaMixin],
|
||||
schema: new SimpleSchema({
|
||||
_id: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
value: {
|
||||
type: Boolean,
|
||||
},
|
||||
}),
|
||||
rateLimit: {
|
||||
numRequests: 10,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, value }) {
|
||||
if (value) {
|
||||
Creatures.update(_id, {
|
||||
$unset: {
|
||||
allowedLibraryCollections: 1,
|
||||
allowedLibraries: 1,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
Creatures.update(_id, {
|
||||
$set: {
|
||||
allowedLibraryCollections: [],
|
||||
allowedLibraries: [],
|
||||
},
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
export { changeAllowedLibraries, toggleAllUserLibraries };
|
||||
@@ -1,107 +0,0 @@
|
||||
import SCHEMA_VERSION from '/imports/constants/SCHEMA_VERSION';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import CreatureLogs from '/imports/api/creature/log/CreatureLogs';
|
||||
import Experiences from '/imports/api/creature/experience/Experiences';
|
||||
import { removeCreatureWork } from '/imports/api/creature/creatures/methods/removeCreature';
|
||||
import assertHasCharactersSlots from '/imports/api/creature/creatures/methods/assertHasCharacterSlots';
|
||||
import verifyArchiveSafety from '/imports/api/creature/archive/methods/verifyArchiveSafety';
|
||||
|
||||
let migrateApiCreature;
|
||||
if (Meteor.isServer) {
|
||||
migrateApiCreature = require('/imports/migrations/apiCreature/migrateApiCreature.js').default;
|
||||
}
|
||||
|
||||
function importApiCreature(apiCreature, userId) {
|
||||
const apiVersion = apiCreature.meta?.schemaVersion ?? 2;
|
||||
const creature = apiCreature.creatures[0];
|
||||
const creatureId = creature._id;
|
||||
if (SCHEMA_VERSION < apiVersion) {
|
||||
throw new Meteor.Error('Incompatible',
|
||||
'The creature on the remote server is from a newer version of DiceCloud')
|
||||
}
|
||||
|
||||
// Migrate and verify the archive meets the current schema
|
||||
migrateApiCreature(apiCreature);
|
||||
|
||||
|
||||
// Asset that the api creature is (mildly) safe
|
||||
verifyArchiveSafety({
|
||||
creature,
|
||||
properties: apiCreature.creatureProperties ?? [],
|
||||
experiences: apiCreature.experiences ?? [],
|
||||
logs: apiCreature.logs ?? [],
|
||||
});
|
||||
|
||||
// Don't upload creatures twice
|
||||
const existingCreature = Creatures.findOne(creature._id, {
|
||||
fields: { _id: 1 }
|
||||
});
|
||||
|
||||
if (existingCreature) throw new Meteor.Error('Already exists',
|
||||
'The creature you are trying to import already exists in this database.')
|
||||
|
||||
// Ensure the user owns the restored creature
|
||||
creature.owner = userId;
|
||||
// Remove the sharing permissions, the ids of users on this instance aren't going to match
|
||||
creature.readers = [];
|
||||
creature.writers = [];
|
||||
|
||||
// Mark the creature as dirty so that it recomputes
|
||||
creature.dirty = true;
|
||||
|
||||
// Ensure there is only 1 creature being imported
|
||||
if (apiCreature.creatures.length !== 1) {
|
||||
throw new Meteor.Error('invalid-import',
|
||||
'One and only one creature must be imported at a time'
|
||||
)
|
||||
}
|
||||
|
||||
// Insert the creature sub documents
|
||||
// They still have their original _id's
|
||||
Creatures.insert(creature);
|
||||
try {
|
||||
// Add all the properties
|
||||
if (apiCreature.creatureProperties && apiCreature.creatureProperties.length) {
|
||||
CreatureProperties.batchInsert(apiCreature.creatureProperties);
|
||||
}
|
||||
if (apiCreature.experiences && apiCreature.experiences.length) {
|
||||
Experiences.batchInsert(apiCreature.experiences);
|
||||
}
|
||||
if (apiCreature.logs && apiCreature.logs.length) {
|
||||
CreatureLogs.batchInsert(apiCreature.logs);
|
||||
}
|
||||
} catch (e) {
|
||||
// If the above fails, delete the inserted creature
|
||||
removeCreatureWork(creatureId);
|
||||
throw e;
|
||||
}
|
||||
return creatureId;
|
||||
}
|
||||
|
||||
const importCharacterFromDiceCloudInstance = new ValidatedMethod({
|
||||
name: 'Creatures.methods.importFromInstance',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 10,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
async run({ characterData }) {
|
||||
if (Meteor.settings.public.disallowCreatureApiImport) throw new Meteor.Error('not-allowed',
|
||||
'This instance of DiceCloud has disallowed creature imports')
|
||||
// fetch the file
|
||||
if (!characterData) {
|
||||
throw new Meteor.Error('no-input',
|
||||
'No character data was provided');
|
||||
}
|
||||
assertHasCharactersSlots(this.userId);
|
||||
if (Meteor.isServer) {
|
||||
return importApiCreature(characterData, this.userId)
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
export default importCharacterFromDiceCloudInstance;
|
||||
@@ -1,5 +0,0 @@
|
||||
import '/imports/api/creature/creatures/methods/changeAllowedLibraries';
|
||||
import '/imports/api/creature/creatures/methods/importCharacterFromDiceCloudInstance.js';
|
||||
import '/imports/api/creature/creatures/methods/insertCreature';
|
||||
import '/imports/api/creature/creatures/methods/removeCreature';
|
||||
import '/imports/api/creature/creatures/methods/updateCreature';
|
||||
@@ -1,107 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import simpleSchemaMixin from '/imports/api/creature/mixins/simpleSchemaMixin';
|
||||
import Creatures, { CreatureSchema } from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import defaultCharacterProperties from '/imports/api/creature/creatures/defaultCharacterProperties';
|
||||
import insertPropertyFromLibraryNode from '/imports/api/creature/creatureProperties/methods/insertPropertyFromLibraryNode';
|
||||
import assertHasCharactersSlots from '/imports/api/creature/creatures/methods/assertHasCharacterSlots';
|
||||
import getSlotFillFilter from '/imports/api/creature/creatureProperties/methods/getSlotFillFilter';
|
||||
import getCreatureLibraryIds from '/imports/api/library/getCreatureLibraryIds';
|
||||
import LibraryNodes from '/imports/api/library/LibraryNodes';
|
||||
import { insertExperienceForCreature } from '/imports/api/creature/experience/Experiences';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
|
||||
const insertCreature = new ValidatedMethod({
|
||||
name: 'creatures.insertCreature',
|
||||
mixins: [RateLimiterMixin, simpleSchemaMixin],
|
||||
validate: CreatureSchema.pick(
|
||||
'name',
|
||||
'gender',
|
||||
'alignment',
|
||||
'allowedLibraries',
|
||||
'allowedLibraryCollections',
|
||||
).extend({
|
||||
'startingLevel': {
|
||||
type: SimpleSchema.Integer,
|
||||
min: 0,
|
||||
},
|
||||
}).validator(),
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
|
||||
run({ name, gender, alignment, startingLevel,
|
||||
allowedLibraries, allowedLibraryCollections }) {
|
||||
const userId = this.userId
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('Creatures.methods.insert.denied',
|
||||
'You need to be logged in to insert a creature');
|
||||
}
|
||||
|
||||
assertHasCharactersSlots(userId);
|
||||
|
||||
// Create the creature document
|
||||
let creatureId = Creatures.insert({
|
||||
owner: userId,
|
||||
name,
|
||||
gender,
|
||||
alignment,
|
||||
type: 'pc',
|
||||
allowedLibraries,
|
||||
allowedLibraryCollections,
|
||||
settings: {},
|
||||
readers: [],
|
||||
writers: [],
|
||||
public: false,
|
||||
});
|
||||
|
||||
// Insert experience to get character to starting level
|
||||
if (startingLevel) {
|
||||
insertExperienceForCreature({
|
||||
experience: {
|
||||
name: 'Starting level',
|
||||
levels: startingLevel,
|
||||
creatureId
|
||||
},
|
||||
creatureId,
|
||||
});
|
||||
}
|
||||
|
||||
// Insert the default properties
|
||||
// Not batchInsert because we want the properties cleaned by the schema
|
||||
let baseId, rulesetSlot;
|
||||
defaultCharacterProperties(creatureId).forEach(prop => {
|
||||
let id = CreatureProperties.insert(prop);
|
||||
if (prop.name === 'Ruleset') {
|
||||
baseId = id;
|
||||
rulesetSlot = prop;
|
||||
}
|
||||
});
|
||||
|
||||
// If the user only has a single ruleset subscribed, use it by default
|
||||
if (Meteor.isServer) {
|
||||
insertDefaultRuleset(creatureId, baseId, userId, rulesetSlot);
|
||||
}
|
||||
|
||||
return creatureId;
|
||||
},
|
||||
});
|
||||
|
||||
// If the user only has a single ruleset subscribed, insert it by default
|
||||
function insertDefaultRuleset(creatureId, baseId, userId, slot) {
|
||||
const libraryIds = getCreatureLibraryIds(creatureId, userId);
|
||||
const filter = getSlotFillFilter({ slot, libraryIds });
|
||||
const fillCursor = LibraryNodes.find(filter, { fields: { _id: 1 } });
|
||||
const numRulesets = fillCursor.count();
|
||||
if (numRulesets === 1) {
|
||||
const ruleset = fillCursor.fetch()[0]
|
||||
insertPropertyFromLibraryNode.call({
|
||||
nodeIds: [ruleset._id],
|
||||
parentRef: { id: baseId, collection: 'creatureProperties' },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default insertCreature;
|
||||
@@ -1,44 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import { assertOwnership } from '/imports/api/creature/creatures/creaturePermissions';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureVariables from '/imports/api/creature/creatures/CreatureVariables';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
import CreatureLogs from '/imports/api/creature/log/CreatureLogs';
|
||||
import Experiences from '/imports/api/creature/experience/Experiences';
|
||||
import { getFilter } from '/imports/api/parenting/parentingFunctions';
|
||||
|
||||
function removeRelatedDocuments(creatureId) {
|
||||
CreatureVariables.remove({ _creatureId: creatureId });
|
||||
CreatureProperties.remove(getFilter.descendantsOfRoot(creatureId));
|
||||
CreatureLogs.remove({ creatureId });
|
||||
Experiences.remove({ creatureId });
|
||||
}
|
||||
|
||||
const removeCreature = new ValidatedMethod({
|
||||
name: 'Creatures.methods.removeCreature', // DDP method name
|
||||
validate: new SimpleSchema({
|
||||
charId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ charId }) {
|
||||
assertOwnership(charId, this.userId)
|
||||
this.unblock();
|
||||
removeCreatureWork(charId)
|
||||
},
|
||||
});
|
||||
|
||||
export function removeCreatureWork(creatureId) {
|
||||
Creatures.remove(creatureId);
|
||||
removeRelatedDocuments(creatureId);
|
||||
}
|
||||
|
||||
export default removeCreature;
|
||||
@@ -1,45 +0,0 @@
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import { assertEditPermission } from '/imports/api/sharing/sharingPermissions';
|
||||
|
||||
const updateCreature = new ValidatedMethod({
|
||||
name: 'creatures.update',
|
||||
validate({ _id, path }) {
|
||||
if (!_id) return false;
|
||||
// Allowed fields
|
||||
let allowedFields = [
|
||||
'name',
|
||||
'alignment',
|
||||
'gender',
|
||||
'picture',
|
||||
'avatarPicture',
|
||||
'color',
|
||||
'settings',
|
||||
];
|
||||
if (!allowedFields.includes(path[0])) {
|
||||
throw new Meteor.Error('Creatures.methods.update.denied',
|
||||
'This field can\'t be updated using this method');
|
||||
}
|
||||
},
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, value }) {
|
||||
let creature = Creatures.findOne(_id);
|
||||
assertEditPermission(creature, this.userId);
|
||||
if (value === undefined || value === null) {
|
||||
Creatures.update(_id, {
|
||||
$unset: { [path.join('.')]: 1 },
|
||||
});
|
||||
} else {
|
||||
Creatures.update(_id, {
|
||||
$set: { [path.join('.')]: value },
|
||||
});
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
export default updateCreature;
|
||||
@@ -1,186 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import { assertEditPermission } from '/imports/api/creature/creatures/creaturePermissions';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
|
||||
let Experiences = new Mongo.Collection('experiences');
|
||||
|
||||
let ExperienceSchema = new SimpleSchema({
|
||||
name: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
// The amount of XP this experience gives
|
||||
xp: {
|
||||
type: SimpleSchema.Integer,
|
||||
optional: true,
|
||||
min: 0,
|
||||
},
|
||||
// Setting levels instead of value grants whole levels
|
||||
levels: {
|
||||
type: SimpleSchema.Integer,
|
||||
optional: true,
|
||||
min: 0,
|
||||
index: 1,
|
||||
},
|
||||
// The real-world date that it occurred, usually sorted by date
|
||||
date: {
|
||||
type: Date,
|
||||
autoValue: function () {
|
||||
// If the date isn't set, set it to now
|
||||
if (!this.isSet) {
|
||||
return new Date();
|
||||
}
|
||||
},
|
||||
index: 1,
|
||||
},
|
||||
creatureId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
index: 1,
|
||||
},
|
||||
});
|
||||
|
||||
Experiences.attachSchema(ExperienceSchema);
|
||||
|
||||
const insertExperienceForCreature = function ({ experience, creatureId }) {
|
||||
if (experience.xp) {
|
||||
Creatures.update(creatureId, {
|
||||
$inc: { 'denormalizedStats.xp': experience.xp },
|
||||
$set: { dirty: true },
|
||||
});
|
||||
}
|
||||
if (experience.levels) {
|
||||
Creatures.update(creatureId, {
|
||||
$inc: { 'denormalizedStats.milestoneLevels': experience.levels },
|
||||
$set: { dirty: true },
|
||||
});
|
||||
}
|
||||
experience.creatureId = creatureId;
|
||||
let id = Experiences.insert(experience);
|
||||
return id;
|
||||
};
|
||||
|
||||
const insertExperience = new ValidatedMethod({
|
||||
name: 'experiences.insert',
|
||||
validate: new SimpleSchema({
|
||||
experience: {
|
||||
type: ExperienceSchema.omit('creatureId'),
|
||||
},
|
||||
creatureIds: {
|
||||
type: Array,
|
||||
max: 12,
|
||||
},
|
||||
'creatureIds.$': {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ experience, creatureIds }) {
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('Experiences.methods.insert.denied',
|
||||
'You need to be logged in to insert an experience');
|
||||
}
|
||||
let insertedIds = [];
|
||||
creatureIds.forEach(creatureId => {
|
||||
assertEditPermission(creatureId, userId);
|
||||
let id = insertExperienceForCreature({ experience, creatureId });
|
||||
insertedIds.push(id);
|
||||
});
|
||||
return insertedIds;
|
||||
},
|
||||
});
|
||||
|
||||
const removeExperience = new ValidatedMethod({
|
||||
name: 'experiences.remove',
|
||||
validate: new SimpleSchema({
|
||||
experienceId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ experienceId }) {
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('Experiences.methods.remove.denied',
|
||||
'You need to be logged in to remove an experience');
|
||||
}
|
||||
let experience = Experiences.findOne(experienceId);
|
||||
if (!experience) return;
|
||||
let creatureId = experience.creatureId
|
||||
assertEditPermission(creatureId, userId);
|
||||
if (experience.xp) {
|
||||
Creatures.update(creatureId, {
|
||||
$inc: { 'denormalizedStats.xp': -experience.xp },
|
||||
$set: { dirty: true },
|
||||
});
|
||||
}
|
||||
if (experience.levels) {
|
||||
Creatures.update(creatureId, {
|
||||
$inc: { 'denormalizedStats.milestoneLevels': -experience.levels },
|
||||
$set: { dirty: true },
|
||||
});
|
||||
}
|
||||
experience.creatureId = creatureId;
|
||||
let numRemoved = Experiences.remove(experienceId);
|
||||
return numRemoved;
|
||||
},
|
||||
});
|
||||
|
||||
const recomputeExperiences = new ValidatedMethod({
|
||||
name: 'experiences.recompute',
|
||||
validate: new SimpleSchema({
|
||||
creatureId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
},
|
||||
}).validator(),
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ creatureId }) {
|
||||
let userId = this.userId;
|
||||
if (!userId) {
|
||||
throw new Meteor.Error('Experiences.methods.recompute.denied',
|
||||
'You need to be logged in to recompute a creature\'s experiences');
|
||||
}
|
||||
assertEditPermission(creatureId, userId);
|
||||
|
||||
let xp = 0;
|
||||
let milestoneLevels = 0;
|
||||
Experiences.find({
|
||||
creatureId
|
||||
}, {
|
||||
fields: { xp: 1, levels: 1 }
|
||||
}).forEach(experience => {
|
||||
xp += experience.xp || 0;
|
||||
milestoneLevels += experience.levels || 0;
|
||||
});
|
||||
Creatures.update(creatureId, {
|
||||
$set: {
|
||||
'denormalizedStats.xp': xp,
|
||||
'denormalizedStats.milestoneLevels': milestoneLevels,
|
||||
dirty: true,
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export default Experiences;
|
||||
export { ExperienceSchema, insertExperience, insertExperienceForCreature, removeExperience, recomputeExperiences };
|
||||
@@ -1,50 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
|
||||
let ExperienceSchema = new SimpleSchema({
|
||||
title: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
// Potentially long description of the event
|
||||
description: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.description,
|
||||
},
|
||||
// The real-world date that it occured
|
||||
date: {
|
||||
type: Date,
|
||||
autoValue: function () {
|
||||
// If the date isn't set, set it to now
|
||||
if (!this.isSet) {
|
||||
return new Date();
|
||||
}
|
||||
},
|
||||
},
|
||||
// The date in-world of this event
|
||||
worldDate: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
// Tags to better find this entry later
|
||||
tags: {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
maxCount: STORAGE_LIMITS.tagCount,
|
||||
},
|
||||
'tags.$': {
|
||||
type: String,
|
||||
max: STORAGE_LIMITS.tagLength,
|
||||
},
|
||||
// ID of the journal this entry belongs to
|
||||
journalId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
index: 1,
|
||||
}
|
||||
});
|
||||
|
||||
export { ExperienceSchema };
|
||||
@@ -1,270 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import Creatures from '/imports/api/creature/creatures/Creatures';
|
||||
import CreatureVariables from '/imports/api/creature/creatures/CreatureVariables';
|
||||
import LogContentSchema from '/imports/api/creature/log/LogContentSchema';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import { assertEditPermission } from '/imports/api/creature/creatures/creaturePermissions';
|
||||
import { parse, prettifyParseError } from '/imports/parser/parser';
|
||||
import resolve from '/imports/parser/resolve';
|
||||
import toString from '/imports/parser/toString';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
|
||||
const PER_CREATURE_LOG_LIMIT = 100;
|
||||
|
||||
if (Meteor.isServer) {
|
||||
var sendWebhookAsCreature = require('/imports/server/discord/sendWebhook').sendWebhookAsCreature;
|
||||
}
|
||||
|
||||
let CreatureLogs = new Mongo.Collection('creatureLogs');
|
||||
|
||||
let CreatureLogSchema = new SimpleSchema({
|
||||
content: {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
maxCount: STORAGE_LIMITS.logContentCount,
|
||||
},
|
||||
'content.$': {
|
||||
type: LogContentSchema,
|
||||
},
|
||||
// The real-world date that it occured, usually sorted by date
|
||||
date: {
|
||||
type: Date,
|
||||
autoValue: function () {
|
||||
// If the date isn't set, set it to now
|
||||
if (!this.isSet) {
|
||||
return new Date();
|
||||
}
|
||||
},
|
||||
index: 1,
|
||||
},
|
||||
// The acting creature initiating the logged events
|
||||
creatureId: {
|
||||
type: String,
|
||||
index: 1,
|
||||
},
|
||||
// The tabletop this log is associated with
|
||||
tabletopId: {
|
||||
type: String,
|
||||
optional: true,
|
||||
index: 1,
|
||||
},
|
||||
// The action that caused this log entry
|
||||
actionId: {
|
||||
type: String,
|
||||
optional: true,
|
||||
},
|
||||
creatureName: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
});
|
||||
|
||||
CreatureLogs.attachSchema(CreatureLogSchema);
|
||||
|
||||
function removeOldLogs({ creatureId, tabletopId }) {
|
||||
let filter;
|
||||
if (creatureId && tabletopId || (!creatureId && !tabletopId)) {
|
||||
throw Error('Provide either creatureId or tabletopId')
|
||||
} else if (creatureId) {
|
||||
filter = { creatureId };
|
||||
} else if (tabletopId) {
|
||||
filter = { tabletopId }
|
||||
}
|
||||
// Find the first log that is over the limit
|
||||
let firstExpiredLog = CreatureLogs.find(filter, {
|
||||
sort: { date: -1 },
|
||||
skip: PER_CREATURE_LOG_LIMIT,
|
||||
});
|
||||
if (!firstExpiredLog) return;
|
||||
// Remove all logs older than the one over the limit
|
||||
CreatureLogs.remove({
|
||||
creatureId,
|
||||
date: { $lte: firstExpiredLog.date },
|
||||
});
|
||||
}
|
||||
|
||||
function logToMessageData(log) {
|
||||
let embed = {
|
||||
fields: [],
|
||||
};
|
||||
log.content.forEach((field, index) => {
|
||||
// Empty character for blank names
|
||||
if (!field.name) field.name = '\u200b';
|
||||
if (!field.value) field.value = '\u200b';
|
||||
// Enforce Discord field character limits
|
||||
if (field.name?.length > 256) {
|
||||
field.name = field.name.substring(0, 255);
|
||||
}
|
||||
if (field.value?.length > 1024) {
|
||||
field.value = field.value.substring(0, 1024 - 3) + '...';
|
||||
}
|
||||
// Enforce Discord 25 field limit
|
||||
if (index < 25) {
|
||||
embed.fields.push(field);
|
||||
}
|
||||
});
|
||||
return { embeds: [embed] };
|
||||
}
|
||||
|
||||
function logWebhook({ log, creature }) {
|
||||
if (Meteor.isServer) {
|
||||
sendWebhookAsCreature({
|
||||
creature,
|
||||
data: logToMessageData(log),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const insertCreatureLog = new ValidatedMethod({
|
||||
name: 'creatureLogs.methods.insert',
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
validate: new SimpleSchema({
|
||||
log: CreatureLogSchema.omit('date'),
|
||||
}).validator(),
|
||||
run({ log }) {
|
||||
const creatureId = log.creatureId;
|
||||
const creature = Creatures.findOne(creatureId, {
|
||||
fields: {
|
||||
readers: 1,
|
||||
writers: 1,
|
||||
owner: 1,
|
||||
'settings.discordWebhook': 1,
|
||||
name: 1,
|
||||
avatarPicture: 1,
|
||||
tabletop: 1,
|
||||
}
|
||||
});
|
||||
assertEditPermission(creature, this.userId);
|
||||
// Build the new log
|
||||
let id = insertCreatureLogWork({ log, creature, method: this })
|
||||
return id;
|
||||
},
|
||||
});
|
||||
|
||||
export function insertCreatureLogWork({ log, creature, method }) {
|
||||
// Build the new log
|
||||
if (typeof log === 'string') {
|
||||
log = { content: [{ value: log }] };
|
||||
}
|
||||
if (!log.content?.length) return;
|
||||
|
||||
// Truncate the string lengths to fit the log content schema
|
||||
log.content.forEach((logItem) => {
|
||||
if (logItem.value?.length > STORAGE_LIMITS.summary) {
|
||||
logItem.value = logItem.value.substring(0, STORAGE_LIMITS.summary - 3) + '...';
|
||||
}
|
||||
});
|
||||
log.date = new Date();
|
||||
if (creature && creature.tabletop) log.tabletopId = creature.tabletop;
|
||||
// Insert it
|
||||
let id = CreatureLogs.insert(log);
|
||||
if (Meteor.isServer) {
|
||||
method?.unblock();
|
||||
if (creature) {
|
||||
logWebhook({ log, creature });
|
||||
}
|
||||
if (log.tabletopId) {
|
||||
removeOldLogs({ tabletopId: log.tabletopId });
|
||||
} else {
|
||||
removeOldLogs({ creatureId: creature._id });
|
||||
}
|
||||
}
|
||||
return id;
|
||||
}
|
||||
|
||||
|
||||
function equalIgnoringWhitespace(a, b) {
|
||||
if (typeof a !== 'string' || typeof b !== 'string') return a === b;
|
||||
return a.replace(/\s/g, '') === b.replace(/\s/g, '');
|
||||
}
|
||||
|
||||
const logRoll = new ValidatedMethod({
|
||||
name: 'creatureLogs.methods.logForCreature',
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
validate: new SimpleSchema({
|
||||
roll: {
|
||||
type: String,
|
||||
},
|
||||
creatureId: {
|
||||
type: String,
|
||||
max: 32,
|
||||
optional: true,
|
||||
},
|
||||
}).validator(),
|
||||
async run({ roll, creatureId }) {
|
||||
if (!creatureId) throw new Meteor.Error('no-id',
|
||||
'A creature id must be given'
|
||||
);
|
||||
let creature;
|
||||
if (creatureId) {
|
||||
creature = Creatures.findOne(creatureId, {
|
||||
fields: {
|
||||
readers: 1,
|
||||
writers: 1,
|
||||
owner: 1,
|
||||
'settings.discordWebhook': 1,
|
||||
name: 1,
|
||||
avatarPicture: 1,
|
||||
}
|
||||
});
|
||||
assertEditPermission(creature, this.userId);
|
||||
}
|
||||
const variables = CreatureVariables.findOne({ _creatureId: creatureId }) || {};
|
||||
let logContent = []
|
||||
let parsedResult = undefined;
|
||||
try {
|
||||
parsedResult = parse(roll);
|
||||
} catch (e) {
|
||||
let error = prettifyParseError(e);
|
||||
logContent.push({ name: 'Parse Error', value: error });
|
||||
}
|
||||
if (parsedResult) try {
|
||||
let {
|
||||
result: compiled,
|
||||
context
|
||||
} = await resolve('compile', parsedResult, variables);
|
||||
const compiledString = toString(compiled);
|
||||
if (!equalIgnoringWhitespace(compiledString, roll)) logContent.push({
|
||||
value: roll
|
||||
});
|
||||
logContent.push({
|
||||
value: compiledString
|
||||
});
|
||||
let { result: rolled } = await resolve('roll', compiled, variables, context);
|
||||
let rolledString = toString(rolled);
|
||||
if (rolledString !== compiledString) logContent.push({
|
||||
value: rolledString
|
||||
});
|
||||
let { result } = await resolve('reduce', rolled, variables, context);
|
||||
let resultString = toString(result);
|
||||
if (resultString !== rolledString) logContent.push({
|
||||
value: resultString
|
||||
});
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
logContent = [{ name: 'Calculation error' }];
|
||||
}
|
||||
const log = {
|
||||
content: logContent,
|
||||
creatureId,
|
||||
date: new Date(),
|
||||
};
|
||||
|
||||
let id = insertCreatureLogWork({ log, creature, method: this });
|
||||
|
||||
return id;
|
||||
},
|
||||
});
|
||||
|
||||
export default CreatureLogs;
|
||||
export { CreatureLogSchema, insertCreatureLog, logRoll, PER_CREATURE_LOG_LIMIT };
|
||||
@@ -1,74 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import ErrorSchema from '/imports/api/properties/subSchemas/ErrorSchema';
|
||||
import RollDetailsSchema from '/imports/api/properties/subSchemas/RollDetailsSchema';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
|
||||
export interface LogContent {
|
||||
name?: string
|
||||
value?: string
|
||||
inline?: boolean
|
||||
context?: {
|
||||
errors: any[]
|
||||
rolls: any[]
|
||||
doubleRolls?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
let LogContentSchema = new SimpleSchema({
|
||||
// The name of the field, included in discord webhook message
|
||||
name: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.name,
|
||||
},
|
||||
// The details of the field, included in discord webhook message
|
||||
// Markdown support
|
||||
value: {
|
||||
type: String,
|
||||
optional: true,
|
||||
max: STORAGE_LIMITS.summary,
|
||||
},
|
||||
// Inline with other content fields
|
||||
inline: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
// This log entry was silenced
|
||||
silenced: {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
context: {
|
||||
type: Object,
|
||||
optional: true,
|
||||
},
|
||||
'context.errors': {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
maxCount: STORAGE_LIMITS.errorCount,
|
||||
},
|
||||
'context.errors.$': {
|
||||
type: ErrorSchema,
|
||||
},
|
||||
'context.rolls': {
|
||||
type: Array,
|
||||
defaultValue: [],
|
||||
maxCount: STORAGE_LIMITS.rollCount,
|
||||
},
|
||||
'context.rolls.$': {
|
||||
type: RollDetailsSchema,
|
||||
},
|
||||
'context.doubleRolls': {
|
||||
type: Boolean,
|
||||
optional: true,
|
||||
},
|
||||
targetIds: {
|
||||
type: Array,
|
||||
optional: true,
|
||||
},
|
||||
'targetIds.$': {
|
||||
type: String,
|
||||
}
|
||||
});
|
||||
|
||||
export default LogContentSchema;
|
||||
@@ -1,52 +0,0 @@
|
||||
import {
|
||||
assertEditPermission,
|
||||
assertViewPermission,
|
||||
assertOwnership,
|
||||
} from '/imports/api/creature/creatures/creaturePermissions';
|
||||
|
||||
// Checks if the method has permission to run on the document. If the document
|
||||
// has a charId, that creature is checked, otherwise if it has an _id and the
|
||||
// collection is defined in the method options, that document is fetched to
|
||||
// determine its charId, otherwise a getCharId method can be defined to perform
|
||||
// a special search for the required creature
|
||||
//
|
||||
// Because this mixin injects the charId into argument objects that don't
|
||||
// already contain it, it should always come last in the mixin list, so that it
|
||||
// the outermost wrapper of the run function
|
||||
export default function creaturePermissionMixin(methodOptions) {
|
||||
let assertPermission;
|
||||
if (methodOptions.permission === 'owner') {
|
||||
assertPermission = assertOwnership;
|
||||
} else if (methodOptions.permission === 'edit') {
|
||||
assertPermission = assertEditPermission;
|
||||
} else if (methodOptions.permission === 'view') {
|
||||
assertPermission = assertViewPermission;
|
||||
} else {
|
||||
throw "`permission` missing in method options";
|
||||
}
|
||||
|
||||
let getCharId;
|
||||
if (methodOptions.getCharId) {
|
||||
getCharId = methodOptions.getCharId;
|
||||
} else if (methodOptions.collection) {
|
||||
getCharId = function ({ _id }) {
|
||||
return methodOptions.collection.findOne(_id, {
|
||||
fields: { charId: 1 }
|
||||
}).charId;
|
||||
};
|
||||
} else {
|
||||
getCharId = function () {
|
||||
throw "`getCharId` or `collection` missing in method options," +
|
||||
" or {charId} missing in call";
|
||||
};
|
||||
}
|
||||
|
||||
let runFunc = methodOptions.run;
|
||||
methodOptions.run = function (doc, ...rest) {
|
||||
// Store the charId on the doc for other mixins if it had to be fetched
|
||||
doc.charId = doc.charId || getCharId.apply(this, arguments);
|
||||
assertPermission(doc.charId, this.userId);
|
||||
return runFunc.call(this, doc, ...rest);
|
||||
};
|
||||
return methodOptions;
|
||||
}
|
||||
@@ -1,51 +0,0 @@
|
||||
// Copied from https://github.com/sethjgore/meteor-simple-schema-mixin
|
||||
// and updated to simpl-schema npm package
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
|
||||
/**
 * ValidatedMethod mixin that converts a `schema` option into a `validate`
 * function using simpl-schema.
 *
 * - If `validate` is supplied (and `schema` is absent or null), the options
 *   pass through untouched: the caller chose plain validate.
 * - Supplying both a schema and a validate function is an error.
 * - A missing/null schema enforces a no-argument call (because of
 *   `filter: false` in the default validator options).
 *
 * Mutates and returns the same options object, as before.
 */
export default function simpleSchemaMixin(methodOptions) {
  const schemaGiven = typeof methodOptions.schema !== 'undefined';
  const validateGiven = typeof methodOptions.validate !== 'undefined';

  // Caller opted into the plain `validate` style; leave options alone.
  if (
    (!schemaGiven && validateGiven)
    || (schemaGiven && methodOptions.schema === null
      && validateGiven && methodOptions.validate !== null)
  ) {
    return methodOptions;
  }

  // Both a real schema and a real validate function is a misconfiguration.
  if (methodOptions.validate && methodOptions.validate !== null) {
    throw new Meteor.Error(
      'simpleSchemaMixin.options',
      '"schema" and "validate" options cannot be used together');
  }

  // Note: both set to null falls through to the schema = null behavior
  // (enforce no args), not the validate = null behavior (no validation).
  const options = methodOptions; // mutated in place, same object returned
  options.schemaValidatorOptions = options.schemaValidatorOptions
    || { clean: true, filter: false };

  // Normalize the schema option into a SimpleSchema instance.
  let schemaInstance;
  if (!options.schema || options.schema === null) {
    // No schema given: an empty schema plus `filter: false` rejects any args.
    schemaInstance = new SimpleSchema({});
  } else if (options.schema instanceof SimpleSchema) {
    schemaInstance = options.schema;
  } else {
    schemaInstance = new SimpleSchema(options.schema);
  }

  options.validate = schemaInstance.validator(options.schemaValidatorOptions);
  return options;
}
|
||||
@@ -1,67 +0,0 @@
|
||||
// Argument shape shared by update-style methods: a document `_id` plus a
// blackbox `update` object. The fields inside `update` are validated
// separately against the method's own schema by updateSchemaMixin below.
const argumentSchema = new SimpleSchema({
  _id: SimpleSchema.RegEx.Id,
  update: {
    type: Object,
    blackbox: true,
  },
});
|
||||
|
||||
// Modified simpleSchemaMixin
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
|
||||
/**
 * Modified simpleSchemaMixin for update-style methods: the method is called
 * with `{ _id, update }`; the `_id`/`update` shape is checked against
 * `argumentSchema`, and the `update` object is checked against the method's
 * schema as a `$set` modifier. Mutates and returns the options object.
 */
export default function updateSchemaMixin(methodOptions) {
  // If the user didn't give us a schema and they did give us a validate, assume
  // that they are choosing to use the validate way of doing things in this call.
  if ((
    typeof methodOptions.schema === 'undefined' &&
    typeof methodOptions.validate !== 'undefined'
  ) || (
    typeof methodOptions.schema !== 'undefined' &&
    methodOptions.schema === null &&
    typeof methodOptions.validate !== 'undefined' &&
    methodOptions.validate !== null
  )) {
    return methodOptions;
  }

  // If they truly gave us both... that just doesn't seem proper.
  if (methodOptions.validate && methodOptions.validate !== null) {
    throw new Meteor.Error(
      'simpleSchemaMixin.options',
      '"schema" and "validate" options cannot be used together');
  }

  // Note that setting them both null will make it through, defaulting to the
  // schema = null behavior (enforce no args) instead of the validate = null
  // behavior (do no validation).

  // Apply default validator options if none are provided
  // `modifier: true` because the schema is validated against a $set modifier below
  methodOptions.schemaValidatorOptions =
    methodOptions.schemaValidatorOptions ||
    { clean: true, modifier: true };

  // Make the update schema a SimpleSchema, if it isn't already
  // NOTE(review): unlike simpleSchemaMixin, a missing or null schema is not
  // handled here — `new SimpleSchema(undefined)` would throw. Confirm all
  // callers of this mixin always supply a schema.
  let updateSchema;
  if (methodOptions.schema instanceof SimpleSchema) {
    updateSchema = methodOptions.schema;
  } else {
    updateSchema = new SimpleSchema(methodOptions.schema);
  }

  // Set up the new validation: first the { _id, update } argument shape, then
  // the update fields themselves, treated as a modifier
  methodOptions.validate = function(args){
    argumentSchema.validate(args);
    updateSchema.validate(
      {$set: args.update},
      methodOptions.schemaValidatorOptions
    );
  };

  // Give a default run function if one isn't supplied
  // (requires methodOptions.collection to be set by the caller)
  if (!methodOptions.run){
    methodOptions.run = function({_id, update}){
      return methodOptions.collection.update(_id, {$set: update});
    };
  }
  return methodOptions;
}
|
||||
@@ -1,287 +0,0 @@
|
||||
import { Meteor } from 'meteor/meteor';
|
||||
import { ValidatedMethod } from 'meteor/mdg:validated-method';
|
||||
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import { softRemove } from '/imports/api/parenting/softRemove';
|
||||
import SoftRemovableSchema from '/imports/api/parenting/SoftRemovableSchema';
|
||||
import { storedIconsSchema } from '/imports/api/icons/Icons';
|
||||
import '/imports/api/library/methods/index';
|
||||
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS';
|
||||
import { restore } from '/imports/api/parenting/softRemove';
|
||||
import { getFilter, rebuildNestedSets, moveDocWithinRoot } from '/imports/api/parenting/parentingFunctions';
|
||||
import ChildSchema, { TreeDoc } from '/imports/api/parenting/ChildSchema';
|
||||
|
||||
// Give the docs a common root, so they can share parenting logic
export const DOC_ROOT_ID = 'DDDDDDDDDDDDDDDDD'

// A documentation page: a named, linkable node in the docs tree
type Doc = {
  _id: string,
  name: string,
  // URL segment for this doc, e.g. 'my-page' (lowercase letters and dashes)
  urlName: string,
  // Full path built from the ancestors' urlNames plus this doc's urlName;
  // kept in sync by getDocLink and the insert/update methods below
  href: string,
  description?: string,
  published?: true,
  icon?: {
    name: string,
    shape: string,
  },
} & TreeDoc;

// The docs collection. `getJsonDocs` is only attached on the client (see the
// seeding block further down), hence the optional intersection type.
const Docs: Mongo.Collection<Doc> & {
  getJsonDocs?: () => string
} = new Mongo.Collection<Doc>('docs');

const DocSchema = new SimpleSchema({
  _id: {
    type: String,
    max: 32,
  },
  name: {
    type: String,
    max: STORAGE_LIMITS.description,
  },
  urlName: {
    type: String,
    // Lowercase letters and dashes only, at least two characters
    regEx: /[a-z]+(?:[a-z]|-)+/,
    min: 2,
    max: STORAGE_LIMITS.variableName,
  },
  href: {
    type: String,
  },
  description: {
    type: String,
    optional: true,
  },
  published: {
    type: Boolean,
    optional: true,
  },
  icon: {
    type: storedIconsSchema,
    optional: true,
    max: STORAGE_LIMITS.icon,
  },
});

// Combine the doc fields with the shared parenting and soft-removal fields
const schema = new SimpleSchema({});
schema.extend(DocSchema);
schema.extend(ChildSchema);
schema.extend(SoftRemovableSchema);
// @ts-expect-error No attach schema in types
Docs.attachSchema(schema);
|
||||
|
||||
function assertDocsEditPermission(userId) {
|
||||
if (!userId || typeof userId !== 'string') throw new Meteor.Error('No user id provided');
|
||||
const user = Meteor.users.findOne(userId);
|
||||
if (!user) throw new Meteor.Error('User does not exist');
|
||||
if (!user?.roles?.includes?.('docsWriter')) throw ('Permission denied')
|
||||
}
|
||||
|
||||
function getDocLink(doc: Doc, urlName?: string) {
|
||||
if (!urlName) urlName = doc.urlName;
|
||||
const address = ['/docs'];
|
||||
const ancestorDocs = Docs.find(getFilter.ancestors(doc));
|
||||
ancestorDocs?.forEach(a => {
|
||||
address.push(a.urlName);
|
||||
});
|
||||
address.push(urlName);
|
||||
return address.join('/');
|
||||
}
|
||||
|
||||
// Add a means of seeding new servers with documentation
if (Meteor.isClient) {
  // Client-only helper: dump every doc as pretty-printed JSON, suitable for
  // pasting into docs/defaultDocs.json
  Docs.getJsonDocs = function () {
    return JSON.stringify(Docs.find({}).fetch(), null, 2);
  }
} else if (Meteor.isServer) {
  Meteor.startup(() => {
    // Only seed when the collection is completely empty
    if (!Docs.findOne()) {
      console.info('No docs found, filling documentation with defaults');
      Assets.getText('docs/defaultDocs.json', (error, string) => {
        // NOTE(review): `error` is ignored here; a missing or unreadable
        // asset would make JSON.parse throw on undefined — confirm intended
        const docs = JSON.parse(string)
        docs.forEach(doc => Docs.insert(doc));
        rebuildNestedSets(Docs, DOC_ROOT_ID);
      });
    }
  });
}
||||
|
||||
// Insert a new doc under the given parent with a generated placeholder
// urlName, then rebuild the nested-set tree indexes
const insertDoc = new ValidatedMethod({
  name: 'docs.insert',
  validate: null,
  mixins: [RateLimiterMixin],
  rateLimit: {
    numRequests: 5,
    timeInterval: 5000,
  },
  run({ doc, parentId }) {
    // Never trust a client-supplied _id
    delete doc._id;
    assertDocsEditPermission(this.userId);

    doc.parentId = parentId;
    doc.root = {
      collection: 'docs',
      id: DOC_ROOT_ID,
    };

    // Generate a unique-ish placeholder urlName from the highest `left`
    // value in the tree; the author renames it later via docs.update
    const lastOrder = Docs.find({}, { sort: { left: -1 }, limit: 1 }).fetch()[0]?.left || 0;
    doc.urlName = 'new-doc-' + (lastOrder + 1);
    doc.href = getDocLink(doc);
    if (Docs.findOne({ href: doc.href })) {
      throw new Meteor.Error('Link collision', 'A document with the same URL already exists');
    }

    const docId = Docs.insert(doc);
    rebuildNestedSets(Docs, DOC_ROOT_ID);
    return docId;
  },
});
|
||||
|
||||
const updateDoc = new ValidatedMethod({
|
||||
name: 'docs.update',
|
||||
validate({ _id, path }) {
|
||||
if (!_id) return false;
|
||||
// We cannot change these fields with a simple update
|
||||
switch (path[0]) {
|
||||
case '_is':
|
||||
return false;
|
||||
}
|
||||
},
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, value }) {
|
||||
assertDocsEditPermission(this.userId);
|
||||
const pathString = path.join('.');
|
||||
let modifier;
|
||||
// unset empty values
|
||||
if (value === null || value === undefined) {
|
||||
modifier = { $unset: { [pathString]: 1 } };
|
||||
} else {
|
||||
modifier = { $set: { [pathString]: value } };
|
||||
}
|
||||
if (pathString === 'urlName') {
|
||||
const doc = Docs.findOne(_id);
|
||||
if (!doc) throw new Meteor.Error('Not Found', 'The document you are trying to edit was not found');
|
||||
const newLink = getDocLink(doc, value);
|
||||
if (Docs.findOne({ href: newLink })) {
|
||||
throw new Meteor.Error('Link collision', 'A document with the same URL already exists');
|
||||
}
|
||||
modifier.$set = modifier.$set || {};
|
||||
modifier.$set.href = newLink;
|
||||
}
|
||||
const updates = Docs.update(_id, modifier);
|
||||
rebuildNestedSets(Docs, DOC_ROOT_ID);
|
||||
return updates;
|
||||
},
|
||||
});
|
||||
|
||||
const pushToDoc = new ValidatedMethod({
|
||||
name: 'docs.push',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, value }) {
|
||||
assertDocsEditPermission(this.userId);
|
||||
return Docs.update(_id, {
|
||||
$push: { [path.join('.')]: value },
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const pullFromDoc = new ValidatedMethod({
|
||||
name: 'docs.pull',
|
||||
validate: null,
|
||||
mixins: [RateLimiterMixin],
|
||||
rateLimit: {
|
||||
numRequests: 5,
|
||||
timeInterval: 5000,
|
||||
},
|
||||
run({ _id, path, itemId }) {
|
||||
assertDocsEditPermission(this.userId);
|
||||
return Docs.update(_id, {
|
||||
$pull: { [path.join('.')]: { _id: itemId } },
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Soft remove a doc, then rebuild the nested-set structure of the docs tree
const softRemoveDoc = new ValidatedMethod({
  name: 'docs.softRemove',
  validate: new SimpleSchema({
    _id: SimpleSchema.RegEx.Id
  }).validator(),
  mixins: [RateLimiterMixin],
  rateLimit: {
    numRequests: 5,
    timeInterval: 5000,
  },
  run({ _id }) {
    assertDocsEditPermission(this.userId);
    softRemove(Docs, _id);
    rebuildNestedSets(Docs, DOC_ROOT_ID);
  }
});
|
||||
|
||||
// Restore a previously soft-removed doc, then rebuild the nested sets
const restoreDoc = new ValidatedMethod({
  name: 'docs.restore',
  validate: new SimpleSchema({
    _id: SimpleSchema.RegEx.Id
  }).validator(),
  mixins: [RateLimiterMixin],
  rateLimit: {
    numRequests: 5,
    timeInterval: 5000,
  },
  run({ _id }) {
    assertDocsEditPermission(this.userId);
    // NOTE(review): restore is called with the collection *name* string here,
    // while softRemove above receives the collection object — confirm this
    // matches restore's signature in /imports/api/parenting/softRemove
    restore('docs', _id);
    rebuildNestedSets(Docs, DOC_ROOT_ID);
  }
});
|
||||
|
||||
// Move a doc to a new position within the docs tree
const organizeDoc = new ValidatedMethod({
  name: 'docs.organizeDoc',
  validate: new SimpleSchema({
    docId: String,
    newPosition: Number,
    skipClient: {
      type: Boolean,
      optional: true,
    }
  }).validator(),
  mixins: [RateLimiterMixin],
  rateLimit: {
    numRequests: 5,
    timeInterval: 5000,
  },
  async run({ docId, newPosition, skipClient }: { docId: string, newPosition: number, skipClient?: boolean }) {
    // Allow callers to skip running this method in the client simulation
    if (skipClient && this.isSimulation) {
      return;
    }
    assertDocsEditPermission(this.userId);

    const doc = Docs.findOne(docId);
    if (!doc) throw new Meteor.Error('not found', 'The doc you are moving was not found');
    // Move the doc
    await moveDocWithinRoot(doc, Docs, newPosition);
  },
});
|
||||
|
||||
// Named exports: the base schema and all doc methods; default export is the
// Docs collection itself
export {
  DocSchema,
  insertDoc,
  updateDoc,
  pushToDoc,
  pullFromDoc,
  softRemoveDoc,
  restoreDoc,
  organizeDoc,
};

export default Docs;
|
||||
@@ -1,136 +0,0 @@
|
||||
import SimpleSchema from 'simpl-schema';
|
||||
import TaskResult from './tasks/TaskResult';
|
||||
import LogContentSchema from '/imports/api/creature/log/LogContentSchema';
|
||||
import Task from './tasks/Task';
|
||||
|
||||
// Collection of engine actions being applied to creatures
const EngineActions = new Mongo.Collection<EngineAction>('actions');

export interface EngineAction {
  _id?: string;
  // presumably set for client-side simulations — TODO confirm against callers
  _isSimulation?: boolean;
  _stepThrough?: boolean;
  _decisions?: any[],
  // The root task this action applies
  task: Task;
  creatureId: string;
  tabletopId?: string;
  // Results of tasks applied so far; see ActionSchema comments for how these
  // are used to replay an action
  results: TaskResult[];
  taskCount: number;
}
|
||||
|
||||
// Schema for stored engine actions. Each entry in `results` records the
// task that was applied, its targets, scope changes, and database mutations,
// with enough information to replay the action deterministically.
const ActionSchema = new SimpleSchema({
  creatureId: {
    type: String,
    max: 32,
    // @ts-expect-error index not defined
    index: 1,
  },
  rootPropId: {
    type: String,
    max: 32,
    optional: true,
  },
  tabletopId: {
    type: String,
    max: 32,
    optional: true,
    // @ts-expect-error index not defined
    index: 1,
  },
  task: {
    type: Object,
    blackbox: true,
  },
  // Applied properties
  results: {
    type: Array,
    defaultValue: [],
  },
  'results.$': {
    type: Object,
  },
  // The property and target ids popped off the task stack
  // Pushing these to the top of the stack and deleting the results from this point onwards
  // Should re-run the action identically from this point
  'results.$.propId': {
    type: String,
    max: 32,
  },
  'results.$.targetIds': {
    type: Array,
    defaultValue: [],
  },
  'results.$.targetIds.$': {
    type: String,
    max: 32,
  },
  // Changes that override the local scope
  'results.$.scope': {
    type: Object,
    optional: true,
    blackbox: true,
  },
  // Changes that consume pushed values from the local scope
  'results.$.popScope': {
    type: Object,
    optional: true,
    blackbox: true,
  },
  // Changes that push values to the local scope
  'results.$.pushScope': {
    type: Object,
    optional: true,
    blackbox: true,
  },
  // database changes
  'results.$.mutations': {
    type: Array,
    optional: true,
  },
  'results.$.mutations.$': {
    type: Object,
  },
  'results.$.mutations.$.targetIds': {
    type: Array,
  },
  'results.$.mutations.$.targetIds.$': {
    type: String,
    max: 32,
  },
  'results.$.mutations.$.updates': {
    type: Array,
    optional: true,
  },
  'results.$.mutations.$.updates.$': {
    type: Object,
  },
  'results.$.mutations.$.updates.$.propId': {
    type: String,
    max: 32,
  },
  // Required, because CreatureProperties.update requires a selector of { type }
  'results.$.mutations.$.updates.$.type': {
    type: String,
  },
  'results.$.mutations.$.updates.$.set': {
    type: Object,
    optional: true,
    blackbox: true,
  },
  'results.$.mutations.$.updates.$.inc': {
    type: Object,
    optional: true,
    blackbox: true,
  },
  // Log content produced by this mutation
  'results.$.mutations.$.contents': {
    type: Array,
    optional: true,
  },
  'results.$.mutations.$.contents.$': {
    type: LogContentSchema,
  },
});
|
||||
|
||||
EngineActions.attachSchema(ActionSchema);
|
||||
|
||||
export default EngineActions;
|
||||
export { ActionSchema }
|
||||
@@ -1,466 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allLogContent,
|
||||
allMutations,
|
||||
allUpdates,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
import { LogContent, Mutation, Update } from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import Alea from 'alea';
|
||||
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
|
||||
// Deterministic fixture ids. getRandomIds(100) returns more ids than are
// destructured here; the extras are simply unused.
const [
  creatureId, targetCreatureId, targetCreature2Id, emptyActionId, selfActionId, attackActionId,
  usesActionId, attackMissId, attackNoTargetId, usesResourcesActionId, ammoId, resourceAttId,
  consumeAmmoId, consumeResourceId, noUsesActionId, insufficientResourcesActionId,
  attributeResetByEventId, eventActionId, advantageAttackId, advantageEffectId, disadvantageAttackId, disadvantageEffectId,
] = getRandomIds(100);

// The acting creature, with one property per scenario exercised in the
// describe block below
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    // Empty
    {
      _id: emptyActionId,
      type: 'action',
      summary: { text: 'Summary text 1 + 1 = {1 + 1}' }
    },
    // Attack that targets self
    {
      _id: selfActionId,
      type: 'action',
      target: 'self',
    },
    // Attack that hits
    {
      _id: attackActionId,
      type: 'action',
      attackRoll: { calculation: '10' },
    },
    // Attack that misses
    {
      _id: attackMissId,
      type: 'action',
      attackRoll: { calculation: '-5' },
    },
    // Attack that has Advantage
    {
      _id: advantageAttackId,
      type: 'action',
      attackRoll: { calculation: '0' },
      tags: ['hasAdvantage'],
    },
    {
      _id: advantageEffectId,
      type: 'effect',
      operation: 'advantage',
      targetByTags: true,
      targetTags: ['hasAdvantage'],
    },
    // Attack that has Disadvantage
    {
      _id: disadvantageAttackId,
      type: 'action',
      attackRoll: { calculation: '0' },
      tags: ['hasDisadvantage'],
    },
    {
      _id: disadvantageEffectId,
      type: 'effect',
      operation: 'disadvantage',
      targetByTags: true,
      targetTags: ['hasDisadvantage'],
    },
    // Attack that has no target
    {
      _id: attackNoTargetId,
      type: 'action',
      attackRoll: { calculation: '1' },
    },
    // Disable crits
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: '~criticalHitTarget',
      baseValue: { calculation: '21' },
    },
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: '~criticalMissTarget',
      baseValue: { calculation: '0' },
    },
    // Has uses
    {
      _id: usesActionId,
      type: 'action',
      uses: { calculation: '3' },
      usesUsed: 1,
      reset: 'longRest',
    },
    // Not enough uses
    {
      _id: noUsesActionId,
      type: 'action',
      uses: { calculation: '5' },
      usesUsed: 5,
      reset: 'longRest',
    },
    // Uses Resources
    {
      _id: ammoId,
      type: 'item',
      quantity: 12,
      tags: ['ammo']
    },
    {
      _id: resourceAttId,
      type: 'attribute',
      name: 'Resource Name',
      attributeType: 'stat',
      baseValue: { calculation: '7' },
      variableName: 'resourceVar',
    },
    {
      _id: usesResourcesActionId,
      type: 'action',
      resources: {
        itemsConsumed: [{
          _id: consumeAmmoId,
          tag: 'ammo',
          quantity: { calculation: '3' },
          itemId: ammoId,
        }],
        attributesConsumed: [{
          _id: consumeResourceId,
          variableName: 'resourceVar',
          quantity: { calculation: '2' },
        }],
        conditions: [],
      }
    },
    // Asks for more of the resource than the attribute holds
    {
      _id: insufficientResourcesActionId,
      type: 'action',
      resources: {
        attributesConsumed: [{
          _id: consumeResourceId,
          variableName: 'resourceVar',
          quantity: { calculation: '9001' },
        }],
        itemsConsumed: [],
        conditions: [],
      }
    },
    // Events and resetting attributes
    {
      _id: attributeResetByEventId,
      type: 'attribute',
      name: 'Attribute Reset By testEvent Event',
      attributeType: 'stat',
      baseValue: { calculation: '27' },
      damage: 13,
      variableName: 'resetByEventAtt',
      reset: 'testEvent'
    },
    {
      _id: eventActionId,
      type: 'action',
      actionType: 'event',
      variableName: 'testEvent',
    },
  ],
}

// Two identical target creatures, used to check per-target attack mutations
const actionTargetCreature: TestCreature = {
  _id: targetCreatureId,
  props: [
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'armor',
      baseValue: { calculation: '10' },
    }
  ]
}

const actionTargetCreature2: TestCreature = {
  _id: targetCreature2Id,
  props: [
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'armor',
      baseValue: { calculation: '10' },
    }
  ]
}
|
||||
|
||||
// Integration tests for the action engine: seeds the fixture creatures, runs
// actions by id, and asserts on the exact mutations, updates, and log content
// produced. Every expected dice value depends on the seeded Alea PRNG being
// deterministic (verified by the first test).
describe('Apply Action Properties', function () {
  // Increase timeout
  this.timeout(8000);

  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
    await createTestCreature(actionTargetCreature);
    await createTestCreature(actionTargetCreature2);
  });

  // Guard test: if Alea's output for these seeds ever changes, every
  // dice-dependent expectation below would need updating
  it('should generate random numbers reliably given consistent seeds', function () {
    const aleaFraction = Alea('test', 'seeds');
    const randomNumbers = [aleaFraction(), aleaFraction(), aleaFraction()];
    assert.deepEqual(randomNumbers, [
      0.19889510236680508, 0.9176857066340744, 0.042551583144813776
    ]);
  });

  it('should run empty actions', async function () {
    const action = await runActionById(emptyActionId);
    assert.exists(action);
    assert.deepEqual(allMutations(action), [{
      contents: [{
        name: 'Action',
        // The {1 + 1} inline calculation in the summary gets evaluated
        value: 'Summary text 1 + 1 = 2',
      }],
      targetIds: [],
    }]);
  });

  it('should target self when set', async function () {
    const action = await runActionById(selfActionId);
    assert.exists(action);
    assert.deepEqual(allMutations(action), [{
      contents: [{
        name: 'Action',
      }],
      targetIds: [creatureId],
    }]);
  });

  it('should make attack rolls against no targets', async function () {
    const action = await runActionById(attackNoTargetId, []);
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: [],
      }, {
        contents: [{
          name: 'To Hit',
          value: '1d20 [10] + 1\n**11**',
          inline: true,
        }],
        targetIds: [],
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  })

  it('should make attack rolls against multiple creatures', async function () {
    const action = await runActionById(attackActionId, [
      targetCreatureId,
      targetCreature2Id,
    ]);
    // One shared 'Action' mutation, then one hit mutation per target
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: [targetCreatureId, targetCreature2Id]
      }, {
        contents: [{
          inline: true,
          name: 'Hit!',
          value: '1d20 [10] + 10\n**20**',
        }],
        targetIds: [targetCreatureId],
      }, {
        contents: [{
          inline: true,
          name: 'Hit!',
          value: '1d20 [10] + 10\n**20**',
        }],
        targetIds: [targetCreature2Id],
      },
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('should make attack rolls that use uses', async function () {
    const action = await runActionById(usesActionId, [targetCreatureId]);
    const expectedUpdates: Update[] = [
      {
        propId: usesActionId,
        type: 'action',
        inc: { usesUsed: 1, usesLeft: -1 },
      }
    ]
    assert.deepEqual(allUpdates(action), expectedUpdates);
  });

  it('should fail to make attacks that have no uses left', async function () {
    const action = await runActionById(noUsesActionId, [targetCreatureId]);
    const expectedContent: LogContent[] = [
      {
        name: 'Action'
      }, {
        name: 'Error',
        value: 'Action does not have enough uses left'
      }
    ]
    assert.deepEqual(allLogContent(action), expectedContent);
  });

  it('should make attack rolls that miss', async function () {
    const action = await runActionById(attackMissId, [targetCreatureId]);
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: [targetCreatureId],
      }, {
        contents: [{
          inline: true,
          name: 'Miss!',
          value: '1d20 [10] − 5\n**5**', // DiceCloud uses unicode minus
        }],
        targetIds: [targetCreatureId],
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('should make attack rolls that roll with advantage', async function () {
    // The tag-targeted 'advantage' effect should already be applied to the prop
    const prop = await CreatureProperties.findOneAsync(advantageAttackId);
    assert(prop);
    assert(prop.type === 'action')
    assert.equal(prop.attackRoll?.advantage, 1, 'The attack roll should have advantage');
    const action = await runActionById(advantageAttackId, [targetCreatureId]);
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: [targetCreatureId],
      }, {
        contents: [{
          inline: true,
          name: 'Hit! (Advantage)',
          // Both d20 results are shown; the lower (10) is struck through
          value: '1d20 [ ~~10~~, 11 ] + 0\n**11**',
        }],
        targetIds: [targetCreatureId],
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('should make attack rolls that roll with disadvantage', async function () {
    const prop = await CreatureProperties.findOneAsync(disadvantageAttackId);
    assert(prop);
    assert(prop.type === 'action');
    assert.equal(prop.attackRoll?.disadvantage, 1, 'The attack roll should have disadvantage');
    const action = await runActionById(disadvantageAttackId, [targetCreatureId]);
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: [targetCreatureId],
      }, {
        contents: [{
          inline: true,
          name: 'Hit! (Disadvantage)',
          // The higher result (11) is struck through this time
          value: '1d20 [ 10, ~~11~~ ] + 0\n**10**',
        }],
        targetIds: [targetCreatureId],
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('actions should consume resources', async function () {
    const action = await runActionById(usesResourcesActionId, []);
    // Expect the attribute to be damaged by 2 and the ammo item reduced by 3,
    // matching the action's attributesConsumed/itemsConsumed fixtures
    const expectedMutations: Mutation[] = [
      {
        contents: [{ name: 'Action' }],
        targetIds: []
      },
      {
        contents: [{
          inline: true,
          name: 'Stat damaged',
          value: '−2 Resource Name',
        }],
        targetIds: [creatureId],
        updates: [{
          inc: {
            damage: 2,
            value: -2
          },
          propId: resourceAttId,
          type: 'attribute'
        }],
      },
      {
        targetIds: [],
        updates: [
          {
            inc: {
              quantity: -3
            },
            propId: ammoId,
            type: 'item',
          }
        ]
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('should handle insufficient resources', async function () {
    const action = await runActionById(insufficientResourcesActionId, []);
    const expectedMutations: Mutation[] = [
      {
        contents: [{
          name: 'Action'
        }, {
          name: 'Error',
          value: 'This creature doesn\'t have sufficient resources to perform this action',
        }],
        targetIds: [],
      },
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

  it('should reset attributes when events happen', async function () {
    const action = await runActionById(eventActionId, []);
    // Firing the 'testEvent' event should undo the 13 damage on the
    // attribute whose reset is 'testEvent'
    const expectedMutations: Mutation[] = [
      {
        contents: [{
          name: 'Action'
        }],
        targetIds: [],
      },
      {
        contents: [
          {
            inline: true,
            name: 'Stat restored',
            value: '+13 Attribute Reset By testEvent Event',
          },
        ],
        targetIds: [creatureId],
        updates: [
          {
            inc: {
              damage: -13,
              value: 13,
            },
            propId: attributeResetByEventId,
            type: 'attribute',
          },
        ],
      }
    ];
    assert.deepEqual(allMutations(action), expectedMutations);
  });

});
|
||||
@@ -1,268 +0,0 @@
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import { PropTask } from '../tasks/Task';
|
||||
import TaskResult, { LogContent } from '../tasks/TaskResult';
|
||||
import { getVariables } from '/imports/api/engine/loadCreatures';
|
||||
import getPropertyTitle from '/imports/api/utility/getPropertyTitle';
|
||||
import recalculateInlineCalculations from '/imports/api/engine/action/functions/recalculateInlineCalculations';
|
||||
import spendResources from '/imports/api/engine/action/functions/spendResources';
|
||||
import { applyAfterChildrenTriggers, applyAfterTriggers, applyChildren } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import recalculateCalculation from '/imports/api/engine/action/functions/recalculateCalculation';
|
||||
import { getEffectiveActionScope } from '/imports/api/engine/action/functions/getEffectiveActionScope';
|
||||
import numberToSignedString from '/imports/api/utility/numberToSignedString';
|
||||
import { getNumberFromScope } from '/imports/api/creature/creatures/CreatureVariables';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import { CalculatedField } from '/imports/api/properties/subSchemas/computedField';
|
||||
import applyResetTask from '/imports/api/engine/action/tasks/applyResetTask';
|
||||
import { CreaturePropertyTypes } from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
|
||||
export default async function applyActionProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, userInput: InputProvider
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
if (prop.type !== 'action' && prop.type !== 'spell') {
|
||||
throw new Meteor.Error('wrong-property', `Expected an action or a spell, got ${prop.type} instead`);
|
||||
}
|
||||
const targetIds = prop.target === 'self' ? [action.creatureId] : task.targetIds;
|
||||
|
||||
// If the action is a a spell, make sure we have spell slot defined
|
||||
if (prop.type === 'spell') {
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
if (!('slotLevel' in scope)) {
|
||||
result.pushScope = {
|
||||
'~slotLevel': { value: prop.level },
|
||||
'slotLevel': { value: prop.level },
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
//Log the name and summary, check that the property has enough resources to fire
|
||||
if (prop.summary?.text) {
|
||||
await recalculateInlineCalculations(prop.summary, action, 'reduce', userInput);
|
||||
}
|
||||
result.appendLog({
|
||||
name: getPropertyTitle(prop),
|
||||
...prop.summary && { value: prop.summary.value },
|
||||
silenced: prop.silent,
|
||||
}, targetIds);
|
||||
|
||||
// Check Uses
|
||||
if (prop.usesLeft !== undefined && prop.usesLeft <= 0) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: `${getPropertyTitle(prop)} does not have enough uses left`,
|
||||
silenced: prop.silent,
|
||||
}, targetIds);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check Resources
|
||||
if (prop.insufficientResources) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: 'This creature doesn\'t have sufficient resources to perform this action',
|
||||
silenced: prop.silent,
|
||||
}, targetIds);
|
||||
return;
|
||||
}
|
||||
|
||||
await spendResources(action, prop, targetIds, result, userInput);
|
||||
|
||||
const attack = prop.attackRoll;
|
||||
|
||||
// Attack if there is an attack roll
|
||||
if (attack && attack.calculation) {
|
||||
if (targetIds.length) {
|
||||
for (const targetId of targetIds) {
|
||||
await applyAttackToTarget(task, action, attack, targetId, result, userInput);
|
||||
await applyAfterTriggers(action, prop, [targetId], userInput);
|
||||
await applyChildren(action, prop, [targetId], userInput);
|
||||
}
|
||||
} else {
|
||||
await applyAttackWithoutTarget(action, prop, attack, result, userInput);
|
||||
await applyAfterTriggers(action, prop, targetIds, userInput);
|
||||
await applyChildren(action, prop, targetIds, userInput);
|
||||
}
|
||||
} else {
|
||||
await applyAfterTriggers(action, prop, targetIds, userInput);
|
||||
await applyChildren(action, prop, targetIds, userInput);
|
||||
}
|
||||
if (prop.actionType === 'event' && prop.variableName) {
|
||||
await applyResetTask({
|
||||
subtaskFn: 'reset',
|
||||
eventName: prop.variableName,
|
||||
targetIds: [action.creatureId],
|
||||
}, action, result, userInput);
|
||||
}
|
||||
|
||||
// Finish
|
||||
return await applyAfterChildrenTriggers(action, prop, targetIds, userInput);
|
||||
}
|
||||
|
||||
async function applyAttackToTarget(
|
||||
task: PropTask, action: EngineAction, attack: CalculatedField, targetId: string,
|
||||
taskResult: TaskResult, userInput: InputProvider
|
||||
) {
|
||||
const prop = task.prop as CreaturePropertyTypes['action'] | CreaturePropertyTypes['spell'];
|
||||
|
||||
taskResult.pushScope = {
|
||||
'~attackHit': {},
|
||||
'~attackMiss': {},
|
||||
'~criticalHit': {},
|
||||
'~criticalMiss': {},
|
||||
'~attackRoll': {},
|
||||
}
|
||||
|
||||
await recalculateCalculation(attack, action, 'reduce', userInput);
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
const contents: LogContent[] = [];
|
||||
|
||||
const {
|
||||
resultPrefix,
|
||||
result,
|
||||
criticalHit,
|
||||
criticalMiss,
|
||||
advantage
|
||||
} = await rollAttack(attack, scope, taskResult.pushScope, userInput);
|
||||
|
||||
const targetScope = getVariables(targetId);
|
||||
const targetArmor = getNumberFromScope('armor', targetScope)
|
||||
|
||||
if (targetArmor !== undefined) {
|
||||
let name = criticalHit ? 'Critical Hit!' :
|
||||
criticalMiss ? 'Critical Miss!' :
|
||||
result >= targetArmor ? 'Hit!' : 'Miss!';
|
||||
if (advantage === 1) {
|
||||
name += ' (Advantage)';
|
||||
} else if (advantage === -1) {
|
||||
name += ' (Disadvantage)';
|
||||
}
|
||||
|
||||
contents.push({
|
||||
name,
|
||||
value: `${resultPrefix}\n**${result}**`,
|
||||
inline: true,
|
||||
...prop.silent && { silenced: true },
|
||||
});
|
||||
|
||||
if (criticalMiss || result < targetArmor) {
|
||||
taskResult.pushScope['~attackMiss'] = { value: true };
|
||||
} else {
|
||||
taskResult.pushScope['~attackHit'] = { value: true };
|
||||
}
|
||||
} else {
|
||||
contents.push({
|
||||
name: 'Error',
|
||||
value: 'Target has no `armor`',
|
||||
inline: true,
|
||||
...prop.silent && { silenced: true },
|
||||
}, {
|
||||
name: criticalHit ? 'Critical Hit!' : criticalMiss ? 'Critical Miss!' : 'To Hit',
|
||||
value: `${resultPrefix}\n**${result}**`,
|
||||
inline: true,
|
||||
...prop.silent && { silenced: true },
|
||||
});
|
||||
}
|
||||
if (contents.length) {
|
||||
taskResult.mutations.push({
|
||||
contents,
|
||||
targetIds: [targetId],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async function applyAttackWithoutTarget(action, prop, attack, taskResult: TaskResult, userInput: InputProvider) {
|
||||
taskResult.pushScope = {
|
||||
'~attackHit': {},
|
||||
'~attackMiss': {},
|
||||
'~criticalHit': {},
|
||||
'~criticalMiss': {},
|
||||
'~attackRoll': {},
|
||||
}
|
||||
await recalculateCalculation(attack, action, 'reduce', userInput);
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
const {
|
||||
resultPrefix,
|
||||
result,
|
||||
criticalHit,
|
||||
criticalMiss,
|
||||
advantage,
|
||||
} = await rollAttack(attack, scope, taskResult.pushScope, userInput);
|
||||
let name = criticalHit ? 'Critical Hit!' : criticalMiss ? 'Critical Miss!' : 'To Hit';
|
||||
if (advantage === 1) {
|
||||
name += ' (Advantage)';
|
||||
} else if (advantage === -1) {
|
||||
name += ' (Disadvantage)';
|
||||
}
|
||||
if (!criticalMiss) {
|
||||
taskResult.pushScope['~attackHit'] = { value: true }
|
||||
}
|
||||
if (!criticalHit) {
|
||||
taskResult.pushScope['~attackMiss'] = { value: true };
|
||||
}
|
||||
taskResult.mutations.push({
|
||||
contents: [{
|
||||
name,
|
||||
value: `${resultPrefix}\n**${result}**`,
|
||||
inline: true,
|
||||
...prop.silent && { silenced: true },
|
||||
}],
|
||||
targetIds: [],
|
||||
});
|
||||
}
|
||||
|
||||
async function rollAttack(attack, scope: any, resultPushScope, userInput: InputProvider) {
|
||||
const advantage: 0 | 1 | -1 = await userInput.advantage(
|
||||
(!!attack.advantage && !attack.disadvantage) ? 1 :
|
||||
(!attack.advantage && !!attack.disadvantage) ? -1 :
|
||||
0
|
||||
);
|
||||
const rollModifierText = numberToSignedString(attack.value, true);
|
||||
let value, resultPrefix;
|
||||
|
||||
if (advantage === 1) {
|
||||
const [[a, b]] = await userInput.rollDice([{ number: 2, diceSize: 20 }]);
|
||||
if (a >= b) {
|
||||
value = a;
|
||||
resultPrefix = `1d20 [ ${a}, ~~${b}~~ ] ${rollModifierText}`;
|
||||
} else {
|
||||
value = b;
|
||||
resultPrefix = `1d20 [ ~~${a}~~, ${b} ] ${rollModifierText}`;
|
||||
}
|
||||
} else if (advantage === -1) {
|
||||
const [[a, b]] = await userInput.rollDice([{ number: 2, diceSize: 20 }]);
|
||||
if (a <= b) {
|
||||
value = a;
|
||||
resultPrefix = `1d20 [ ${a}, ~~${b}~~ ] ${rollModifierText}`;
|
||||
} else {
|
||||
value = b;
|
||||
resultPrefix = `1d20 [ ~~${a}~~, ${b} ] ${rollModifierText}`;
|
||||
}
|
||||
} else {
|
||||
[[value]] = await userInput.rollDice([{ number: 1, diceSize: 20 }]);
|
||||
resultPrefix = `1d20 [${value}] ${rollModifierText}`
|
||||
}
|
||||
resultPushScope['~attackDiceRoll'] = { value };
|
||||
const result = value + attack.value;
|
||||
resultPushScope['~attackRoll'] = { value: result };
|
||||
const { criticalHit, criticalMiss } = applyCrits(value, scope, resultPushScope);
|
||||
return { resultPrefix, result, value, criticalHit, criticalMiss, advantage };
|
||||
}
|
||||
|
||||
function applyCrits(value, scope, resultPushScope) {
|
||||
const scopeCritTarget = getNumberFromScope('~criticalHitTarget', scope);
|
||||
const criticalHitTarget = scopeCritTarget !== undefined &&
|
||||
Number.isFinite(scopeCritTarget) ? scopeCritTarget : 20;
|
||||
|
||||
const scopeCritMissTarget = getNumberFromScope('~criticalMissTarget', scope);
|
||||
const criticalMissTarget = scopeCritMissTarget !== undefined &&
|
||||
Number.isFinite(scopeCritMissTarget) ? scopeCritMissTarget : 1;
|
||||
|
||||
const criticalHit = value >= criticalHitTarget;
|
||||
const criticalMiss = value <= criticalMissTarget;
|
||||
if (criticalHit) {
|
||||
resultPushScope['~criticalHit'] = { value: true };
|
||||
} else if (criticalMiss) {
|
||||
resultPushScope['~criticalMiss'] = { value: true };
|
||||
}
|
||||
return { criticalHit, criticalMiss };
|
||||
}
|
||||
@@ -1,165 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Deterministic ids for the creatures and properties used in this suite
const [
  creatureId, targetCreatureId, targetCreature2Id, adjustmentToTargetId, adjustmentToSelfId, targetCreatureStrengthId, targetCreature2StrengthId, selfDexterityId
] = getRandomIds(100);

// Acting creature: owns both adjustment properties plus its own dexterity
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    // Increments the target's strength damage by 2
    {
      _id: adjustmentToTargetId,
      type: 'adjustment',
      target: 'target',
      stat: 'strength',
      operation: 'increment',
      amount: { calculation: '2' }
    },
    // Sets the caster's own dexterity to 11
    {
      _id: adjustmentToSelfId,
      type: 'adjustment',
      target: 'self',
      stat: 'dexterity',
      operation: 'set',
      amount: { calculation: '11' }
    },
    {
      _id: selfDexterityId,
      type: 'attribute',
      name: 'Dexterity',
      attributeType: 'ability',
      variableName: 'dexterity',
      baseValue: { calculation: '13' },
    },
  ],
}

// First target: strength 12
const actionTargetCreature: TestCreature = {
  _id: targetCreatureId,
  props: [
    {
      _id: targetCreatureStrengthId,
      type: 'attribute',
      attributeType: 'ability',
      variableName: 'strength',
      baseValue: { calculation: '12' },
    }
  ]
}

// Second target: strength 18
const actionTargetCreature2: TestCreature = {
  _id: targetCreature2Id,
  props: [
    {
      _id: targetCreature2StrengthId,
      type: 'attribute',
      attributeType: 'ability',
      variableName: 'strength',
      baseValue: { calculation: '18' },
    }
  ]
}
|
||||
|
||||
describe('Apply Adjustment Properties', function () {
  // Increase timeout: the action engine setup can be slow in CI
  this.timeout(8000);

  // Recreate all fixture creatures from scratch before the suite runs
  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
    await createTestCreature(actionTargetCreature);
    await createTestCreature(actionTargetCreature2);
  });

  it('Adjusts the attributes of self', async function () {
    const action = await runActionById(adjustmentToSelfId);
    assert.exists(action);
    // 'set' to 11 from a base of 13 is recorded as 2 points of damage
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Attribute damage',
          value: 'Dexterity set from 13 to 11',
        }
      ],
      targetIds: [creatureId],
      updates: [
        {
          propId: selfDexterityId,
          type: 'attribute',
          set: { damage: 2, value: 11 },
        },
      ],
    }]);
  });

  it('Adjusts the attributes of a single target', async function () {
    const action = await runActionById(adjustmentToTargetId, [targetCreatureId]);
    assert.exists(action);
    // 'increment' damage by 2 decrements the attribute's value by 2
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Ability damaged',
          value: '−2 Attribute',
        }
      ],
      targetIds: [targetCreatureId],
      updates: [
        {
          propId: targetCreatureStrengthId,
          type: 'attribute',
          inc: { damage: 2, value: -2 },
        },
      ],
    }]);
  });

  it('Adjusts the attributes of multiple targets', async function () {
    const action = await runActionById(adjustmentToTargetId, [
      targetCreatureId, targetCreature2Id
    ]);
    assert.exists(action);
    // The adjustment fans out into one mutation per target
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Ability damaged',
          value: '−2 Attribute',
        }
      ],
      targetIds: [targetCreatureId],
      updates: [
        {
          propId: targetCreatureStrengthId,
          type: 'attribute',
          inc: { damage: 2, value: -2 },
        },
      ],
    }, {
      contents: [
        {
          inline: true,
          name: 'Ability damaged',
          value: '−2 Attribute',
        }
      ],
      targetIds: [targetCreature2Id],
      updates: [
        {
          propId: targetCreature2StrengthId,
          type: 'attribute',
          inc: { damage: 2, value: -2 },
        },
      ],
    }]);
  });
});
|
||||
@@ -1,77 +0,0 @@
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import { applyDefaultAfterPropTasks, applyTaskToEachTarget } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import recalculateCalculation from '/imports/api/engine/action/functions/recalculateCalculation';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import TaskResult from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import applyTask from '/imports/api/engine/action/tasks/applyTask';
|
||||
import { getSingleProperty, getVariables } from '/imports/api/engine/loadCreatures';
|
||||
import getPropertyTitle from '/imports/api/utility/getPropertyTitle';
|
||||
import { CreatureProperty } from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
|
||||
export default async function applyAdjustmentProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, userInput: InputProvider
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'adjustment') {
|
||||
throw new Meteor.Error('wrong-property', `Expected an adjustment, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
const damageTargetIds = prop.target === 'self' ? [action.creatureId] : task.targetIds;
|
||||
|
||||
if (damageTargetIds.length > 1) {
|
||||
return applyTaskToEachTarget(action, task, damageTargetIds, userInput);
|
||||
}
|
||||
|
||||
// Get the operation and value and push the damage hooks to the queue
|
||||
if (!prop.amount) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: 'Attribute damage does not have an amount set',
|
||||
silenced: prop.silent,
|
||||
}, damageTargetIds);
|
||||
return;
|
||||
}
|
||||
|
||||
// Evaluate the amount
|
||||
await recalculateCalculation(prop.amount, action, 'reduce', userInput);
|
||||
const value = Number(prop.amount.value ?? 0);
|
||||
if (!isFinite(value)) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: 'Attribute damage does not have a finite amount set',
|
||||
silenced: prop.silent,
|
||||
}, damageTargetIds);
|
||||
return;
|
||||
}
|
||||
|
||||
if (damageTargetIds.length && damageTargetIds.length !== 1) {
|
||||
throw new Meteor.Error('1 target Expected', 'At this step, only a single target is supported');
|
||||
}
|
||||
const targetId = damageTargetIds[0];
|
||||
let stat: CreatureProperty | undefined;
|
||||
if (targetId && prop.stat) {
|
||||
const statId = getVariables(targetId)?.[prop.stat]?._propId;
|
||||
stat = statId && getSingleProperty(targetId, statId);
|
||||
if (!stat?.type) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: `Could not apply attribute damage, creature does not have \`${prop.stat}\` set`,
|
||||
silenced: prop.silent,
|
||||
}, damageTargetIds);
|
||||
return;
|
||||
}
|
||||
}
|
||||
await applyTask(action, {
|
||||
targetIds: damageTargetIds,
|
||||
subtaskFn: 'damageProp',
|
||||
params: {
|
||||
title: getPropertyTitle(prop),
|
||||
operation: prop.operation,
|
||||
value,
|
||||
targetProp: stat ?? { name: prop.stat ?? '' },
|
||||
},
|
||||
}, userInput);
|
||||
return applyDefaultAfterPropTasks(action, prop, damageTargetIds, userInput);
|
||||
}
|
||||
@@ -1,376 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Deterministic ids for every creature and branch property in this suite
const [
  creatureId, targetCreatureId, ifTrueBranchId, ifFalseBranchId, indexBranchId, attackHitId, attackMissId, saveSucceedId, saveFailId, randomBranchId, targetCreature2Id, eachTargetBranchId, choiceBranchId,
] = getRandomIds(100);

// Acting creature: one property per branch type under test
const actionTestCreature = {
  _id: creatureId,
  props: [
    // If branch
    {
      _id: ifTrueBranchId,
      type: 'branch',
      branchType: 'if',
      condition: { calculation: 'true' },
      children: [
        {
          type: 'note',
          summary: { text: 'this should run' },
        },
      ],
    },
    {
      _id: ifFalseBranchId,
      type: 'branch',
      branchType: 'if',
      condition: { calculation: 'false' },
      children: [
        {
          type: 'note',
          summary: { text: 'this should not run' },
        },
      ],
    },
    // index branch: condition evaluates to 2, so the second child runs
    {
      _id: indexBranchId,
      type: 'branch',
      branchType: 'index',
      condition: { calculation: '1 + 1' },
      children: [
        {
          type: 'note',
          summary: { text: 'FAIL: index child 1 should not run' },
        },
        {
          type: 'note',
          summary: { text: 'Child 2 should run' },
        },
        {
          type: 'note',
          summary: { text: 'FAIL: index child 3 should not run' },
        },
      ],
    },
    // Hit and miss branches (test RNG is fixed, so +1 hits AC 10 and -1 misses)
    {
      _id: attackHitId,
      type: 'action',
      attackRoll: { calculation: '1' },
      children: [
        {
          type: 'branch',
          branchType: 'hit',
          children: [{
            type: 'note',
            summary: { text: 'attack hit branch' }
          }],
        },
        {
          type: 'branch',
          branchType: 'miss',
          children: [{
            type: 'note',
            summary: { text: 'attack miss branch' }
          }],
        },
      ]
    },
    {
      _id: attackMissId,
      type: 'action',
      attackRoll: { calculation: '-1' },
      children: [
        {
          type: 'branch',
          branchType: 'hit',
          children: [{
            type: 'note',
            summary: { text: 'attack hit branch' }
          }],
        },
        {
          type: 'branch',
          branchType: 'miss',
          children: [{
            type: 'note',
            summary: { text: 'attack miss branch' }
          }],
        },
      ]
    },

    // Save and fail save branch (target's save bonus is +3, fixed roll is 10)
    {
      _id: saveSucceedId,
      type: 'savingThrow',
      dc: { calculation: '10' },
      target: 'target',
      stat: 'strengthSave',
      children: [
        {
          type: 'branch',
          branchType: 'successfulSave',
          children: [{
            type: 'note',
            summary: { text: 'made save branch' }
          }],
        },
        {
          type: 'branch',
          branchType: 'failedSave',
          children: [{
            type: 'note',
            summary: { text: 'failed save branch' }
          }],
        },
      ]
    },
    {
      _id: saveFailId,
      type: 'savingThrow',
      dc: { calculation: '15' },
      target: 'target',
      stat: 'strengthSave',
      children: [
        {
          type: 'branch',
          branchType: 'successfulSave',
          children: [{
            type: 'note',
            summary: { text: 'made save branch' }
          }],
        },
        {
          type: 'branch',
          branchType: 'failedSave',
          children: [{
            type: 'note',
            summary: { text: 'failed save branch' }
          }],
        },
      ]
    },

    // Random branch (test RNG is fixed at average, so child 2 runs)
    {
      _id: randomBranchId,
      type: 'branch',
      branchType: 'random',
      children: [
        {
          type: 'note',
          summary: { text: 'FAIL: random child 1 should not run' },
        },
        {
          type: 'note',
          summary: { text: 'Random child 2 should run' },
        },
        {
          type: 'note',
          summary: { text: 'FAIL: random child 3 should not run' },
        },
      ],
    },

    // Each target branch
    {
      _id: eachTargetBranchId,
      type: 'branch',
      branchType: 'eachTarget',
      children: [
        {
          type: 'note',
          summary: { text: 'some note' }
        }
      ]
    },

    // Choice branch (test choices are fixed to the first option)
    {
      _id: choiceBranchId,
      type: 'branch',
      branchType: 'choice',
      children: [
        {
          type: 'note',
          summary: { text: 'Choice child 1 should run' },
        },
        {
          type: 'note',
          summary: { text: 'Fail: choice child 2 should not run' },
        },
        {
          type: 'note',
          summary: { text: 'Fail: choice child 3 should not run' },
        },
      ],
    },
  ],
}

// First target: AC 10 and a +3 strength save
const actionTargetCreature = {
  _id: targetCreatureId,
  props: [
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'armor',
      baseValue: { calculation: '10' },
    },
    {
      type: 'skill',
      skillType: 'save',
      variableName: 'strengthSave',
      baseValue: { calculation: '3' },
    },
  ]
}

// Second target: AC 15, no saves
const actionTargetCreature2 = {
  _id: targetCreature2Id,
  props: [
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'armor',
      baseValue: { calculation: '15' },
    },
  ]
}
|
||||
|
||||
describe('Apply Branch Properties', function () {
|
||||
// Increase timeout
|
||||
this.timeout(8000);
|
||||
|
||||
before(async function () {
|
||||
await removeAllCreaturesAndProps();
|
||||
await createTestCreature(actionTestCreature);
|
||||
await createTestCreature(actionTargetCreature);
|
||||
await createTestCreature(actionTargetCreature2);
|
||||
});
|
||||
|
||||
// If branch
|
||||
it('Runs an if branch with a true condition', async function () {
|
||||
const action = await runActionById(ifTrueBranchId);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ value: 'this should run' }],
|
||||
targetIds: [],
|
||||
}]);
|
||||
});
|
||||
it('runs an if branch with a false condition', async function () {
|
||||
const action = await runActionById(ifFalseBranchId);
|
||||
assert.deepEqual(allMutations(action), []);
|
||||
});
|
||||
it('runs an if branch and chooses the correct child', async function () {
|
||||
const action = await runActionById(indexBranchId);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ value: 'Child 2 should run' }],
|
||||
targetIds: [],
|
||||
}]);
|
||||
});
|
||||
|
||||
// Hit and miss branch
|
||||
it('Runs only hit branches on an attack that hits', async function () {
|
||||
const action = await runActionById(attackHitId, [targetCreatureId]);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ name: 'Action' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ inline: true, name: 'Hit!', value: '1d20 [10] + 1\n**11**' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ value: 'attack hit branch' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}]);
|
||||
});
|
||||
it('Runs only miss branches on an attack that misses', async function () {
|
||||
const action = await runActionById(attackMissId, [targetCreatureId]);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ name: 'Action' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ inline: true, name: 'Miss!', value: '1d20 [10] − 1\n**9**' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ value: 'attack miss branch' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}]);
|
||||
});
|
||||
|
||||
// Save succeed and fail branches
|
||||
it('Runs only miss branches on an attack that misses', async function () {
|
||||
const action = await runActionById(saveSucceedId, [targetCreatureId]);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{
|
||||
name: 'Saving throw',
|
||||
value: 'DC **10**',
|
||||
inline: true
|
||||
}, {
|
||||
name: 'Successful save',
|
||||
value: '1d20 [ 10 ] + 3\n**13**',
|
||||
inline: true
|
||||
}],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ value: 'made save branch' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}]);
|
||||
});
|
||||
it('Runs only miss branches on an attack that misses', async function () {
|
||||
const action = await runActionById(saveFailId, [targetCreatureId]);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{
|
||||
name: 'Saving throw',
|
||||
value: 'DC **15**',
|
||||
inline: true
|
||||
}, {
|
||||
name: 'Failed save',
|
||||
value: '1d20 [ 10 ] + 3\n**13**',
|
||||
inline: true
|
||||
}],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ value: 'failed save branch' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}]);
|
||||
});
|
||||
|
||||
// Random branches, RNG is fixed at average for testing, so child 2 should run
|
||||
it('runs a random branch and chooses the correct child', async function () {
|
||||
const action = await runActionById(randomBranchId);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ value: 'Random child 2 should run' }],
|
||||
targetIds: [],
|
||||
}]);
|
||||
});
|
||||
|
||||
// Branches can split actions across targets
|
||||
it('Can split actions to targets using a branch', async function () {
|
||||
const action = await runActionById(eachTargetBranchId, [targetCreatureId, targetCreature2Id]);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ value: 'some note' }],
|
||||
targetIds: [targetCreatureId],
|
||||
}, {
|
||||
contents: [{ value: 'some note' }],
|
||||
targetIds: [targetCreature2Id],
|
||||
}]);
|
||||
});
|
||||
|
||||
// Choice branches, choices are fixed to first option for testing
|
||||
it('runs a choice branch and chooses the correct child', async function () {
|
||||
const action = await runActionById(choiceBranchId);
|
||||
assert.deepEqual(allMutations(action), [{
|
||||
contents: [{ value: 'Choice child 1 should run' }],
|
||||
targetIds: [],
|
||||
}]);
|
||||
});
|
||||
});
|
||||
@@ -1,153 +0,0 @@
|
||||
import { filter } from 'lodash';
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import { applyAfterPropTasksForSingleChild, applyAfterPropTasksForSomeChildren, applyAfterTasksSkipChildren, applyDefaultAfterPropTasks, applyTaskToEachTarget } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import { getEffectiveActionScope } from '/imports/api/engine/action/functions/getEffectiveActionScope';
|
||||
import recalculateCalculation from '/imports/api/engine/action/functions/recalculateCalculation';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import TaskResult from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import { getPropertyChildren } from '/imports/api/engine/loadCreatures';
|
||||
|
||||
export default async function applyBranchProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, userInput: InputProvider
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'branch') {
|
||||
throw new Meteor.Error('wrong-property', `Expected a branch, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
const targets = task.targetIds;
|
||||
|
||||
switch (prop.branchType) {
|
||||
case 'if': {
|
||||
if (!prop.condition) {
|
||||
result.appendLog({
|
||||
name: 'Branch Error',
|
||||
value: 'If branch does not have a condition set',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
await recalculateCalculation(prop.condition, action, 'reduce', userInput);
|
||||
if (prop.condition?.value) {
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'index': {
|
||||
const children = await getPropertyChildren(action.creatureId, prop);
|
||||
if (!children.length) {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
if (!prop.condition) {
|
||||
result.appendLog({
|
||||
name: 'Branch Error',
|
||||
value: 'Index branch does not have a condition set',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
await recalculateCalculation(prop.condition, action, 'reduce', userInput);
|
||||
let index = Number(prop.condition.value);
|
||||
if (!isFinite(index)) {
|
||||
result.appendLog({
|
||||
name: 'Branch Error',
|
||||
value: `Index did not resolve into a valid number, got \`${prop.condition?.value}\` instead`,
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
index = Math.floor(index);
|
||||
if (index < 1) index = 1;
|
||||
if (index > children.length) index = children.length;
|
||||
const child = children[index - 1];
|
||||
return applyAfterPropTasksForSingleChild(action, prop, child, targets, userInput);
|
||||
}
|
||||
case 'hit': {
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
if (scope['~attackHit']?.value) {
|
||||
if (!targets.length && !prop.silent) {
|
||||
result.appendLog({
|
||||
value: '**On hit**',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'miss': {
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
if (scope['~attackMiss']?.value) {
|
||||
if (!targets.length && !prop.silent) {
|
||||
result.appendLog({
|
||||
value: '**On miss**',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'failedSave': {
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
if (scope['~saveFailed']?.value) {
|
||||
if (!targets.length && !prop.silent) {
|
||||
result.appendLog({
|
||||
value: '**On failed save**',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'successfulSave': {
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
if (scope['~saveSucceeded']?.value) {
|
||||
if (!targets.length && !prop.silent) {
|
||||
result.appendLog({
|
||||
value: '**On save**',
|
||||
silenced: prop.silent,
|
||||
}, targets);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'random': {
|
||||
const children = await getPropertyChildren(action.creatureId, prop);
|
||||
if (children.length) {
|
||||
const index = (await userInput.rollDice([{ number: 1, diceSize: children.length }]))[0][0];
|
||||
const child = children[index - 1];
|
||||
return applyAfterPropTasksForSingleChild(action, prop, child, targets, userInput);
|
||||
} else {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
}
|
||||
case 'eachTarget':
|
||||
if (targets.length > 1) {
|
||||
return applyTaskToEachTarget(action, task, targets, userInput);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, targets, userInput);
|
||||
case 'choice': {
|
||||
const children = await getPropertyChildren(action.creatureId, prop);
|
||||
let choices: string[];
|
||||
let chosenChildren: typeof children = [];
|
||||
if (children.length) {
|
||||
choices = await userInput.choose(children);
|
||||
chosenChildren = filter(children, child => choices.includes(child._id));
|
||||
}
|
||||
if (!children.length || !chosenChildren.length) {
|
||||
return applyAfterTasksSkipChildren(action, prop, targets, userInput);
|
||||
}
|
||||
return applyAfterPropTasksForSomeChildren(action, prop, chosenChildren, targets, userInput);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,115 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Shared random ids for the caster, the target creature, and the buff under test.
const [
  creatureId, targetCreatureId, buffId
] = getRandomIds(100);

// Caster fixture: owns a buff whose description and child effect reference both
// the caster's own `strength` and the target's `~target.strength`.
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    {
      _id: buffId,
      type: 'buff',
      description: { text: 'This buff reduces AC of target by difference between the strength of caster {strength} and the target {~target.strength}' },
      children: [
        {
          type: 'effect',
          stats: ['armor'],
          operation: 'add',
          amount: { calculation: '~target.strength - strength' },
        },
      ],
    },
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'strength',
      baseValue: { calculation: '18' },
    },
  ],
};

// Target fixture: carries the armor and strength attributes the buff reads.
const actionTargetCreature: TestCreature = {
  _id: targetCreatureId,
  props: [
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'armor',
      baseValue: { calculation: '10' },
    },
    {
      type: 'attribute',
      attributeType: 'ability',
      variableName: 'strength',
      baseValue: { calculation: '12' },
    },
  ],
};

describe('Apply Buff Properties', function () {
  // Increase timeout
  this.timeout(8000);

  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
    await createTestCreature(actionTargetCreature);
  });

  it('Applies a buff and freezes some variables', async function () {
    const action = await runActionById(buffId, [targetCreatureId]);
    const mutations = allMutations(action);
    // Get random Ids of inserted props
    const insertedBuffId = mutations?.[1]?.inserts?.[0]?._id;
    const insertedEffectId = mutations?.[1]?.inserts?.[1]?._id;
    // First mutation logs the buff; second inserts the crystallized buff tree
    // onto the target, with caster-side `strength` frozen to 18.
    assert.deepEqual(mutations, [{
      contents: [{
        name: 'Buff',
        // TODO Make target strength available in action scope to fix: 'target 0' -> 'target 12'
        value: 'This buff reduces AC of target by difference between the strength of caster 18 and the target 0',
      }],
      targetIds: [targetCreatureId],
    }, {
      contents: [],
      inserts: [{
        _id: insertedBuffId,
        type: 'buff',
        description: {
          text: 'This buff reduces AC of target by difference between the strength of caster {18} and the target {strength}'
        },
        left: 1,
        right: 4,
        root: {
          collection: 'creatures',
          id: targetCreatureId,
        },
        tags: [],
        target: 'target',
      }, {
        _id: insertedEffectId,
        type: 'effect',
        stats: ['armor'],
        operation: 'add',
        amount: { calculation: 'strength - 18' },
        left: 2,
        right: 3,
        parentId: insertedBuffId,
        root: {
          collection: 'creatures',
          id: targetCreatureId,
        },
        tags: [],
      }],
      targetIds: [targetCreatureId],
    }]);
  });
});
|
||||
@@ -1,178 +0,0 @@
|
||||
import { get } from 'lodash';
|
||||
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import { getPropertyDescendants } from '/imports/api/engine/loadCreatures';
|
||||
import resolve from '/imports/parser/resolve';
|
||||
import map from '/imports/parser/map';
|
||||
import toString from '/imports/parser/toString';
|
||||
import computedSchemas from '/imports/api/properties/computedOnlyPropertySchemasIndex.js';
|
||||
import applyFnToKey, { applyFnToKeyAsync } from '/imports/api/engine/computation/utility/applyFnToKey';
|
||||
import accessor from '/imports/parser/parseTree/accessor';
|
||||
import TaskResult, { Mutation } from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import { getEffectiveActionScope } from '/imports/api/engine/action/functions/getEffectiveActionScope';
|
||||
import cyrb53 from '/imports/api/engine/computation/utility/cyrb53';
|
||||
import { renewDocIds } from '/imports/api/parenting/parentingFunctions';
|
||||
import { cleanProps } from '/imports/api/creature/creatureProperties/methods/copyPropertyToLibrary';
|
||||
import recalculateInlineCalculations from '/imports/api/engine/action/functions/recalculateInlineCalculations';
|
||||
import getPropertyTitle from '/imports/api/utility/getPropertyTitle';
|
||||
import INLINE_CALCULATION_REGEX from '/imports/constants/INLINE_CALCULATION_REGEX';
|
||||
import { applyAfterTasksSkipChildren } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
|
||||
/**
 * Applies a 'buff' property: copies the buff and all of its descendants onto
 * each target creature as new inserted properties. Unless the buff opts out,
 * its calculations are first "crystallized" (caster-side variables resolved
 * to constants) so the buff keeps the caster's values once attached.
 *
 * The buff's children are carried along in the insert rather than being
 * applied as their own tasks; after-prop tasks run with children skipped.
 */
export default async function applyBuffProperty(
  task: PropTask, action: EngineAction, result: TaskResult, userInput: InputProvider
) {
  // Clone so crystallization below does not mutate the stored property
  const prop = EJSON.clone(task.prop);

  if (prop.type !== 'buff') {
    throw new Meteor.Error('wrong-property', `Expected a buff, got ${prop.type} instead`);
  }

  // Self-targeted buffs always apply to the acting creature
  const targetIds = prop.target === 'self' ? [action.creatureId] : task.targetIds;

  // Log the buff and return if there are no targets
  if (!targetIds.length) {
    await logBuff(prop, targetIds, action, userInput, result);
    await applyAfterTasksSkipChildren(action, prop, targetIds, userInput);
    return;
  }

  // Get the buff and its descendants
  const propList = [
    EJSON.clone(prop),
    ...getPropertyDescendants(action.creatureId, prop._id),
  ];

  // Crystallize the variables
  // (field name is spelled 'skipCrystalization' in the schema)
  if (!prop.skipCrystalization) {
    await crystallizeVariables(action, propList, task, result);
  }

  for (const target of targetIds) {
    // Create a per-target mutation
    const mutation: Mutation = { targetIds: [target], contents: [] };

    // Create a per-target copy of the propList
    let targetPropList = EJSON.clone(propList);

    // Give the properties new IDs as descendants of the target
    renewDocIds({
      docArray: targetPropList,
      idMap: {
        // Detach from the original parent, re-root under the target creature
        ...prop.parentId && { [prop.parentId]: null },
        [prop.root.id]: target,
      },
      collectionMap: { [prop.root.collection]: 'creatures' }
    });

    //Log the buff
    // NOTE(review): called once per target but with the full targetIds list,
    // so multiple targets produce repeated log entries — confirm intended.
    await logBuff(prop, targetIds, action, userInput, result);

    // remove all the computed fields
    targetPropList = cleanProps(targetPropList);

    // Insert the props in the mutation
    mutation.inserts = targetPropList;

    // Add the mutation to the results
    result.mutations.push(mutation);
  }
  await applyAfterTasksSkipChildren(action, prop, targetIds, userInput);
}
|
||||
|
||||
async function logBuff(prop, targetIds, action, userInput, result) {
|
||||
//Log the buff
|
||||
let logValue = prop.description?.value
|
||||
if (prop.description?.text) {
|
||||
await recalculateInlineCalculations(prop.description, action, 'reduce', userInput);
|
||||
logValue = prop.description?.value;
|
||||
}
|
||||
result.appendLog({
|
||||
name: getPropertyTitle(prop),
|
||||
...logValue && { value: logValue },
|
||||
silenced: prop.silent,
|
||||
}, targetIds);
|
||||
}
|
||||
|
||||
/**
 * Replaces all variables with their resolved values
 * except variables of the form `~target.thing.total` become `thing.total`
 *
 * Mutates each prop in `propList` in place: computed calculations have their
 * parse trees rewritten, and inline-calculation text fields are rebuilt from
 * the rewritten calculations. Errors encountered while resolving are appended
 * to `result` for the task's targets.
 */
async function crystallizeVariables(
  action: EngineAction, propList: any[], task: PropTask, result: TaskResult
) {
  const scope = await getEffectiveActionScope(action);
  for (const prop of propList) {
    // Iterate through all the calculations and crystallize them
    for (const calcKey of computedSchemas[prop.type].computedFields()) {
      await applyFnToKeyAsync(prop, calcKey, async (prop, key) => {
        const calcObj = get(prop, key);
        if (!calcObj?.parseNode) return;
        // Rewrite every node of the calculation's parse tree
        calcObj.parseNode = await map(calcObj.parseNode, async node => {
          // Skip nodes that aren't symbols or accessors
          if (
            node.parseType !== 'accessor'
          ) return node;
          // Handle variables
          if (node.parseType === 'accessor' && node.name === '~target') {
            // strip ~target
            // `~target.strength.total` becomes `strength.total`, to be
            // resolved later against the creature the buff lands on
            if (node.path?.length > 0) {
              const name = node.path.shift();
              return accessor.create({
                name,
                path: node.path?.length ? node.path : undefined,
              });
            } else {
              // Can't strip if there isn't anything in the path after ~target
              result.appendLog({
                name: 'Error',
                value: 'Variable `~target` should not be used without a property: ~target.property',
                silenced: prop.silent,
              }, task.targetIds);
            }
            return node;
          } else {
            // Resolve all other variables
            // (caster-side values become constants — "crystallized")
            const { result: nodeResult, context } = await resolve('reduce', node, scope);
            result.appendParserContextErrors(context, task.targetIds);
            return nodeResult;
          }
        });
        // Re-serialize the rewritten tree and refresh its change-detection hash
        calcObj.calculation = toString(calcObj.parseNode);
        calcObj.hash = cyrb53(calcObj.calculation);
      });
    }
    // For each key in the schema
    for (const calcKey of computedSchemas[prop.type].inlineCalculationFields()) {
      // That ends in .inlineCalculations
      applyFnToKey(prop, calcKey, (prop, key) => {
        const inlineCalcObj = get(prop, key);
        if (!inlineCalcObj) return;

        // If there is no text, skip
        if (!inlineCalcObj.text) {
          return;
        }

        // Replace all the existing calculations
        // Each {…} occurrence in the text is replaced, in order, with the
        // corresponding (already crystallized) calculation string
        let index = -1;
        inlineCalcObj.text = inlineCalcObj.text.replace(INLINE_CALCULATION_REGEX, () => {
          index += 1;
          return `{${inlineCalcObj.inlineCalculations[index].calculation}}`;
        });

        // Set the value to the uncomputed string
        inlineCalcObj.value = inlineCalcObj.text;

        // Write a new hash
        const inlineCalcHash = cyrb53(inlineCalcObj.text);
        if (inlineCalcHash === inlineCalcObj.hash) {
          // Skip if nothing changed
          return;
        }
        inlineCalcObj.hash = inlineCalcHash;
      });
    }
  }
}
|
||||
@@ -1,98 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Random ids shared by the fixtures and the assertions below.
const [
  creatureId, otherCreatureId, buffId, removeParentBuffId, removeTargetBuffsId,
] = getRandomIds(100);

// Creature with a tagged buff; one of the buff's children is a buffRemover
// that removes its own parent buff (the targetParentBuff case).
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    {
      _id: buffId,
      type: 'buff',
      description: { text: 'This buff reduces AC of target by difference between the strength of caster {strength} and the target {~target.strength}' },
      tags: ['some buff'],
      children: [
        {
          type: 'effect',
          stats: ['armor'],
          operation: 'add',
          amount: { calculation: '~target.strength - strength' },
        },
        {
          _id: removeParentBuffId,
          type: 'buffRemover',
          targetParentBuff: true,
          target: 'self',
        },
      ],
    },
    {
      type: 'attribute',
      attributeType: 'stat',
      variableName: 'strength',
      baseValue: { calculation: '18' },
    },
  ],
};

// Second creature whose buffRemover targets buffs on its target by tag.
const actionOtherCreature: TestCreature = {
  _id: otherCreatureId,
  props: [
    {
      _id: removeTargetBuffsId,
      type: 'buffRemover',
      target: 'target',
      targetTags: ['some buff']
    },
  ],
};

describe('Apply Buff Remover Properties', function () {
  // Increase timeout
  this.timeout(8000);

  // Rebuild both creatures before every test, since each test removes a buff
  beforeEach(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
    await createTestCreature(actionOtherCreature);
  });

  it('removes a parent buff', async function () {
    const action = await runActionById(removeParentBuffId);
    const mutations = allMutations(action);
    assert.deepEqual(mutations, [{
      contents: [{
        name: 'Removed',
        value: 'Buff',
      }],
      removals: [{
        propId: buffId,
      }],
      targetIds: []
    }]);
  });

  it('removes a tag targeted buff', async function () {
    const action = await runActionById(removeTargetBuffsId, [creatureId]);
    const mutations = allMutations(action);
    assert.deepEqual(mutations, [{
      contents: [{
        name: 'Removed',
        value: 'Buff',
      }],
      removals: [{
        propId: buffId,
      }],
      targetIds: [creatureId]
    }]);
  });
});
|
||||
@@ -1,123 +0,0 @@
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import TaskResult from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import getPropertyTitle from '/imports/api/utility/getPropertyTitle';
|
||||
import { findLast, filter, difference, intersection } from 'lodash';
|
||||
import { getPropertiesOfType, getPropertyAncestors } from '/imports/api/engine/loadCreatures';
|
||||
import getEffectivePropTags from '/imports/api/engine/computation/utility/getEffectivePropTags';
|
||||
import { applyDefaultAfterPropTasks, applyTaskToEachTarget } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import { CreaturePropertyTypes } from '/imports/api/creature/creatureProperties/CreatureProperties';
|
||||
|
||||
export default async function applyBuffRemoverProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, userInput: InputProvider
|
||||
) {
|
||||
const prop = task.prop as CreaturePropertyTypes['buffRemover'];
|
||||
|
||||
const targetIds = prop.target === 'self' ? [action.creatureId] : task.targetIds;
|
||||
|
||||
if (prop.name) {
|
||||
// Log Name
|
||||
result.appendLog({
|
||||
name: getPropertyTitle(prop),
|
||||
silenced: prop.silent,
|
||||
}, task.targetIds)
|
||||
}
|
||||
|
||||
if (targetIds.length > 1) {
|
||||
return applyTaskToEachTarget(action, task, targetIds, userInput);
|
||||
}
|
||||
|
||||
if (!targetIds.length) {
|
||||
return applyDefaultAfterPropTasks(action, prop, task.targetIds, userInput);
|
||||
}
|
||||
|
||||
if (targetIds.length !== 1) {
|
||||
throw 'At this step, only a single target is supported'
|
||||
}
|
||||
const targetId = targetIds[0];
|
||||
|
||||
// Remove buffs
|
||||
if (prop.targetParentBuff) {
|
||||
// Remove nearest ancestor buff
|
||||
const ancestors = getPropertyAncestors(action.creatureId, prop._id);
|
||||
const nearestBuff = findLast(ancestors, ancestor => ancestor.type === 'buff');
|
||||
if (!nearestBuff) {
|
||||
result.appendLog({
|
||||
name: 'Error',
|
||||
value: 'Buff remover does not have a parent buff to remove',
|
||||
silenced: prop.silent,
|
||||
}, [targetId]);
|
||||
return;
|
||||
}
|
||||
removeBuff(nearestBuff, prop, result);
|
||||
} else {
|
||||
// Get all the buffs targeted by tags
|
||||
const allBuffs = getPropertiesOfType(targetId, 'buff');
|
||||
const targetedBuffs = filter(allBuffs, (buff): boolean => {
|
||||
if (buff.inactive) return false;
|
||||
if (buffRemoverMatchTags(prop, buff)) return true;
|
||||
return false;
|
||||
});
|
||||
// Remove the buffs
|
||||
if (prop.removeAll) {
|
||||
// Remove all matching buffs
|
||||
targetedBuffs.forEach(buff => {
|
||||
removeBuff(buff, prop, result);
|
||||
});
|
||||
} else {
|
||||
// Sort in reverse order
|
||||
targetedBuffs.sort((a, b) => b.left - a.left);
|
||||
// Remove the one with the highest order
|
||||
const buff = targetedBuffs[0];
|
||||
if (buff) {
|
||||
removeBuff(buff, prop, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, task.targetIds, userInput);
|
||||
}
|
||||
|
||||
function removeBuff(buff: any, prop, result: TaskResult) {
|
||||
result.mutations.push({
|
||||
targetIds: result.targetIds,
|
||||
removals: [{ propId: buff._id }],
|
||||
contents: [{
|
||||
name: 'Removed',
|
||||
value: `${buff.name || 'Buff'}`,
|
||||
...prop.silent && { silenced: true },
|
||||
}],
|
||||
});
|
||||
}
|
||||
|
||||
function buffRemoverMatchTags(buffRemover, prop) {
|
||||
let matched = false;
|
||||
const propTags = getEffectivePropTags(prop);
|
||||
// Check the target tags
|
||||
if (
|
||||
!buffRemover.targetTags?.length ||
|
||||
difference(buffRemover.targetTags, propTags).length === 0
|
||||
) {
|
||||
matched = true;
|
||||
}
|
||||
// Check the extra tags
|
||||
buffRemover.extraTags?.forEach(extra => {
|
||||
if (extra.operation === 'OR') {
|
||||
if (matched) return;
|
||||
if (
|
||||
!extra.tags.length ||
|
||||
difference(extra.tags, propTags).length === 0
|
||||
) {
|
||||
matched = true;
|
||||
}
|
||||
} else if (extra.operation === 'NOT') {
|
||||
if (
|
||||
extra.tags.length &&
|
||||
intersection(extra.tags, propTags)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
});
|
||||
return matched;
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import recalculateInlineCalculations from '/imports/api/engine/action/functions/recalculateInlineCalculations';
|
||||
import getPropertyTitle from '/imports/api/utility/getPropertyTitle';
|
||||
|
||||
export default async function applyCreatureTemplateProperty(
|
||||
task: PropTask, action: EngineAction, result, userInput
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'creature') {
|
||||
throw new Meteor.Error('wrong-property', `Expected a creature, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
// Log the Creature that is about to be summoned
|
||||
let logValue = prop.description?.value
|
||||
if (prop.description?.text) {
|
||||
await recalculateInlineCalculations(prop.description, action, 'reduce', userInput);
|
||||
logValue = prop.description?.value;
|
||||
}
|
||||
// There are no targets for creature templates
|
||||
// Creatures are always summoned as children of the action's creature
|
||||
result.appendLog({
|
||||
name: getPropertyTitle(prop),
|
||||
value: logValue,
|
||||
silenced: prop.silent,
|
||||
}, []);
|
||||
|
||||
result.appendLog({
|
||||
name: 'Warning',
|
||||
value: 'Creature summoning is not yet implemented...',
|
||||
silenced: prop.silent,
|
||||
}, []);
|
||||
|
||||
return;
|
||||
}
|
||||
@@ -1,292 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
import { critInputProvider } from '../functions/userInput/inputProviderForTests.testFn';
|
||||
|
||||
// Random ids shared by the fixtures and the assertions below.
const [
  creatureId, targetCreatureId, targetCreature2Id, damageTargetId, damageSelfId, targetCreatureHitPointsId, targetCreature2HitPointsId, selfHitPointsId, damageWithEffectsId, effectId, effect2Id,
] = getRandomIds(20);

// Attacker fixture: damage props targeting 'target' and 'self', a health bar,
// and a tagged damage prop with tag-targeted add/mul effects applied to it.
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    {
      _id: damageTargetId,
      type: 'damage',
      target: 'target',
      amount: { calculation: '2d6 + 7' }
    },
    {
      _id: damageSelfId,
      type: 'damage',
      target: 'self',
      amount: { calculation: '1d12 + 7' }
    },
    {
      _id: selfHitPointsId,
      type: 'attribute',
      name: 'Hit Points',
      attributeType: 'healthBar',
      variableName: 'hitPoints',
      baseValue: { calculation: '20' },
    },
    {
      _id: damageWithEffectsId,
      type: 'damage',
      target: 'target',
      amount: { calculation: '1d13 + 3' },
      tags: ['tag']
    },
    {
      _id: effectId,
      type: 'effect',
      operation: 'add',
      amount: { calculation: '1' },
      targetByTags: true,
      targetTags: ['tag'],
    },
    {
      _id: effect2Id,
      type: 'effect',
      operation: 'mul',
      amount: { calculation: '2' },
      targetByTags: true,
      targetTags: ['tag'],
    },
  ],
}

// First target fixture: just a health bar at 33.
const actionTargetCreature: TestCreature = {
  _id: targetCreatureId,
  props: [
    {
      _id: targetCreatureHitPointsId,
      type: 'attribute',
      name: 'Hit Points',
      attributeType: 'healthBar',
      variableName: 'hitPoints',
      baseValue: { calculation: '33' },
    }
  ]
}

// Second target fixture: health bar at 47, for the multi-target test.
const actionTargetCreature2: TestCreature = {
  _id: targetCreature2Id,
  props: [
    {
      _id: targetCreature2HitPointsId,
      type: 'attribute',
      name: 'Hit Points',
      attributeType: 'healthBar',
      variableName: 'hitPoints',
      baseValue: { calculation: '47' },
    }
  ]
}

describe('Apply Damage Properties', function () {
  // Increase timeout
  this.timeout(8000);

  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
    await createTestCreature(actionTargetCreature);
    await createTestCreature(actionTargetCreature2);
  });

  it('Damages self', async function () {
    const action = await runActionById(damageSelfId);
    assert.exists(action);
    // First mutation logs the roll; second decrements the caster's health bar
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Damage',
          value: '1d12 [6] + 7',
        }
      ],
      targetIds: [creatureId],
    }, {
      contents: [{
        inline: true,
        name: 'Health bar damaged',
        value: '−13 Hit Points',
      }],
      updates: [
        {
          propId: selfHitPointsId,
          type: 'attribute',
          inc: { damage: 13, value: -13 },
        },
      ],
      targetIds: [creatureId],
    }]);
  });

  it('Damages a single target', async function () {
    const action = await runActionById(damageTargetId, [targetCreatureId]);
    assert.exists(action);
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Damage',
          value: '2d6 [3, 4] + 7',
        }
      ],
      targetIds: [targetCreatureId],
    }, {
      contents: [
        {
          inline: true,
          name: 'Health bar damaged',
          value: '−14 Hit Points',
        }
      ],
      targetIds: [targetCreatureId],
      updates: [
        {
          propId: targetCreatureHitPointsId,
          type: 'attribute',
          inc: { damage: 14, value: -14 },
        },
      ],
    }]);
  });

  it('Damages multiple targets', async function () {
    const action = await runActionById(damageTargetId, [
      targetCreatureId, targetCreature2Id
    ]);
    assert.exists(action);
    // One shared roll log, then one health-bar update mutation per target
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Damage',
          value: '2d6 [3, 4] + 7',
        }
      ],
      targetIds: [
        targetCreatureId,
        targetCreature2Id,
      ],
    }, {
      contents: [
        {
          inline: true,
          name: 'Health bar damaged',
          value: '−14 Hit Points',
        }
      ],
      targetIds: [targetCreatureId],
      updates: [
        {
          propId: targetCreatureHitPointsId,
          type: 'attribute',
          inc: { damage: 14, value: -14 },
        },
      ],
    }, {
      contents: [
        {
          inline: true,
          name: 'Health bar damaged',
          value: '−14 Hit Points',
        }
      ],
      targetIds: [targetCreature2Id],
      updates: [
        {
          propId: targetCreature2HitPointsId,
          type: 'attribute',
          inc: { damage: 14, value: -14 },
        },
      ],
    }]);
  });

  it('Applies effects when doing damage', async function () {
    const action = await runActionById(damageWithEffectsId, [targetCreatureId]);
    assert.exists(action);
    // The tag-targeted effects modify the roll: (1d13 + 3 + 1) * 2
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          inline: true,
          name: 'Damage',
          value: '(1d13 [7] + 4) * 2',
        }
      ],
      targetIds: [targetCreatureId],
    }, {
      contents: [
        {
          inline: true,
          name: 'Health bar damaged',
          value: '−22 Hit Points',
        }
      ],
      targetIds: [targetCreatureId],
      updates: [
        {
          propId: targetCreatureHitPointsId,
          type: 'attribute',
          inc: { damage: 22, value: -22 },
        },
      ],
    }]);
  });

  it('Doubles damage on a critical hit', async function () {
    // Local fixture: an attack action whose child damage should double its
    // dice when the (forced) attack roll crits
    const [
      creatureId, damageId, actionId
    ] = getRandomIds(3);
    const testCreature: TestCreature = {
      _id: creatureId,
      props: [
        {
          _id: actionId,
          type: 'action',
          attackRoll: { calculation: '10' },
          children: [
            {
              _id: damageId,
              type: 'damage',
              target: 'target',
              amount: { calculation: '2d6 + 7' }
            },
          ]
        },
      ],
    };
    await createTestCreature(testCreature);

    // critInputProvider forces the d20 to roll a natural 20
    const action = await runActionById(actionId, [], critInputProvider);
    assert.exists(action);
    assert.deepEqual(allMutations(action), [{
      'contents': [{ 'name': 'Action' }],
      'targetIds': []
    }, {
      'contents': [{
        'inline': true,
        'name': 'Critical Hit!',
        'value': '1d20 [20] + 10\n**30**'
      }],
      'targetIds': [],
    }, {
      'contents': [{
        'inline': true,
        'name': 'Damage',
        'value': '2d6 [3, 4, 5, 6] + 7\n**25** critical slashing damage',
      }],
      'targetIds': [],
    }]);
  });
});
|
||||
@@ -1,332 +0,0 @@
|
||||
import { some, includes, difference, intersection } from 'lodash';
|
||||
|
||||
import { getConstantValueFromScope } from '/imports/api/creature/creatures/CreatureVariables';
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import { applyDefaultAfterPropTasks } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import { getEffectiveActionScope } from '/imports/api/engine/action/functions/getEffectiveActionScope';
|
||||
import recalculateCalculation from '/imports/api/engine/action/functions/recalculateCalculation';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import TaskResult from '/imports/api/engine/action/tasks/TaskResult';
|
||||
import { isFiniteNode } from '/imports/parser/parseTree/constant';
|
||||
import resolve from '/imports/parser/resolve';
|
||||
import toString from '/imports/parser/toString';
|
||||
import { getPropertiesOfType } from '/imports/api/engine/loadCreatures';
|
||||
import applyTask from '/imports/api/engine/action/tasks/applyTask';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import getEffectivePropTags from '/imports/api/engine/computation/utility/getEffectivePropTags';
|
||||
import Context from '/imports/parser/types/Context';
|
||||
import applySavingThrowProperty from '/imports/api/engine/action/applyProperties/applySavingThrowProperty';
|
||||
import assert from 'node:assert';
|
||||
|
||||
export default async function applyDamageProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, inputProvider: InputProvider
|
||||
) {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'damage') {
|
||||
throw new Meteor.Error('wrong-property', `Expected damage, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
const scope = await getEffectiveActionScope(action);
|
||||
|
||||
// Choose target
|
||||
const damageTargets = prop.target === 'self' ? [action.creatureId] : task.targetIds;
|
||||
|
||||
// Skip if there is no parse node to work with
|
||||
if (!prop.amount?.valueNode) {
|
||||
return applyDefaultAfterPropTasks(action, prop, damageTargets, inputProvider);
|
||||
}
|
||||
|
||||
// Determine if the hit is critical
|
||||
const criticalHit = await getConstantValueFromScope('~criticalHit', scope)
|
||||
&& prop.damageType !== 'healing'; // Can't critically heal
|
||||
// Double the damage rolls if the hit is critical
|
||||
const context = new Context({
|
||||
options: { doubleRolls: criticalHit },
|
||||
});
|
||||
|
||||
// Gather all the lines we need to log into an array
|
||||
const logValue: string[] = [];
|
||||
const logName = prop.damageType === 'healing' ? 'Healing' : 'Damage';
|
||||
|
||||
// roll the dice only and store that string
|
||||
await recalculateCalculation(prop.amount, action, 'compile', inputProvider);
|
||||
const { result: rolled } = await resolve('roll', prop.amount.valueNode, scope, context, inputProvider);
|
||||
if (rolled.parseType !== 'constant') {
|
||||
logValue.push(toString(rolled));
|
||||
}
|
||||
result.appendParserContextErrors(context, damageTargets);
|
||||
|
||||
// Reset the errors so we don't log the same errors twice
|
||||
context.errors = [];
|
||||
|
||||
// Resolve the roll to a final value
|
||||
const { result: reduced } = await resolve('reduce', rolled, scope, context, inputProvider);
|
||||
result.appendParserContextErrors(context, damageTargets);
|
||||
|
||||
// Store the result
|
||||
let damage: number | undefined = undefined;
|
||||
if (reduced.parseType === 'constant') {
|
||||
prop.amount.value = reduced.value;
|
||||
if (typeof reduced.value === 'number') {
|
||||
damage = reduced.value;
|
||||
}
|
||||
} else if (reduced.parseType === 'error') {
|
||||
prop.amount.value = undefined;
|
||||
} else {
|
||||
prop.amount.value = toString(reduced);
|
||||
}
|
||||
|
||||
// If we didn't end up with damage of finite amount, give up
|
||||
if (
|
||||
typeof damage !== 'number'
|
||||
|| !isFinite(damage)
|
||||
) {
|
||||
return applyDefaultAfterPropTasks(action, prop, damageTargets, inputProvider);
|
||||
}
|
||||
|
||||
// Round the damage to a whole number
|
||||
damage = Math.floor(damage);
|
||||
scope['~damage'] = { value: damage };
|
||||
|
||||
// Convert extra damage into the stored type
|
||||
const lastDamageType = await getConstantValueFromScope('~lastDamageType', scope);
|
||||
if (prop.damageType === 'extra' && typeof lastDamageType === 'string') {
|
||||
prop.damageType = lastDamageType;
|
||||
}
|
||||
// Store current damage type
|
||||
if (prop.damageType !== 'healing') {
|
||||
scope['~lastDamageType'] = { value: prop.damageType };
|
||||
}
|
||||
|
||||
// Memoise the damage suffix for the log
|
||||
const suffix = (criticalHit ? 'critical ' : '') +
|
||||
prop.damageType +
|
||||
(prop.damageType !== 'healing' ? ' damage' : '');
|
||||
|
||||
// If there is a save, calculate the save damage
|
||||
let damageOnSave, saveProp, saveRoll;
|
||||
if (prop.save) {
|
||||
if (prop.save.damageFunction?.calculation) {
|
||||
await recalculateCalculation(prop.save.damageFunction, action, 'compile', inputProvider);
|
||||
context.errors = [];
|
||||
assert(prop.save.damageFunction.valueNode, 'Expected value to be defined after recalculateCalculation');
|
||||
const { result: saveDamageRolled } = await resolve(
|
||||
'roll', prop.save.damageFunction.valueNode, scope, context, inputProvider
|
||||
);
|
||||
saveRoll = toString(saveDamageRolled);
|
||||
const { result: saveDamageResult } = await resolve(
|
||||
'reduce', saveDamageRolled, scope, context, inputProvider
|
||||
);
|
||||
result.appendParserContextErrors(context, damageTargets);
|
||||
// If we didn't end up with a constant of finite amount, give up
|
||||
if (
|
||||
!isFiniteNode(saveDamageResult)
|
||||
) {
|
||||
return applyDefaultAfterPropTasks(action, prop, damageTargets, inputProvider);
|
||||
}
|
||||
// Round the damage to a whole number
|
||||
damageOnSave = Math.floor(saveDamageResult.value);
|
||||
} else {
|
||||
damageOnSave = Math.floor(damage / 2);
|
||||
}
|
||||
saveProp = {
|
||||
node: {
|
||||
...prop.save,
|
||||
name: prop.save.stat,
|
||||
silent: prop.silent,
|
||||
},
|
||||
children: [],
|
||||
}
|
||||
}
|
||||
|
||||
if (damageTargets && damageTargets.length) {
|
||||
// Iterate through all the targets
|
||||
for (const target of damageTargets) {
|
||||
let damageToApply = damage || 0;
|
||||
|
||||
// If there is a saving throw, apply that first
|
||||
if (prop.save) {
|
||||
await applySavingThrowProperty({
|
||||
prop: saveProp,
|
||||
targetIds: task.targetIds,
|
||||
}, action, result, inputProvider);
|
||||
if (await getConstantValueFromScope('~saveSucceeded', scope)) {
|
||||
// Log the total damage
|
||||
logValue.push(toString(reduced));
|
||||
// Log the save damage
|
||||
const damageText = damageFunctionText(prop.save);
|
||||
if (damageText) {
|
||||
logValue.push(damageText);
|
||||
} else {
|
||||
logValue.push(
|
||||
'**Damage on successful save**',
|
||||
prop.save.damageFunction?.calculation ?? '',
|
||||
saveRoll
|
||||
);
|
||||
}
|
||||
damageToApply = damageOnSave;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply weaknesses/resistances/immunities
|
||||
damageToApply = applyDamageMultipliers({
|
||||
target,
|
||||
damage: damageToApply,
|
||||
damageProp: prop,
|
||||
logValue
|
||||
});
|
||||
|
||||
// Deal the damage to the target
|
||||
await dealDamage(
|
||||
action, prop, result, inputProvider, target, prop.damageType, damageToApply
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// There are no targets, just log the result
|
||||
logValue.push(`**${damage}** ${suffix}`);
|
||||
if (prop.save) {
|
||||
await applySavingThrowProperty(saveProp, action, result, inputProvider);
|
||||
await applySavingThrowProperty({
|
||||
prop: saveProp,
|
||||
targetIds: task.targetIds,
|
||||
}, action, result, inputProvider);
|
||||
logValue.push(`**${damageOnSave}** ${suffix} on a successful save`);
|
||||
}
|
||||
}
|
||||
if (logValue.length) result.appendLog({
|
||||
name: logName,
|
||||
value: logValue.join('\n'),
|
||||
inline: true,
|
||||
silenced: prop.silent,
|
||||
}, damageTargets);
|
||||
return applyDefaultAfterPropTasks(action, prop, damageTargets, inputProvider);
|
||||
}
|
||||
|
||||
function damageFunctionText(save) {
|
||||
if (!save) return;
|
||||
if (!save.damageFunction) {
|
||||
return '**Half damage on successful save**';
|
||||
}
|
||||
if (save.damageFunction.calculation == '0' || save.damageFunction.value === 0) {
|
||||
return '**No damage on successful save**'
|
||||
}
|
||||
}
|
||||
|
||||
function applyDamageMultipliers({ target, damage, damageProp, logValue }) {
|
||||
const damageType = damageProp?.damageType;
|
||||
if (!damageType) return damage;
|
||||
|
||||
const multiplier = target?.variables?.[damageType];
|
||||
if (!multiplier) return damage;
|
||||
|
||||
const damageTypeText = damageType == 'healing' ? 'healing' : `${damageType} damage`;
|
||||
|
||||
if (
|
||||
multiplier.immunity &&
|
||||
some(multiplier.immunities, multiplierAppliesTo(damageProp, 'immunity'))
|
||||
) {
|
||||
logValue.push(`Immune to ${damageTypeText}`);
|
||||
return 0;
|
||||
} else {
|
||||
if (
|
||||
multiplier.resistance &&
|
||||
some(multiplier.resistances, multiplierAppliesTo(damageProp, 'resistance'))
|
||||
) {
|
||||
logValue.push(`Resistant to ${damageTypeText}`);
|
||||
damage = Math.floor(damage / 2);
|
||||
}
|
||||
if (
|
||||
multiplier.vulnerability &&
|
||||
some(multiplier.vulnerabilities, multiplierAppliesTo(damageProp, 'vulnerability'))
|
||||
) {
|
||||
logValue.push(`Vulnerable to ${damageTypeText}`);
|
||||
damage = Math.floor(damage * 2);
|
||||
}
|
||||
}
|
||||
return damage;
|
||||
}
|
||||
|
||||
function multiplierAppliesTo(damageProp, multiplierType) {
|
||||
return multiplier => {
|
||||
// Apply the default 'ignore x' tags
|
||||
const effectiveTags = getEffectivePropTags(damageProp);
|
||||
if (includes(effectiveTags, `ignore ${multiplierType}`)) return false;
|
||||
|
||||
const hasRequiredTags = difference(
|
||||
multiplier.includeTags, effectiveTags
|
||||
).length === 0;
|
||||
|
||||
const hasNoExcludedTags = intersection(
|
||||
multiplier.excludeTags, effectiveTags
|
||||
).length === 0;
|
||||
|
||||
return hasRequiredTags && hasNoExcludedTags;
|
||||
}
|
||||
}
|
||||
|
||||
async function dealDamage(
|
||||
action: EngineAction, prop: any, result: TaskResult, userInput: InputProvider,
|
||||
targetId: string, damageType: string, amount: number
|
||||
) {
|
||||
// Get all the health bars and do damage to them
|
||||
let healthBars = getPropertiesOfType(targetId, 'attribute');
|
||||
|
||||
// Keep only the healthbars that can take damage/healing
|
||||
healthBars = healthBars.filter((bar) => {
|
||||
if (bar.attributeType !== 'healthBar' || bar.inactive || bar.removed || bar.overridden) {
|
||||
return false;
|
||||
}
|
||||
if (damageType === 'healing' && bar.healthBarNoHealing) {
|
||||
return false;
|
||||
}
|
||||
if (damageType !== 'healing' && amount >= 0 && bar.healthBarNoDamage) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
// Sort healthbars by damage/healing order or tree order as a fallback
|
||||
healthBars.sort((a, b) => {
|
||||
let diff;
|
||||
if (amount >= 0) {
|
||||
diff = (a.healthBarDamageOrder ?? 0) - (b.healthBarDamageOrder ?? 0);
|
||||
} else {
|
||||
diff = (a.healthBarHealingOrder ?? 0) - (b.healthBarHealingOrder ?? 0);
|
||||
}
|
||||
if (Number.isFinite(diff)) {
|
||||
return diff;
|
||||
} else {
|
||||
return a.left - b.left;
|
||||
}
|
||||
});
|
||||
|
||||
// Deal the damage to each healthbar in order until all damage is done
|
||||
const totalDamage = amount;
|
||||
let damageLeft = totalDamage;
|
||||
if (damageType === 'healing') damageLeft = -totalDamage;
|
||||
for (const healthBar of healthBars) {
|
||||
if (damageLeft === 0) return;
|
||||
// Do the damage
|
||||
const damageAdded = await applyTask(action, {
|
||||
targetIds: [targetId],
|
||||
subtaskFn: 'damageProp',
|
||||
params: {
|
||||
operation: 'increment',
|
||||
value: +damageLeft || 0,
|
||||
targetProp: healthBar,
|
||||
},
|
||||
}, userInput);
|
||||
|
||||
damageLeft -= damageAdded;
|
||||
// Prevent overflow
|
||||
if (
|
||||
damageType === 'healing' ?
|
||||
healthBar.healthBarNoHealingOverflow :
|
||||
healthBar.healthBarNoDamageOverflow
|
||||
) {
|
||||
damageLeft = 0;
|
||||
}
|
||||
}
|
||||
return totalDamage;
|
||||
}
|
||||
@@ -1,50 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
const [
|
||||
creatureId, folderId
|
||||
] = getRandomIds(100);
|
||||
|
||||
const actionTestCreature: TestCreature = {
|
||||
_id: creatureId,
|
||||
props: [
|
||||
{
|
||||
_id: folderId,
|
||||
type: 'folder',
|
||||
children: [{
|
||||
type: 'note',
|
||||
summary: { text: 'this should run' },
|
||||
}],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
// Mocha suite: running a folder property should cascade into its children
describe('Apply folder properties', function () {
  // Increase timeout
  this.timeout(8000);

  // Start from a clean database seeded with only the test creature
  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
  });

  it('Applies the children of the folder', async function () {
    const action = await runActionById(folderId);
    assert.exists(action);
    // The only mutation should be the log content produced by the child note
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          value: 'this should run'
        }
      ],
      targetIds: [],
    }]);
  });
});
|
||||
@@ -1,16 +0,0 @@
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import { applyDefaultAfterPropTasks } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
|
||||
|
||||
export default async function applyFolderProperty(
|
||||
task: PropTask, action: EngineAction, result, userInput
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'folder' && prop.type !== 'propertySlot') {
|
||||
throw new Meteor.Error('wrong-property', `Expected a folder, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
return applyDefaultAfterPropTasks(action, prop, task.targetIds, userInput);
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allMutations,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Ids for the test creature and its single note property
const [
  creatureId, noteId
] = getRandomIds(2);

// A creature with one note whose summary contains an inline calculation
// that the engine should reduce when the note is applied
const actionTestCreature = {
  _id: creatureId,
  props: [
    {
      _id: noteId,
      type: 'note',
      name: 'Note Name',
      summary: { text: 'Note summary {1 + 2}' }
    },
  ],
}
|
||||
|
||||
// Mocha suite: note properties log their summary with inline calculations
// resolved
describe('Apply note properties', function () {
  // Increase timeout
  this.timeout(8000);

  // Start from a clean database seeded with only the test creature
  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
  });

  it('Applies the note text', async function () {
    const action = await runActionById(noteId);
    assert.exists(action);
    // The inline calculation {1 + 2} should be reduced to 3 in the logged value
    assert.deepEqual(allMutations(action), [{
      contents: [
        {
          name: 'Note Name',
          value: 'Note summary 3'
        }
      ],
      targetIds: [],
    }]);
  });
});
|
||||
@@ -1,38 +0,0 @@
|
||||
import { EngineAction } from '/imports/api/engine/action/EngineActions';
|
||||
import InputProvider from '/imports/api/engine/action/functions/userInput/InputProvider';
|
||||
import { applyDefaultAfterPropTasks } from '/imports/api/engine/action/functions/applyTaskGroups';
|
||||
import recalculateInlineCalculations from '/imports/api/engine/action/functions/recalculateInlineCalculations';
|
||||
import { PropTask } from '/imports/api/engine/action/tasks/Task';
|
||||
import TaskResult, { LogContent } from '/imports/api/engine/action/tasks/TaskResult';
|
||||
|
||||
export default async function applyNoteProperty(
|
||||
task: PropTask, action: EngineAction, result: TaskResult, inputProvider: InputProvider
|
||||
): Promise<void> {
|
||||
const prop = task.prop;
|
||||
|
||||
if (prop.type !== 'note') {
|
||||
throw new Meteor.Error('wrong-property', `Expected a note, got ${prop.type} instead`);
|
||||
}
|
||||
|
||||
const logContent: LogContent & { silenced: boolean | undefined; } = {
|
||||
silenced: prop.silent,
|
||||
};
|
||||
if (prop.name) logContent.name = prop.name;
|
||||
if (prop.summary?.text) {
|
||||
await recalculateInlineCalculations(prop.summary, action, 'reduce', inputProvider);
|
||||
logContent.value = prop.summary.value;
|
||||
}
|
||||
|
||||
if (logContent.name || logContent.value) {
|
||||
result.appendLog(logContent, task.targetIds);
|
||||
}
|
||||
// Log description
|
||||
if (prop.description?.text) {
|
||||
await recalculateInlineCalculations(prop.description, action, 'reduce', inputProvider);
|
||||
result.appendLog({
|
||||
value: prop.description.value,
|
||||
silenced: prop.silent,
|
||||
}, task.targetIds);
|
||||
}
|
||||
return applyDefaultAfterPropTasks(action, prop, task.targetIds, inputProvider);
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
import { assert } from 'chai';
|
||||
import {
|
||||
allLogContent,
|
||||
createTestCreature,
|
||||
getRandomIds,
|
||||
removeAllCreaturesAndProps,
|
||||
runActionById,
|
||||
TestCreature
|
||||
} from '/imports/api/engine/action/functions/actionEngineTest.testFn';
|
||||
|
||||
// Ids for the test creature and its roll property
const [
  creatureId, rollId,
] = getRandomIds(2);

// A creature with a roll property that stores its result in the variable
// `roll1`, and a child note that reads that variable back through an
// inline calculation
const actionTestCreature: TestCreature = {
  _id: creatureId,
  props: [
    {
      _id: rollId,
      type: 'roll',
      name: 'Roll Name',
      variableName: 'roll1',
      roll: { calculation: '7 + 15' },
      children: [
        {
          type: 'note',
          summary: { text: 'roll: {roll1}' },
        },
      ],
    },
  ],
};
|
||||
|
||||
// Mocha suite: roll properties resolve their calculation, log the result,
// and expose it to descendant properties via the configured variable name
describe('Apply roll properties', function () {
  // Increase timeout
  this.timeout(8000);

  // Start from a clean database seeded with only the test creature
  before(async function () {
    await removeAllCreaturesAndProps();
    await createTestCreature(actionTestCreature);
  });

  it('Saves the value of the roll into the variable name', async function () {
    const action = await runActionById(rollId);
    assert.exists(action);
    // First log entry is the roll itself (7 + 15 = 22); the second is the
    // child note that read `roll1` back out of scope
    assert.deepEqual(allLogContent(action), [{
      inline: true,
      name: 'Roll Name',
      value: '**22**',
    }, {
      value: 'roll: 22',
    }]);
  });
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user