Implemented archive upload handling

This commit is contained in:
Stefan Zermatten
2022-06-07 23:01:06 +02:00
parent 385ac17812
commit 28934baac9
13 changed files with 139 additions and 196 deletions

View File

@@ -1,4 +1,6 @@
import { createS3FilesCollection } from '/imports/api/files/s3FileStorage.js';
import SimpleSchema from 'simpl-schema';
import { incrementFileStorageUsed } from '/imports/api/users/methods/updateFileStorageUsed.js';
const ArchiveCreatureFiles = createS3FilesCollection({
collectionName: 'archiveCreatureFiles',
@@ -11,7 +13,44 @@ const ArchiveCreatureFiles = createS3FilesCollection({
if (!/json/i.test(file.extension)){
return 'Please upload only a JSON file';
}
return true;
},
onAfterUpload(file) {
incrementFileStorageUsed(file.userId, file.size);
}
});
// Schema for a creature archive document: a metadata header plus the
// creature and all of its related sub-documents. Everything is a
// blackbox object because the archived documents were already validated
// against their own collection schemas before being archived.
// Returns a fresh field definition each call so SimpleSchema never
// shares a definition object between keys.
const blackboxObject = () => ({
  type: Object,
  blackbox: true,
});
const archiveSchema = new SimpleSchema({
  meta: blackboxObject(),
  creature: blackboxObject(),
  properties: {
    type: Array,
  },
  'properties.$': blackboxObject(),
  experiences: {
    type: Array,
  },
  'experiences.$': blackboxObject(),
  logs: {
    type: Array,
  },
  'logs.$': blackboxObject(),
});
export default ArchiveCreatureFiles;
export { archiveSchema };

View File

@@ -1,57 +0,0 @@
import SimpleSchema from 'simpl-schema';
// Archived creatures is an immutable collection of creatures that are no longer
// in use and can be safely archived by the mongoDB hosting service.
// It keeps the working datasets like creatureProperties much smaller
// than they would otherwise be.
// NOTE(review): `Mongo` is assumed to be the Meteor global — confirm this
// module is only ever loaded inside a Meteor context.
let ArchivedCreatures = new Mongo.Collection('archivedCreatures');
// We use blackbox objects for everything:
// - saves time checking every object against a schema
// - doesn't accidentally create indices defined in subschemas
// - The objects we are archiving have already been checked against their
// own schemas
let ArchivedCreatureSchema = new SimpleSchema({
  // _id of the user who owns this archived creature
  owner: {
    type: String,
    regEx: SimpleSchema.RegEx.Id,
    // The primary index on this collection
    index: 1,
  },
  // When the creature was archived
  archiveDate: {
    type: Date,
    // Indexed so the archiving system can archive documents when they
    // get to a certain age
    index: 1,
  },
  // The creature document itself (from the Creatures collection)
  creature: {
    type: Object,
    blackbox: true,
  },
  // CreatureProperties documents belonging to the creature
  properties: {
    type: Array,
  },
  'properties.$': {
    type: Object,
    blackbox: true,
  },
  // Experiences documents belonging to the creature
  experiences: {
    type: Array,
  },
  'experiences.$': {
    type: Object,
    blackbox: true,
  },
  // CreatureLogs documents belonging to the creature
  logs: {
    type: Array,
  },
  'logs.$': {
    type: Object,
    blackbox: true,
  },
});
ArchivedCreatures.attachSchema(ArchivedCreatureSchema);
// Imported for side effects: registers the archive methods
import '/imports/api/creature/archive/methods/index.js';
export default ArchivedCreatures;

View File

@@ -1,5 +1,3 @@
// import '/imports/api/creature/archive/methods/archiveCreatures.js';
import '/imports/api/creature/archive/methods/archiveCreatureToFile.js';
import '/imports/api/creature/archive/methods/restoreCreatures.js';
import '/imports/api/creature/archive/methods/restoreCreatureFromFile.js';
import '/imports/api/creature/archive/methods/removeArchiveCreature.js';

View File

@@ -1,14 +1,7 @@
import SCHEMA_VERSION from '/imports/constants/SCHEMA_VERSION.js';
import SimpleSchema from 'simpl-schema';
import { ValidatedMethod } from 'meteor/mdg:validated-method';
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
import Creatures from '/imports/api/creature/creatures/Creatures.js';
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties.js';
import CreatureLogs from '/imports/api/creature/log/CreatureLogs.js';
import Experiences from '/imports/api/creature/experience/Experiences.js';
import { removeCreatureWork } from '/imports/api/creature/creatures/methods/removeCreature.js';
import ArchiveCreatureFiles from '/imports/api/creature/archive/ArchiveCreatureFiles.js';
import assertHasCharactersSlots from '/imports/api/creature/creatures/methods/assertHasCharacterSlots.js';
import { incrementFileStorageUsed } from '/imports/api/users/methods/updateFileStorageUsed.js';
const removeArchiveCreature = new ValidatedMethod({

View File

@@ -16,7 +16,7 @@ if (Meteor.isServer){
migrateArchive = require('/imports/migrations/server/migrateArchive.js').default;
}
function restoreCreature(archive){
function restoreCreature(archive, userId){
if (SCHEMA_VERSION < archive.meta.schemaVersion){
throw new Meteor.Error('Incompatible',
'The archive file is from a newer version. Update required to read.')
@@ -25,6 +25,16 @@ function restoreCreature(archive){
// Migrate and verify the archive meets the current schema
migrateArchive(archive);
// Don't upload creatures twice
const existingCreature = Creatures.findOne(archive.creature._id, {
fields: { _id: 1 }
});
if (existingCreature) throw new Meteor.Error('Already exists',
'The creature you are trying to restore already exists.')
// Ensure the user owns the restored creature
archive.creature.owner = userId;
// Insert the creature sub documents
// They still have their original _id's
Creatures.insert(archive.creature);
@@ -78,7 +88,7 @@ const restoreCreaturefromFile = new ValidatedMethod({
if (Meteor.isServer){
// Read the file data
const archive = await ArchiveCreatureFiles.readJSONFile(file);
restoreCreature(archive);
restoreCreature(archive, this.userId);
}
//Remove the archive once the restore succeeded
ArchiveCreatureFiles.remove({ _id: fileId });

View File

@@ -1,77 +0,0 @@
import SimpleSchema from 'simpl-schema';
import { ValidatedMethod } from 'meteor/mdg:validated-method';
import { RateLimiterMixin } from 'ddp-rate-limiter-mixin';
import { assertOwnership } from '/imports/api/sharing/sharingPermissions.js';
import Creatures from '/imports/api/creature/creatures/Creatures.js';
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties.js';
import CreatureLogs from '/imports/api/creature/log/CreatureLogs.js';
import Experiences from '/imports/api/creature/experience/Experiences.js';
import ArchivedCreatures from '/imports/api/creature/archive/ArchivedCreatures.js';
import { removeCreatureWork } from '/imports/api/creature/creatures/methods/removeCreature.js';
/**
 * Restore an archived creature back into the working collections.
 * The archived sub-documents keep their original _id's, and no
 * recomputation is done: the creature was archived in a computed and
 * ordered state, so it is restored as-is.
 *
 * @param {String} archiveId - _id of the ArchivedCreatures document
 * @returns {String} the _id of the restored creature
 * @throws {Meteor.Error} 'Archive not found' when no archive exists for
 *         the given id (previously this crashed with an opaque TypeError)
 */
export function restoreCreature(archiveId){
  // Get the archive
  const archivedCreature = ArchivedCreatures.findOne(archiveId);
  // Guard against a missing archive so callers get a meaningful error
  // instead of a TypeError on `archivedCreature.creature` below
  if (!archivedCreature){
    throw new Meteor.Error('Archive not found',
      'The archived creature to restore does not exist');
  }
  // Insert the creature sub documents
  // They still have their original _id's
  Creatures.insert(archivedCreature.creature);
  try {
    // Add all the properties
    if (archivedCreature.properties && archivedCreature.properties.length){
      CreatureProperties.batchInsert(archivedCreature.properties);
    }
    if (archivedCreature.experiences && archivedCreature.experiences.length){
      Experiences.batchInsert(archivedCreature.experiences);
    }
    if (archivedCreature.logs && archivedCreature.logs.length){
      CreatureLogs.batchInsert(archivedCreature.logs);
    }
    // Remove the archived creature
    ArchivedCreatures.remove(archiveId);
  } catch (e) {
    // If the above fails, delete the inserted creature
    removeCreatureWork(archivedCreature.creature._id);
    throw e;
  }
  // Do not recompute. The creature was in a computed and ordered state when
  // we archived it, just restore everything as-is
  return archivedCreature.creature._id;
}
/**
 * Method: restore up to 10 archived creatures back into the working
 * collections. Ownership of every archive is asserted before any
 * restore begins, so a partially-owned batch restores nothing.
 */
const restoreCreatures = new ValidatedMethod({
  name: 'Creatures.methods.restoreCreatures',
  validate: new SimpleSchema({
    archiveIds: {
      type: Array,
      max: 10,
    },
    'archiveIds.$': {
      type: String,
      regEx: SimpleSchema.RegEx.Id,
    },
  }).validator(),
  mixins: [RateLimiterMixin],
  rateLimit: {
    numRequests: 1,
    timeInterval: 5000,
  },
  run({archiveIds}) {
    // Phase 1: verify the caller owns every archive before touching any
    archiveIds.forEach(archiveId => {
      const archive = ArchivedCreatures.findOne(archiveId, {
        fields: {owner: 1}
      });
      assertOwnership(archive, this.userId)
    });
    // Phase 2: restore each archive and collect the restored creature ids
    return archiveIds.map(archiveId => restoreCreature(archiveId));
  },
});
export default restoreCreatures;

View File

@@ -3,7 +3,7 @@ import { EJSON } from 'meteor/ejson';
export default function writeScope(creatureId, computation) {
const scope = computation.scope;
const variables = computation.creature.variables || {};
const variables = computation.creature?.variables || {};
let $set;
for (const key in scope){
// Remove large properties that aren't likely to be accessed

View File

@@ -57,7 +57,7 @@ if (Meteor.isServer && Meteor.settings.useS3) {
onBeforeUpload,
onAfterUpload(fileRef) {
// Call the provided afterUpload hook first
onAfterUpload(fileRef);
onAfterUpload?.(fileRef);
// Start moving files to AWS:S3
// after fully received by the Meteor server
@@ -221,6 +221,7 @@ if (Meteor.isServer && Meteor.settings.useS3) {
collectionName,
storagePath,
onBeforeUpload,
onAfterUpload,
debug = Meteor.isProduction,
allowClientCode = false,
}){
@@ -228,11 +229,12 @@ if (Meteor.isServer && Meteor.settings.useS3) {
collectionName,
storagePath,
onBeforeUpload,
onAfterUpload,
debug,
allowClientCode,
});
if (Meteor.isServer){
if (Meteor.isServer) {
// Use the normal file system to read files
collection.readJSONFile = async function(file){
const fileString = await fsp.readFile(file.path, 'utf8');

View File

@@ -1,9 +1,6 @@
import { Migrations } from 'meteor/percolate:migrations';
import CreatureProperties from '/imports/api/creature/creatureProperties/CreatureProperties.js';
import LibraryNodes from '/imports/api/library/LibraryNodes.js';
import ArchivedCreatures from '/imports/api/creature/archive/ArchivedCreatures.js';
import { restoreCreature } from '/imports/api/creature/archive/methods/restoreCreatures.js';
import { archiveCreature } from '/imports/api/creature/archive/methods/archiveCreatureToFile.js';
import transformFields from '/imports/migrations/server/transformFields.js';
import SCHEMA_VERSION from '/imports/constants/SCHEMA_VERSION.js';
import STORAGE_LIMITS from '/imports/constants/STORAGE_LIMITS.js';
@@ -22,34 +19,11 @@ Migrations.add({
});
// Run the migration in three phases: pull every creature out of the
// database archive, transform the live collections in place, then push
// the restored creatures back out to the file-system archive.
// `reversed` is forwarded to migrateCollection to run the migration backwards.
function migrate({reversed} = {}){
  console.log('restoring all characters from database archive');
  const restoredIds = restoreAllCreatures();
  console.log('migrating creature properties');
  migrateCollection({collection: CreatureProperties, reversed});
  console.log('migrating library nodes')
  migrateCollection({collection: LibraryNodes, reversed});
  // Re-archive the creatures that were only restored for this migration
  console.log('archiving characters to file system archive');
  rearchiveAllCreatures(restoredIds);
}
// Restore every creature currently in the database archive.
// Returns the list of restored creature _id's so the caller can
// re-archive exactly those creatures afterwards.
function restoreAllCreatures(){
  const restoredIds = [];
  const archiveCursor = ArchivedCreatures.find({}, {
    fields: {_id: 1}
  });
  archiveCursor.forEach(archiveDoc => {
    restoredIds.push(restoreCreature(archiveDoc._id));
  });
  return restoredIds;
}
// Archive each of the given creatures to the file-system archive.
function rearchiveAllCreatures(ids){
  for (const creatureId of ids){
    archiveCreature(creatureId);
  }
}
function migrateCollection({collection, reversed}){

View File

@@ -1,19 +0,0 @@
import ArchivedCreatures from '/imports/api/creature/archive/ArchivedCreatures.js';
// Publish the current user's archived creatures, limited to the fields
// the archive list UI needs (the creature summary and the owner).
// NOTE(review): `this.autorun` is not core Meteor publish API — it
// presumably comes from a reactive-publish package that binds the
// publication context as `this` inside the computation; confirm, since a
// plain `function` here would otherwise lose `this.userId`.
Meteor.publish('archivedCreatures', function(){
  this.autorun(function (){
    var userId = this.userId;
    // Unauthenticated subscribers get an empty result set
    if (!userId) {
      return [];
    }
    return ArchivedCreatures.find({
      owner: userId,
    }, {
      fields: {
        creature: 1,
        owner: 1,
      }
    }
    );
  });
});

View File

@@ -8,7 +8,6 @@ import '/imports/server/publications/icons.js';
import '/imports/server/publications/tabletops.js';
import '/imports/server/publications/slotFillers.js';
import '/imports/server/publications/ownedDocuments.js';
import '/imports/server/publications/archivedCreatures.js';
import '/imports/server/publications/searchLibraryNodes.js';
import '/imports/server/publications/archiveFiles.js';
import '/imports/server/publications/userImages.js';

View File

@@ -41,7 +41,7 @@
>
<v-btn
outlined
style="height: 100%; width: 100%;"
style="height: 100%; width: 100%; min-height: 120px;"
:color="archiveFileError ? 'error' : undefined"
@click="$refs.archiveFileInput.click()"
>
@@ -54,9 +54,15 @@
<template v-else>
Upload archive
</template>
<v-progress-linear
v-if="archiveUploadInProgress"
:progress="archiveUploadProgress"
:indeterminate="archiveUploadIndeterminate"
/>
</v-btn>
</v-col>
</v-row>
<!--
<v-row dense>
<v-col cols="12">
<v-subheader> Images </v-subheader>
@@ -86,6 +92,7 @@
<image-upload-input />
</v-col>
</v-row>
-->
</v-container>
</template>
@@ -97,6 +104,8 @@ import ArchiveFileCard from '/imports/ui/files/ArchiveFileCard.vue';
import FileStorageStats from '/imports/ui/files/FileStorageStats.vue';
import ImageUploadInput from '/imports/ui/components/ImageUploadInput.vue';
import UserImageCard from '/imports/ui/files/UserImageCard.vue';
import { snackbar } from '/imports/ui/components/snackbars/SnackbarQueue.js';
import { archiveSchema } from '/imports/api/creature/archive/ArchiveCreatureFiles.js';
export default {
components: {
@@ -109,6 +118,9 @@ export default {
updateStorageUsedLoading: false,
archiveFileError: undefined,
archiveFile: undefined,
archiveUploadInProgress: false,
archiveUploadProgress: undefined,
archiveUploadIndeterminate: false,
}},
meteor: {
$subscribe: {
@@ -160,7 +172,75 @@ export default {
return;
}
this.archiveFile = file;
console.log(this.archiveFile);
this.archiveUploadIndeterminate = true;
const fr = new FileReader();
const self = this;
fr.addEventListener('load', () => {
let data;
try {
data = JSON.parse(fr.result);
} catch (e){
self.archiveFileError = 'File could not be parsed';
}
console.log(data);
try {
archiveSchema.validate(data);
} catch (e){
self.archiveFileError = e.reason || e.message || e.toString();
}
let uploadInstance = ArchiveCreatureFiles.insert({
file: file,
meta: {
creatureName: data?.creature?.name,
userId: Meteor.userId()
},
chunkSize: 'dynamic',
allowWebWorkers: true // If you see issues with uploads, change this to false
}, false)
// These are the event functions, don't need most of them, it shows where we are in the process
uploadInstance.on('start', function () {
console.log('Starting');
self.archiveUploadIndeterminate = false;
self.archiveUploadInProgress = true;
});
uploadInstance.on('end', function (error, fileObj) {
console.log('On end File Object: ', fileObj);
self.archiveUploadInProgress = false;
});
uploadInstance.on('uploaded', function (error, fileObj) {
console.log('uploaded: ', fileObj);
// Remove the file from the input box
self.file = undefined;
// Reset our state for the next file
self.archiveUploadInProgress = false;
self.archiveUploadProgress = 0;
});
uploadInstance.on('error', function (error, fileObj) {
console.log('Error during upload: ' + error, fileObj)
const text = error.reason || error.message || error;
snackbar({text});
self.archiveFileError = text;
});
uploadInstance.on('progress', function (progress, fileObj) {
console.log('Upload Percentage: ' + progress, fileObj)
// Update our progress bar
self.archiveUploadProgress = progress;
});
uploadInstance.start(); // Must manually start the upload
});
fr.readAsText(file);
}
},
}

View File

@@ -14,4 +14,5 @@ import '/imports/migrations/server/index.js';
import '/imports/migrations/methods/index.js'
import '/imports/constants/MAINTENANCE_MODE.js';
import '/imports/api/creature/creatureProperties/methods/index.js';
import '/imports/api/creature/archive/methods/index.js';