Validate file uniqueness (fixes #78)

Paul Bienkowski 2021-06-21 19:36:28 +02:00
parent e6350cdb66
commit 7162589617
2 changed files with 24 additions and 0 deletions

@@ -1,3 +1,4 @@
+const crypto = require('crypto');
 const mongoose = require('mongoose');
 const _ = require('lodash');
 const uniqueValidator = require('mongoose-unique-validator');
@@ -82,6 +83,12 @@ const schema = new mongoose.Schema(
       },
     },
+
+    // A hash of the original file's contents. Nobody can upload the same track twice.
+    originalFileHash: {
+      type: String,
+      required: true,
+    },
     // Where the files are stored, relative to a group directory like
     // TRACKS_DIR or PROCESSING_DIR.
     filePath: String,
@@ -94,6 +101,8 @@ const schema = new mongoose.Schema(
   { timestamps: true },
 );

+schema.index({ author: 1, originalFileHash: 1 }, { unique: true });
+
 schema.plugin(uniqueValidator, { message: 'is already taken' });

 schema.pre('validate', async function (next) {
@@ -184,6 +193,19 @@ class Track extends mongoose.Model {
     await fs.promises.writeFile(this.getOriginalFilePath(), fileBody);
   }

+  async validateFileBodyUniqueness(fileBody) {
+    // Generate hash
+    const hash = crypto.createHash('sha512').update(fileBody).digest('hex');
+
+    const existingTracks = await Track.find({ originalFileHash: hash, author: this.author });
+    if (existingTracks.length === 0 || (existingTracks.length === 1 && existingTracks[0]._id.equals(this._id))) {
+      this.originalFileHash = hash;
+      return;
+    }
+
+    throw new Error('Track file already uploaded.');
+  }
+
   /**
    * Marks this track as needing processing.
    *
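The new method hashes the raw upload with Node's built-in crypto module and looks up existing tracks by the same author with the same hash; the only match that is allowed is the track itself (its own _id), so overwriting a track with identical contents still passes. A minimal, self-contained sketch of the hashing step (the CSV buffer is a made-up stand-in for a real upload):

const crypto = require('crypto');

// Identical bytes always yield the identical SHA-512 hex digest, which is
// what makes the lookup by originalFileHash find duplicate uploads.
const fileBody = Buffer.from('lat,lng\n53.55,9.99\n');
const hash = crypto.createHash('sha512').update(fileBody).digest('hex');
console.log(hash.length); // 128 hex characters

The unique compound index on { author: 1, originalFileHash: 1 } backs this application-level check at the database: if two concurrent uploads of the same file both pass validateFileBodyUniqueness before either document is saved, the second save still fails on the index. The route handlers below call the method before writing the uploaded file to disk: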

@@ -194,6 +194,7 @@ router.post(
     track.slugify();

     if (fileBody) {
+      await track.validateFileBodyUniqueness(fileBody)
       track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
       track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
       await track.writeToOriginalFile(fileBody)
@@ -256,6 +257,7 @@ router.put(
     }

     if (fileBody) {
+      await track.validateFileBodyUniqueness(fileBody)
       track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
       track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
       await track.writeToOriginalFile(fileBody)
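The same check runs on both POST (create) and PUT (update), and it runs before writeToOriginalFile, so a rejected duplicate never touches the disk. On update, the track's previous upload matches its own _id, which is why re-uploading identical contents to the same track succeeds. The decision logic, reduced to plain values as a runnable illustration (isAcceptable is a hypothetical name, not part of this commit):

// Accept when no track has the hash, or when the only match is the track
// being updated itself; any other match is a duplicate upload.
function isAcceptable(existingIds, ownId) {
  return existingIds.length === 0 || (existingIds.length === 1 && existingIds[0] === ownId);
}

console.log(isAcceptable([], 'a'));    // true: first upload of this file
console.log(isAcceptable(['a'], 'a')); // true: same file re-uploaded via PUT
console.log(isAcceptable(['b'], 'a')); // false: another track already has this file

Note that a duplicate surfaces as a plain Error('Track file already uploaded.'), so without dedicated handling in the routes' error middleware it will be reported as a server error rather than a validation failure.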