Upload tracks to files

Author: Paul Bienkowski, 2020-12-13 20:45:26 +01:00 (committed by uwewoessner)
Parent: e455ff5b68
Commit: 0edb1cc8eb
8 changed files with 165 additions and 38 deletions

docker-compose file:

@@ -17,9 +17,11 @@ services:
       dockerfile: ./Dockerfile
     volumes:
       - ./src:/opt/obsAPI/src
+      - ./local/api-data:/data
     environment:
       - PORT=3000
       - MONGODB_URL=mongodb://mongo/obsTest
+      - UPLOADS_DIR=/data
     links:
       - mongo
     ports:
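
The new bind mount gives the container a persistent ./local/api-data directory for uploaded files. Note that this sets UPLOADS_DIR, while the Track model below reads DATA_DIR; within the files shown here, one of the two names appears to be a leftover. A minimal sketch of a fallback that would honor both (my assumption, not part of this commit):

    const path = require('path');

    // Accept either variable name, then fall back to the in-repo default
    // used by the Track model below (sketch):
    const DATA_DIR =
      process.env.DATA_DIR ||
      process.env.UPLOADS_DIR ||
      path.resolve(__dirname, '../../data/');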

New migration (assigns originalFileName to existing tracks):

@@ -0,0 +1,21 @@
+const Track = require('../src/models/Track');
+
+module.exports = {
+  async up(next) {
+    try {
+      for await (const track of Track.find()) {
+        track.originalFileName = track.slug + '.csv';
+        await track.generateOriginalFilePath();
+        await track.save();
+      }
+      next();
+    } catch (err) {
+      next(err);
+    }
+  },
+
+  async down(next) {
+    next();
+  },
+};
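
The for await loop relies on Mongoose queries being async-iterable, so documents are processed one at a time instead of loading the whole collection into memory. An equivalent explicit-cursor form of the loop in up(), as a sketch in case the installed Mongoose version lacks async iteration on queries:

    const cursor = Track.find().cursor();
    for (let track = await cursor.next(); track != null; track = await cursor.next()) {
      track.originalFileName = track.slug + '.csv';
      await track.generateOriginalFilePath();
      await track.save();
    }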

New migration (moves track bodies into files):

@@ -0,0 +1,25 @@
+const Track = require('../src/models/Track');
+
+module.exports = {
+  async up(next) {
+    try {
+      for await (const track of Track.find()) {
+        if (!track.body) {
+          continue;
+        }
+
+        await track.writeToOriginalFile(track.body);
+        // Unset the deprecated field; assigning undefined marks the path
+        // modified so mongoose persists the removal on save.
+        track.body = undefined;
+        await track.save();
+      }
+      next();
+    } catch (err) {
+      next(err);
+    }
+  },
+
+  async down(next) {
+    next();
+  },
+};
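
down() is a no-op, so this migration cannot be undone automatically. Since the files written in up() stay on disk, a reverse migration is possible in principle; a sketch of such a down(), using the fullOriginalFilePath getter this commit adds to the model (fs would additionally need to be required at the top):

    async down(next) {
      try {
        for await (const track of Track.find()) {
          // Restore the deprecated body field from the file written in up().
          const fileBody = await fs.promises.readFile(track.fullOriginalFilePath);
          track.body = fileBody.toString('utf-8');
          await track.save();
        }
        next();
      } catch (err) {
        next(err);
      }
    },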

package-lock.json (generated):

@@ -9308,6 +9308,14 @@
         }
       }
     },
+    "sanitize-filename": {
+      "version": "1.6.3",
+      "resolved": "https://registry.npmjs.org/sanitize-filename/-/sanitize-filename-1.6.3.tgz",
+      "integrity": "sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg==",
+      "requires": {
+        "truncate-utf8-bytes": "^1.0.0"
+      }
+    },
     "sanitize-html": {
       "version": "1.20.1",
       "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-1.20.1.tgz",

@@ -10167,6 +10175,14 @@
         "punycode": "^2.1.1"
       }
     },
+    "truncate-utf8-bytes": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz",
+      "integrity": "sha1-QFkjkJWS1W94pYGENLC3hInKXys=",
+      "requires": {
+        "utf8-byte-length": "^1.0.1"
+      }
+    },
     "tsconfig-paths": {
       "version": "3.9.0",
       "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz",

@@ -10957,6 +10973,11 @@
       "integrity": "sha1-Qw/VEKt/yVtdWRDJAteYgMIIQ2s=",
       "dev": true
     },
+    "utf8-byte-length": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
+      "integrity": "sha1-9F8VDExm7uloGGUFq5P8u4rWv2E="
+    },
     "util-deprecate": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

package.json:

@@ -45,6 +45,7 @@
     "passport": "0.4.1",
     "passport-local": "1.0.0",
     "request": "2.88.2",
+    "sanitize-filename": "^1.6.3",
     "slug": "^3.3.5",
     "turf": "^3.0.14",
     "underscore": "^1.11.0"

src/logic/tracks.js:

@@ -64,6 +64,9 @@ function replaceDollarNewlinesHack(body) {
 }

 function* parseTrackPoints(body, format = null) {
+  if (body instanceof Buffer) {
+    body = body.toString('utf-8');
+  }
+
   body = replaceDollarNewlinesHack(body);

   const detectedFormat = format != null ? format : detectFormat(body);
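
This guard exists because the track body now arrives from fs.promises.readFile, which returns a Buffer when no encoding is given; previously the function only ever saw strings. A minimal illustration ('/tmp/example.csv' is a hypothetical path):

    const fs = require('fs');

    // Without an encoding argument, readFile-style APIs return a Buffer:
    const buf = fs.readFileSync('/tmp/example.csv');
    Buffer.isBuffer(buf); // true

    // parseTrackPoints therefore normalizes its input first:
    const text = buf instanceof Buffer ? buf.toString('utf-8') : buf;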

src/models/Track.js:

@@ -1,35 +1,60 @@
 const mongoose = require('mongoose');
 const uniqueValidator = require('mongoose-unique-validator');
 const slug = require('slug');
+const path = require('path');
+const sanitize = require('sanitize-filename');
+const fs = require('fs');

 const { parseTrackPoints } = require('../logic/tracks');

 const TrackData = require('./TrackData');

+const DATA_DIR = process.env.DATA_DIR || path.resolve(__dirname, '../../data/');
+
 const schema = new mongoose.Schema(
   {
     slug: { type: String, lowercase: true, unique: true },
     title: String,
     description: String,
-    body: String,
     visible: Boolean,
     uploadedByUserAgent: String,
+    body: String, // deprecated, remove after migration has read it
     comments: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Comment' }],
     author: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
     trackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
     publicTrackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
+    originalFileName: {
+      type: String,
+      required: true,
+      validate: {
+        validator: function (v) {
+          // Must be a sane filename, i.e. not change when being sanitized
+          return sanitize(v) === v && v.length > 0 && /.+\.csv$/i.test(v);
+        },
+        message: (props) => `${props.value} is not a valid filename`,
+      },
+    },
+    originalFilePath: String,
   },
   { timestamps: true },
 );

 schema.plugin(uniqueValidator, { message: 'is already taken' });

-schema.pre('validate', function (next) {
-  if (!this.slug) {
-    this.slugify();
-  }
-
-  next();
-});
+schema.pre('validate', async function (next) {
+  try {
+    if (!this.slug) {
+      this.slugify();
+    }
+
+    if (!this.originalFilePath) {
+      await this.generateOriginalFilePath();
+    }
+
+    next();
+  } catch (err) {
+    next(err);
+  }
+});

 class Track extends mongoose.Model {
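
A quick sketch of what the originalFileName validator above accepts and rejects (the example names are mine, not from the commit):

    const sanitize = require('sanitize-filename');
    const isValidFileName = (v) => sanitize(v) === v && v.length > 0 && /.+\.csv$/i.test(v);

    isValidFileName('ride-2020-12-13.CSV'); // true: the extension check is case-insensitive
    isValidFileName('track.gpx');           // false: only .csv names pass
    isValidFileName('../../evil.csv');      // false: sanitize() would strip the separators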

@@ -37,6 +62,11 @@ class Track extends mongoose.Model {
     this.slug = slug(this.title) + '-' + ((Math.random() * Math.pow(36, 6)) | 0).toString(36);
   }

+  async generateOriginalFilePath() {
+    await this.populate('author').execPopulate();
+    this.originalFilePath = path.join('uploads', 'originals', this.author.username, this.slug, this.originalFileName);
+  }
+
   isVisibleTo(user) {
     if (this.visible) {
       return true;
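
generateOriginalFilePath groups uploads by user and track slug; execPopulate() is how Mongoose 5 turns document.populate() into a promise. For hypothetical values, the stored relative path looks like this:

    // username 'alice', slug 'morning-ride-abc123', file 'ride.csv' (hypothetical):
    path.join('uploads', 'originals', 'alice', 'morning-ride-abc123', 'ride.csv');
    // => 'uploads/originals/alice/morning-ride-abc123/ride.csv'
    // The fullOriginalFilePath getter below prefixes this with DATA_DIR.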

@@ -57,6 +87,24 @@ class Track extends mongoose.Model {
     return user && user._id.equals(this.author._id);
   }

+  async _ensureDirectoryExists() {
+    if (!this.originalFilePath) {
+      await this.generateOriginalFilePath();
+    }
+
+    const dir = path.join(DATA_DIR, path.dirname(this.originalFilePath));
+    await fs.promises.mkdir(dir, { recursive: true });
+  }
+
+  get fullOriginalFilePath() {
+    return path.join(DATA_DIR, this.originalFilePath);
+  }
+
+  async writeToOriginalFile(fileBody) {
+    await this._ensureDirectoryExists();
+    await fs.promises.writeFile(this.fullOriginalFilePath, fileBody);
+  }
+
   /**
    * Fills the trackData and publicTrackData with references to correct
    * TrackData objects. For now, this is either the same, or publicTrackData

@@ -76,8 +124,11 @@ class Track extends mongoose.Model {
       await TrackData.findByIdAndDelete(this.publicTrackData);
     }

-    // parse the points from the body
-    const points = Array.from(parseTrackPoints(this.body));
+    // Parse the points from the body.
+    // TODO: Stream file contents, if possible
+    const body = await fs.promises.readFile(this.fullOriginalFilePath);
+    const points = Array.from(parseTrackPoints(body));

     const trackData = TrackData.createFromPoints(points);
     await trackData.save();
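
The TODO above acknowledges that the file is buffered whole before parsing. True streaming would also require parseTrackPoints to consume an async iterable instead of a single string; a sketch of the read side only, under that assumption:

    const fs = require('fs');
    const readline = require('readline');

    // Yield the file line by line instead of buffering it whole (sketch):
    async function* readTrackLines(filePath) {
      const lines = readline.createInterface({
        input: fs.createReadStream(filePath),
        crlfDelay: Infinity,
      });
      for await (const line of lines) {
        yield line;
      }
    }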

@@ -102,7 +153,12 @@ class Track extends mongoose.Model {
       updatedAt: this.updatedAt,
       visible: this.visible,
       author: this.author.toProfileJSONFor(user),
-      ...(includePrivateFields ? { uploadedByUserAgent: this.uploadedByUserAgent } : {}),
+      ...(includePrivateFields
+        ? {
+            uploadedByUserAgent: this.uploadedByUserAgent,
+            originalFileName: this.originalFileName,
+          }
+        : {}),
     };
   }
 }
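
The rewritten spread means originalFileName, like uploadedByUserAgent, is only serialized when private fields are requested. The idiom in isolation (field values hypothetical):

    const includePrivateFields = true;
    const json = {
      visible: true,
      ...(includePrivateFields
        ? { uploadedByUserAgent: 'OBS/1.0', originalFileName: 'ride.csv' }
        : {}),
    };
    // => { visible: true, uploadedByUserAgent: 'OBS/1.0', originalFileName: 'ride.csv' }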

Tracks API routes:

@@ -175,23 +175,28 @@ router.post(
   auth.required,
   busboy(), // parse multipart body
   wrapRoute(async (req, res) => {
-    const { body } = await getMultipartOrJsonBody(req, (body) => body.track);
+    // Read the whole file into memory. This is not optimal, instead, we should
+    // write the file data directly to the target file. However, we first have
+    // to parse the rest of the track data to know where to place the file.
+    // TODO: Stream into temporary file, then move it later.
+    const { body, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);

-    const track = new Track(body);
-    track.author = req.user;
+    const { body: fileBody, visible, ...trackBody } = body;

-    if (body.visible != null) {
-      track.visible = Boolean(body.visible);
-    } else {
-      track.visible = track.author.areTracksVisibleForAll;
-    }
-    track.slugify();
+    const track = new Track({
+      ...trackBody,
+      // `visible` is handled separately, since it was destructured out of
+      // `trackBody` above.
+      author: req.user,
+      visible: visible == null ? req.user.areTracksVisibleForAll : Boolean(visible),
+    });

-    if (track.body) {
-      track.body = track.body.trim();
+    if (fileBody) {
       track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
+      track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
+      await track.writeToOriginalFile(fileBody);
       await track.rebuildTrackDataAndSave();
     } else {
       await track.save();
     }

     // console.log(track.author);
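
The destructuring near the top of this handler splits the parsed body into the raw file payload and the remaining track fields; for a hypothetical JSON request body:

    const parsed = { title: 'Morning ride', visible: '1', body: '<csv contents>' };
    const { body: fileBody, visible, ...trackBody } = parsed;
    // fileBody  => '<csv contents>'
    // visible   => '1'
    // trackBody => { title: 'Morning ride' }  (neither body nor visible)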

@@ -224,32 +229,25 @@ router.put(
       return res.sendStatus(403);
     }

-    const { body } = await getMultipartOrJsonBody(req, (body) => body.track);
+    const { body: { body: fileBody, ...trackBody }, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);

-    if (typeof body.title !== 'undefined') {
-      track.title = (body.title || '').trim() || null;
+    if (typeof trackBody.title !== 'undefined') {
+      track.title = (trackBody.title || '').trim() || null;
     }

-    if (typeof body.description !== 'undefined') {
-      track.description = (body.description || '').trim() || null;
+    if (typeof trackBody.description !== 'undefined') {
+      track.description = (trackBody.description || '').trim() || null;
     }

-    if (body.visible != null) {
-      track.visible = Boolean(body.visible);
+    if (trackBody.visible != null) {
+      track.visible = Boolean(trackBody.visible);
     }

-    if (typeof body.tagList !== 'undefined') {
-      track.tagList = body.tagList;
-    }
-
-    if (body.body && body.body.trim()) {
-      // delete existing
-      if (track.trackData) {
-        await TrackData.findByIdAndDelete(track.trackData);
-      }
-      track.body = body.body.trim();
+    if (fileBody) {
+      track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
       track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
+      await track.writeToOriginalFile(fileBody);
       await track.rebuildTrackDataAndSave();
     } else {
       await track.save();