Upload tracks to files

Paul Bienkowski, 2020-12-13 20:45:26 +01:00 (committed by uwewoessner)
parent e455ff5b68
commit 0edb1cc8eb
8 changed files with 165 additions and 38 deletions

@@ -17,9 +17,11 @@ services:
       dockerfile: ./Dockerfile
     volumes:
       - ./src:/opt/obsAPI/src
+      - ./local/api-data:/data
     environment:
       - PORT=3000
       - MONGODB_URL=mongodb://mongo/obsTest
+      - DATA_DIR=/data
     links:
       - mongo
     ports:

@@ -0,0 +1,21 @@
+const Track = require('../src/models/Track');
+
+module.exports = {
+  async up(next) {
+    try {
+      for await (const track of Track.find()) {
+        track.originalFileName = track.slug + '.csv'
+        await track.generateOriginalFilePath();
+        await track.save()
+      }
+      next();
+    } catch(err) {
+      next(err)
+    }
+  },
+
+  async down(next) {
+    next();
+  },
+};
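
For reference: this migration derives the filename from the slug, and generateOriginalFilePath (added to the Track model below) derives the storage path from that. A minimal sketch of the resulting values, assuming a made-up slug "morning-ride-x1y2z3" and author username "paul":

const path = require('path');

const slug = 'morning-ride-x1y2z3';      // hypothetical example slug
const originalFileName = slug + '.csv';  // → "morning-ride-x1y2z3.csv"
const originalFilePath = path.join('uploads', 'originals', 'paul', slug, originalFileName);
// → "uploads/originals/paul/morning-ride-x1y2z3/morning-ride-x1y2z3.csv"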

@@ -0,0 +1,25 @@
+const Track = require('../src/models/Track');
+
+module.exports = {
+  async up(next) {
+    try {
+      for await (const track of Track.find()) {
+        if (!track.body) {
+          continue
+        }
+
+        await track.writeToOriginalFile(track.body)
+        // assigning undefined unsets the path on save; `delete` has no effect on Mongoose documents
+        track.body = undefined
+        await track.save()
+      }
+      next();
+    } catch(err) {
+      next(err)
+    }
+  },
+
+  async down(next) {
+    next();
+  },
+};
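
Note that the two migrations are order-dependent: this one calls writeToOriginalFile, which resolves the target path from originalFileName/originalFilePath, so the filename migration above must run first. A sketch of running them, assuming a node-migrate-style runner (the up(next)/down(next) signature matches one; the state-store path here is a guess):

const migrate = require('migrate');

migrate.load({ stateStore: '.migrate' }, (err, set) => {
  if (err) throw err;
  set.up((err) => {   // runs pending migrations in file order
    if (err) throw err;
    console.log('migrations done');
  });
});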

package-lock.json (generated)

@@ -9308,6 +9308,14 @@
         }
       }
     },
+    "sanitize-filename": {
+      "version": "1.6.3",
+      "resolved": "https://registry.npmjs.org/sanitize-filename/-/sanitize-filename-1.6.3.tgz",
+      "integrity": "sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg==",
+      "requires": {
+        "truncate-utf8-bytes": "^1.0.0"
+      }
+    },
     "sanitize-html": {
       "version": "1.20.1",
       "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-1.20.1.tgz",
@@ -10167,6 +10175,14 @@
         "punycode": "^2.1.1"
       }
     },
+    "truncate-utf8-bytes": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz",
+      "integrity": "sha1-QFkjkJWS1W94pYGENLC3hInKXys=",
+      "requires": {
+        "utf8-byte-length": "^1.0.1"
+      }
+    },
     "tsconfig-paths": {
       "version": "3.9.0",
       "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz",
@@ -10957,6 +10973,11 @@
       "integrity": "sha1-Qw/VEKt/yVtdWRDJAteYgMIIQ2s=",
       "dev": true
     },
+    "utf8-byte-length": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
+      "integrity": "sha1-9F8VDExm7uloGGUFq5P8u4rWv2E="
+    },
     "util-deprecate": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

@@ -45,6 +45,7 @@
     "passport": "0.4.1",
     "passport-local": "1.0.0",
     "request": "2.88.2",
+    "sanitize-filename": "^1.6.3",
    "slug": "^3.3.5",
    "turf": "^3.0.14",
    "underscore": "^1.11.0"

@@ -64,6 +64,9 @@ function replaceDollarNewlinesHack(body) {
 }

 function* parseTrackPoints(body, format = null) {
+  if (body instanceof Buffer) {
+    body = body.toString('utf-8')
+  }

   body = replaceDollarNewlinesHack(body);
   const detectedFormat = format != null ? format : detectFormat(body);
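
This guard is needed because rebuildTrackDataAndSave (in the Track model below) now reads the uploaded file with fs.promises.readFile and no encoding argument, which yields a Buffer rather than a string. A quick illustrative demonstration:

const fs = require('fs');

async function demo(filePath) {
  const raw = await fs.promises.readFile(filePath); // no encoding → Buffer
  console.log(Buffer.isBuffer(raw));                // true
  console.log(typeof raw.toString('utf-8'));        // 'string'
}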

@@ -1,35 +1,60 @@
 const mongoose = require('mongoose');
 const uniqueValidator = require('mongoose-unique-validator');
 const slug = require('slug');
+const path = require('path');
+const sanitize = require('sanitize-filename');
+const fs = require('fs')

 const { parseTrackPoints } = require('../logic/tracks');

 const TrackData = require('./TrackData');

+const DATA_DIR = process.env.DATA_DIR || path.resolve(__dirname, '../../data/')
+
 const schema = new mongoose.Schema(
   {
     slug: { type: String, lowercase: true, unique: true },
     title: String,
     description: String,
-    body: String,
     visible: Boolean,
     uploadedByUserAgent: String,
+    body: String, // deprecated, remove after migration has read it
     comments: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Comment' }],
     author: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
     trackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
     publicTrackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
+    originalFileName: {
+      type: String,
+      required: true,
+      validate: {
+        validator: function (v) {
+          // Must be a sane filename, i.e. not change when being sanitized
+          return sanitize(v) === v && v.length > 0 && /.+\.csv$/i.test(v);
+        },
+        message: (props) => `${props.value} is not a valid filename`,
+      },
+    },
+    originalFilePath: String,
   },
   { timestamps: true },
 );

 schema.plugin(uniqueValidator, { message: 'is already taken' });

-schema.pre('validate', function (next) {
-  if (!this.slug) {
-    this.slugify();
-  }
-
-  next();
+schema.pre('validate', async function (next) {
+  try {
+    if (!this.slug) {
+      this.slugify();
+    }
+
+    if (!this.originalFilePath) {
+      await this.generateOriginalFilePath();
+    }
+
+    next();
+  } catch (err) {
+    next(err);
+  }
 });

 class Track extends mongoose.Model {
@@ -37,6 +62,11 @@ class Track extends mongoose.Model {
     this.slug = slug(this.title) + '-' + ((Math.random() * Math.pow(36, 6)) | 0).toString(36);
   }

+  async generateOriginalFilePath() {
+    await this.populate('author').execPopulate();
+    this.originalFilePath = path.join('uploads', 'originals', this.author.username, this.slug, this.originalFileName);
+  }
+
   isVisibleTo(user) {
     if (this.visible) {
       return true;
@@ -57,6 +87,24 @@ class Track extends mongoose.Model {
     return user && user._id.equals(this.author._id);
   }

+  async _ensureDirectoryExists() {
+    if (!this.originalFilePath) {
+      await this.generateOriginalFilePath()
+    }
+
+    const dir = path.join(DATA_DIR, path.dirname(this.originalFilePath))
+    await fs.promises.mkdir(dir, {recursive: true})
+  }
+
+  get fullOriginalFilePath() {
+    return path.join(DATA_DIR, this.originalFilePath)
+  }
+
+  async writeToOriginalFile(fileBody) {
+    await this._ensureDirectoryExists()
+    await fs.promises.writeFile(this.fullOriginalFilePath, fileBody)
+  }
+
   /**
    * Fills the trackData and publicTrackData with references to correct
    * TrackData objects. For now, this is either the same, or publicTrackData
@@ -76,8 +124,11 @@ class Track extends mongoose.Model {
       await TrackData.findByIdAndDelete(this.publicTrackData);
     }

-    // parse the points from the body
-    const points = Array.from(parseTrackPoints(this.body));
+    // Parse the points from the body.
+    // TODO: Stream file contents, if possible
+    const body = await fs.promises.readFile(this.fullOriginalFilePath)
+    const points = Array.from(parseTrackPoints(body));

     const trackData = TrackData.createFromPoints(points);
     await trackData.save();
@@ -102,7 +153,12 @@ class Track extends mongoose.Model {
       updatedAt: this.updatedAt,
       visible: this.visible,
       author: this.author.toProfileJSONFor(user),
-      ...(includePrivateFields ? { uploadedByUserAgent: this.uploadedByUserAgent } : {}),
+      ...(includePrivateFields
+        ? {
+            uploadedByUserAgent: this.uploadedByUserAgent,
+            originalFileName: this.originalFileName,
+          }
+        : {}),
     };
   }
 }
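
Putting the new model API together, the write path is roughly: set originalFileName, let the pre-validate hook (or an explicit call) compute originalFilePath, write the upload, then rebuild the parsed track data from the file. A hedged usage sketch; "author" (a saved User document) and "csvBuffer" (the uploaded bytes) are assumed inputs:

const Track = require('./src/models/Track');

async function storeUpload(author, csvBuffer) {
  const track = new Track({ title: 'Morning ride', originalFileName: 'morning-ride.csv' });
  track.author = author;
  track.slugify();                             // the slug is part of the storage path

  await track.writeToOriginalFile(csvBuffer);  // mkdir -p, then write under DATA_DIR
  await track.rebuildTrackDataAndSave();       // re-reads the file, parses points, saves
  return track;
}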

@@ -175,23 +175,28 @@ router.post(
   auth.required,
   busboy(), // parse multipart body
   wrapRoute(async (req, res) => {
-    const { body } = await getMultipartOrJsonBody(req, (body) => body.track);
+    // Read the whole file into memory. This is not optimal, instead, we should
+    // write the file data directly to the target file. However, we first have
+    // to parse the rest of the track data to know where to place the file.
+    // TODO: Stream into temporary file, then move it later.
+    const { body, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);

-    const track = new Track(body);
-    track.author = req.user;
+    const {body: fileBody, visible, ...trackBody} = body

-    if (body.visible != null) {
-      track.visible = Boolean(body.visible);
-    } else {
-      track.visible = track.author.areTracksVisibleForAll;
-    }
+    const track = new Track({
+      ...trackBody,
+      author: req.user,
+      visible: visible == null ? req.user.areTracksVisibleForAll : Boolean(visible),
+    })

+    track.slugify();

-    if (track.body) {
-      track.body = track.body.trim();
+    if (fileBody) {
+      track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
+      track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
+      await track.writeToOriginalFile(fileBody)
       await track.rebuildTrackDataAndSave();
     } else {
-      await track.save();
+      await track.save()
     }

     // console.log(track.author);
@@ -224,32 +229,25 @@ router.put(
       return res.sendStatus(403);
     }

-    const { body } = await getMultipartOrJsonBody(req, (body) => body.track);
+    const { body: {body: fileBody, ...trackBody}, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);

-    if (typeof body.title !== 'undefined') {
-      track.title = (body.title || '').trim() || null;
+    if (typeof trackBody.title !== 'undefined') {
+      track.title = (trackBody.title || '').trim() || null;
     }

-    if (typeof body.description !== 'undefined') {
-      track.description = (body.description || '').trim() || null;
+    if (typeof trackBody.description !== 'undefined') {
+      track.description = (trackBody.description || '').trim() || null;
     }

-    if (body.visible != null) {
-      track.visible = Boolean(body.visible);
+    if (trackBody.visible != null) {
+      track.visible = Boolean(trackBody.visible);
     }

-    if (typeof body.tagList !== 'undefined') {
-      track.tagList = body.tagList;
-    }
-
-    if (body.body && body.body.trim()) {
-      // delete existing
-      if (track.trackData) {
-        await TrackData.findByIdAndDelete(track.trackData);
-      }
-      track.body = body.body.trim();
+    if (fileBody) {
+      track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
+      track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
+      await track.writeToOriginalFile(fileBody)
       await track.rebuildTrackDataAndSave();
     } else {
       await track.save();
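
From a client's perspective, the changed POST handler now expects the file contents in a multipart field apparently named "body" (inferred from fileInfo.body and the destructured fileBody), with the remaining track attributes as ordinary form fields. A hedged sketch using Node 18's built-in fetch/FormData; the route path, base URL, and token scheme are assumptions about the surrounding app:

const fs = require('fs');

async function uploadTrack(apiBase, token, filePath) {
  const form = new FormData();
  form.append('title', 'Morning ride');
  form.append('body', new Blob([fs.readFileSync(filePath)]), 'morning-ride.csv');

  const res = await fetch(apiBase + '/tracks', {
    method: 'POST',
    headers: { Authorization: 'Token ' + token },
    body: form,
  });
  return res.json();
}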