Compare commits

6 commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | b7441741c0 | |
| | 59b67e5120 | |
| | fdf6ad7f06 | |
| | 90c2147ebc | |
| | e1320b3ff5 | |
| | b152d38edf | |
.editorconfig (new file, 19 lines)

@@ -0,0 +1,19 @@
+root = true
+
+[*]
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+charset = utf-8
+indent_style = space
+indent_size = 2
+
+[Makefile]
+indent_style = tab
+indent_size = 4
+
+[*.md]
+trim_trailing_whitespace = false
+
+[*.{py,rs}]
+indent_size = 4
Submodule pointer update:

@@ -1 +1 @@
-Subproject commit 44488dfdc8a15f4723824ce2f8f12a1ebdd0d23d
+Subproject commit 6beab2ebfede7e41a1184b5ae1d0be0c83f8f95c
Paths module:

@@ -1,5 +1,7 @@
 const path = require('path');
 
+const API_ROOT_DIR = path.resolve(__dirname, '../');
+
 const DATA_DIR = process.env.DATA_DIR || path.resolve(__dirname, '../../data/');
 
 // Contains the subtree for processing files
@@ -17,10 +19,11 @@ const TRACKS_DIR = path.join(DATA_DIR, 'tracks');
 const OBS_FACE_CACHE_DIR = path.join(DATA_DIR, 'obs-face-cache');
 
 module.exports = {
+  API_ROOT_DIR,
   DATA_DIR,
   PROCESSING_DIR,
   PROCESSING_OUTPUT_DIR,
   PROCESSING_DIR_PRIVATE,
   TRACKS_DIR,
   OBS_FACE_CACHE_DIR,
-}
+};
api/src/process_track.py (new file, 188 lines)

@@ -0,0 +1,188 @@
+import argparse
+import logging
+import os
+import tempfile
+import json
+
+from obs.face.importer import ImportMeasurementsCsv
+from obs.face.annotate import AnnotateMeasurements
+from obs.face.filter import (
+    AnonymizationMode,
+    ChainFilter,
+    ConfirmedFilter,
+    DistanceMeasuredFilter,
+    PrivacyFilter,
+    PrivacyZone,
+    PrivacyZonesFilter,
+    RequiredFieldsFilter,
+)
+from obs.face.osm import DataSource as OSMDataSource
+
+log = logging.getLogger(__name__)
+
+
+def main():
+    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
+
+    parser = argparse.ArgumentParser(
+        description="processes a single track for use in the portal, "
+        "using the obs.face algorithms"
+    )
+
+    parser.add_argument(
+        "-i", "--input", required=True, action="store", help="path to input CSV file"
+    )
+    parser.add_argument(
+        "-o", "--output", required=True, action="store", help="path to output directory"
+    )
+    parser.add_argument(
+        "--path-cache",
+        action="store",
+        default=None,
+        dest="cache_dir",
+        help="path where the visualization data will be stored",
+    )
+    parser.add_argument(
+        "--settings",
+        type=argparse.FileType("rt", encoding="utf-8"),
+        default=None,
+        help="path where the visualization data will be stored",
+    )
+
+    args = parser.parse_args()
+
+    if args.cache_dir is None:
+        with tempfile.TemporaryDirectory() as cache_dir:
+            args.cache_dir = cache_dir
+            process(args)
+    else:
+        process(args)
+
+
+def process(args):
+    log.info("Loading OpenStreetMap data")
+    osm = OSMDataSource(cache_dir=args.cache_dir)
+
+    filename_input = os.path.abspath(args.input)
+    dataset_id = os.path.splitext(os.path.basename(args.input))[0]
+
+    os.makedirs(args.output, exist_ok=True)
+
+    log.info("Loading settings")
+    settings = json.load(args.settings)
+
+    log.info("Annotating and filtering CSV file")
+    measurements, statistics = ImportMeasurementsCsv().read(
+        filename_input,
+        user_id="dummy",
+        dataset_id=dataset_id,
+    )
+
+    measurements = AnnotateMeasurements(osm, cache_dir=args.cache_dir).annotate(
+        measurements
+    )
+
+    filters_from_settings = []
+    for filter_description in settings.get("filters", []):
+        filter_type = filter_description.get("type")
+        if filter_type == "PrivacyZonesFilter":
+            privacy_zones = [
+                PrivacyZone(
+                    latitude=zone.get("latitude"),
+                    longitude=zone.get("longitude"),
+                    radius=zone.get("radius"),
+                )
+                for zone in filter_description.get("config", {}).get("privacyZones", [])
+            ]
+            filters_from_settings.append(PrivacyZonesFilter(privacy_zones))
+        else:
+            log.warning("Ignoring unknown filter type %r in settings file", filter_type)
+
+    input_filter = ChainFilter(
+        RequiredFieldsFilter(),
+        PrivacyFilter(
+            user_id_mode=AnonymizationMode.REMOVE,
+            measurement_id_mode=AnonymizationMode.REMOVE,
+        ),
+        *filters_from_settings,
+    )
+    events_filter = DistanceMeasuredFilter()
+    confirmed_filter = ChainFilter(
+        ConfirmedFilter(),
+    )
+
+    track_measurements = input_filter.filter(measurements, log=log)
+    event_measurements = events_filter.filter(track_measurements, log=log)
+    confirmed_measurements = confirmed_filter.filter(track_measurements, log=log)
+
+    # write out
+    confirmed_measurements_json = {
+        "type": "FeatureCollection",
+        "features": [
+            {
+                "type": "Feature",
+                "geometry": {
+                    "type": "Point",
+                    "coordinates": [m["latitude"], m["longitude"]],
+                },
+                "properties": {
+                    "distanceOvertaker": m["distance_overtaker"],
+                    "distanceStationary": m["distance_stationary"],
+                    "confirmed": True,
+                },
+            }
+            for m in confirmed_measurements
+        ],
+    }
+    all_measurements_json = {
+        "type": "FeatureCollection",
+        "features": [
+            {
+                "type": "Feature",
+                "geometry": {
+                    "type": "Point",
+                    "coordinates": [m["latitude"], m["longitude"]],
+                },
+                "properties": {
+                    "distanceOvertaker": m["distance_overtaker"],
+                    "distanceStationary": m["distance_stationary"],
+                    "confirmed": m in confirmed_measurements,
+                },
+            }
+            for m in event_measurements
+        ],
+    }
+
+    track_json = {
+        "type": "Feature",
+        "geometry": {
+            "type": "LineString",
+            "coordinates": [
+                [m["latitude"], m["longitude"]] for m in track_measurements
+            ],
+        },
+    }
+
+    statistics_json = {
+        "recordedAt": statistics["t_min"].isoformat(),
+        "recordedUntil": statistics["t_max"].isoformat(),
+        "duration": statistics["t"],
+        "length": statistics["d"],
+        "segments": statistics["n_segments"],
+        "numEvents": statistics["n_confirmed"],
+        "numMeasurements": statistics["n_measurements"],
+        "numValid": statistics["n_valid"],
+    }
+
+    for output_filename, data in [
+        ("all_measurements.json", all_measurements_json),
+        ("confirmed_measurements.json", confirmed_measurements_json),
+        ("track.json", track_json),
+        ("statistics.json", statistics_json),
+    ]:
+        with open(os.path.join(args.output, output_filename), "w") as fp:
+            json.dump(data, fp, indent=4)
+
+
+if __name__ == "__main__":
+    main()
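The --settings file consumed by process() above is the track-settings.json that the track-processing worker in the next diff writes with JSON.stringify. A minimal example of its expected shape — the zone values here are taken from the commented-out sample in that diff, not from real data:

{
  "settingsGeneratedAt": 1600000000000,
  "filters": [
    {
      "type": "PrivacyZonesFilter",
      "config": { "privacyZones": [{ "longitude": 10, "latitude": 10, "radius": 250 }] }
    }
  ]
}

Filter entries with an unknown "type" are skipped with a warning, so the format can grow without breaking this script.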
Track-processing worker:

@@ -5,7 +5,7 @@ const { spawn } = require('child_process');
 const queue = require('./queue');
 require('./db');
 const { Track } = require('./models');
-const { PROCESSING_DIR, OBS_FACE_CACHE_DIR, PROCESSING_OUTPUT_DIR } = require('./paths');
+const { API_ROOT_DIR, PROCESSING_DIR, OBS_FACE_CACHE_DIR, PROCESSING_OUTPUT_DIR } = require('./paths');
 
 queue.process('processTrack', async (job) => {
   const track = await Track.findById(job.data.trackId);
@@ -39,10 +39,25 @@ queue.process('processTrack', async (job) => {
 
   // copy original file to processing dir
   const inputFilePath = path.join(inputDirectory, 'track.csv');
-  const originalFilePath = track.getOriginalFilePath()
+  const originalFilePath = track.getOriginalFilePath();
   console.log(`[${track.slug}] Copy ${originalFilePath} to ${inputFilePath}`);
   await fs.promises.copyFile(originalFilePath, inputFilePath);
 
+  // create track settings file
+  const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
+  console.log(`[${track.slug}] Create settings at ${settingsFilePath}`);
+  const settings = {
+    settingsGeneratedAt: new Date().getTime(),
+    filters: [
+      // TODO: Add actual privacy zones from user database
+      /* {
+        type: 'PrivacyZonesFilter',
+        config: { privacyZones: [{ longitude: 10, latitude: 10, radius: 250 }] },
+      }, */
+    ],
+  };
+  await fs.promises.writeFile(settingsFilePath, JSON.stringify(settings));
+
   // Create output directory
   const outputDirectory = path.join(PROCESSING_OUTPUT_DIR, filePath);
   await fs.promises.mkdir(outputDirectory, { recursive: true });
@@ -54,15 +69,19 @@ queue.process('processTrack', async (job) => {
 
   // TODO: Generate track transformation settings (privacy zones etc)
   // const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
+  //
   const child = spawn(
-    'obs-process-track',
+    'python',
     [
+      path.join(API_ROOT_DIR, 'src', 'process_track.py'),
       '--input',
       inputFilePath,
       '--output',
       outputDirectory,
       '--path-cache',
       OBS_FACE_CACHE_DIR,
+      '--settings',
+      settingsFilePath,
       // '--anonymize-user-id', 'remove',
       // '--anonymize-measurement-id', 'remove',
     ],
@@ -102,7 +121,7 @@ queue.process('processTrack', async (job) => {
 
     // Maybe we have found out the recording date, regenerate the automatic
     // title (if not yet manually set)
-    await track.autoGenerateTitle()
+    await track.autoGenerateTitle();
   } catch (err) {
     console.error('Processing failed:', err);
     track.processingLog += String(err) + '\n' + err.stack + '\n';
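With the switch from the obs-process-track entry point to invoking the script directly, plus the new --settings argument, the spawned child corresponds to roughly the following command line (angle-bracketed values stand in for the resolved paths and are illustrative, not literals from the source):

python <API_ROOT_DIR>/src/process_track.py \
  --input <inputDirectory>/track.csv \
  --output <PROCESSING_OUTPUT_DIR>/<filePath> \
  --path-cache <OBS_FACE_CACHE_DIR> \
  --settings <inputDirectory>/track-settings.json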
Frontend config loader:

@@ -5,7 +5,7 @@ interface Config {
 }
 
 async function loadConfig(): Promise<Config> {
-  const response = await fetch('./config.json')
+  const response = await fetch(__webpack_public_path__ + 'config.json')
   const config = await response.json()
   return config
 }
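A note on this change: './config.json' resolves relative to the current page URL, so on a nested route such as /tracks/foo the request would hit /tracks/config.json and fail. webpack's __webpack_public_path__ free variable holds the configured output.publicPath at runtime, anchoring the request to wherever the bundle is actually served from. A sketch, assuming a public path of '/':

// page URL: https://example.com/tracks/foo, assumed publicPath: '/'
fetch('./config.json')                           // -> https://example.com/tracks/config.json (wrong)
fetch(__webpack_public_path__ + 'config.json')   // -> https://example.com/config.json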