api: Generate track-settings file and read it in process_track.py
This commit is contained in:
parent ddeb5f9195
commit e8a66fa7b9
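In short: the worker now writes a track-settings.json file next to the copied input track and passes its path to the Python processing script through a new --settings option; process_track.py loads that JSON and builds additional measurement filters from it. The generated file has roughly the following shape (a sketch in Python notation; the privacy-zone entry mirrors the commented-out placeholder in the worker below and is not real data):

    # Hypothetical contents of track-settings.json, shown as a Python literal.
    # settingsGeneratedAt comes from JavaScript's Date.getTime() (milliseconds).
    settings = {
        "settingsGeneratedAt": 1618000000000,
        "filters": [
            {
                "type": "PrivacyZonesFilter",
                "config": {"privacyZones": [{"longitude": 10, "latitude": 10, "radius": 250}]},
            },
        ],
    }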
@@ -1 +1 @@
-Subproject commit 657a63975325f42bd932108d7d73ddfbd04487a7
+Subproject commit 16909ea356ff6695d3e97d46efce78bca5fe5c40
@@ -19,6 +19,7 @@ const TRACKS_DIR = path.join(DATA_DIR, 'tracks');
 const OBS_FACE_CACHE_DIR = path.join(DATA_DIR, 'obs-face-cache');
 
 module.exports = {
+  API_ROOT_DIR,
   DATA_DIR,
   PROCESSING_DIR,
   PROCESSING_OUTPUT_DIR,
@@ -6,7 +6,15 @@ import json
 
 from obs.face.importer import ImportMeasurementsCsv
 from obs.face.annotate import AnnotateMeasurements
-from obs.face.filter import PrivacyFilter, ChainFilter, AnonymizationMode, RequiredFieldsFilter, ConfirmedFilter
+from obs.face.filter import (
+    PrivacyFilter,
+    ChainFilter,
+    AnonymizationMode,
+    RequiredFieldsFilter,
+    ConfirmedFilter,
+    PrivacyZone,
+    PrivacyZonesFilter,
+)
 from obs.face.osm import DataSource as OSMDataSource
 
 log = logging.getLogger(__name__)
@@ -33,6 +41,12 @@ def main():
         dest="cache_dir",
         help="path where the visualization data will be stored",
     )
+    parser.add_argument(
+        "--settings",
+        type=argparse.FileType("rt", encoding="utf-8"),
+        default=None,
+        help="path to the track settings file",
+    )
 
     args = parser.parse_args()
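The --settings option uses argparse.FileType, so argparse opens the file itself and process() can call json.load on it directly. A minimal standalone sketch of that interaction (not the real entry point; the file is created here just so the sketch runs):

    # Sketch: argparse.FileType hands json.load an already-open text file.
    import argparse
    import json

    # create a tiny settings file so this example is self-contained
    with open("track-settings.json", "w", encoding="utf-8") as fp:
        json.dump({"filters": []}, fp)

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--settings",
        type=argparse.FileType("rt", encoding="utf-8"),
        default=None,
    )
    args = parser.parse_args(["--settings", "track-settings.json"])
    settings = json.load(args.settings)  # FileType already opened the file for reading
    print(settings.get("filters", []))  # -> []

Note that the default is None, while process() below calls json.load(args.settings) unconditionally, so callers such as the worker must always pass --settings.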
@@ -53,6 +67,9 @@ def process(args):
 
     os.makedirs(args.output, exist_ok=True)
 
+    log.info("Loading settings")
+    settings = json.load(args.settings)
+
     log.info("Annotating and filtering CSV file")
     measurements, statistics = ImportMeasurementsCsv().read(
         filename_input,
@@ -60,16 +77,38 @@ def process(args):
         dataset_id=dataset_id,
     )
 
-    measurements = AnnotateMeasurements(osm, cache_dir=args.cache_dir).annotate(measurements)
+    measurements = AnnotateMeasurements(osm, cache_dir=args.cache_dir).annotate(
+        measurements
+    )
+
+    filters_from_settings = []
+    for filter_description in settings.get("filters", []):
+        filter_type = filter_description.get("type")
+        if filter_type == "PrivacyZonesFilter":
+            privacy_zones = [
+                PrivacyZone(
+                    latitude=zone.get("latitude"),
+                    longitude=zone.get("longitude"),
+                    radius=zone.get("radius"),
+                )
+                for zone in filter_description.get("config", {}).get("privacyZones", [])
+            ]
+            filters_from_settings.append(PrivacyZonesFilter(privacy_zones))
+        else:
+            log.warning("Ignoring unknown filter type %r in settings file", filter_type)
 
-    valid_measurements = ChainFilter(
+    input_filter = ChainFilter(
         RequiredFieldsFilter(),
         PrivacyFilter(
             user_id_mode=AnonymizationMode.REMOVE,
             measurement_id_mode=AnonymizationMode.REMOVE,
         ),
-    ).filter(measurements, log=log)
-    confirmed_measurements = ConfirmedFilter().filter(valid_measurements, log=log)
+        *filters_from_settings,
+    )
+    confirmed_filter = ConfirmedFilter()
+
+    valid_measurements = input_filter.filter(measurements, log=log)
+    confirmed_measurements = confirmed_filter.filter(valid_measurements, log=log)
 
     # write out
    confirmed_measurements_json = {
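The loop above dispatches on the "type" field of each filter description, so supporting a new filter kind only needs another branch. A self-contained sketch of that pattern, with stub classes standing in for obs.face.filter's PrivacyZone and PrivacyZonesFilter (assumption: the real constructors take exactly the keyword arguments shown in the diff):

    # Stubbed sketch of the settings-to-filters dispatch; runnable without obs.face.
    import logging

    log = logging.getLogger(__name__)


    class PrivacyZone:  # stub for obs.face.filter.PrivacyZone
        def __init__(self, latitude=None, longitude=None, radius=None):
            self.latitude, self.longitude, self.radius = latitude, longitude, radius


    class PrivacyZonesFilter:  # stub for obs.face.filter.PrivacyZonesFilter
        def __init__(self, privacy_zones):
            self.privacy_zones = privacy_zones


    def build_filters(settings):
        """Turn the parsed track-settings JSON into filter instances."""
        result = []
        for filter_description in settings.get("filters", []):
            filter_type = filter_description.get("type")
            if filter_type == "PrivacyZonesFilter":
                zones = [
                    PrivacyZone(
                        latitude=zone.get("latitude"),
                        longitude=zone.get("longitude"),
                        radius=zone.get("radius"),
                    )
                    for zone in filter_description.get("config", {}).get("privacyZones", [])
                ]
                result.append(PrivacyZonesFilter(zones))
            else:
                log.warning("Ignoring unknown filter type %r in settings file", filter_type)
        return result


    example = {
        "filters": [
            {
                "type": "PrivacyZonesFilter",
                "config": {"privacyZones": [{"longitude": 10, "latitude": 10, "radius": 250}]},
            },
            {"type": "SomethingElse"},  # unknown type: logged and skipped
        ]
    }
    print(len(build_filters(example)))  # -> 1

Because the resulting filters are splatted into ChainFilter after RequiredFieldsFilter and PrivacyFilter, the settings-derived filters always run last in the chain.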
@@ -121,14 +160,14 @@ def process(args):
     }
 
     statistics_json = {
-        "recordedAt": statistics['t_min'].isoformat(),
-        "recordedUntil": statistics['t_max'].isoformat(),
-        "duration": statistics['t'],
-        "length": statistics['d'],
-        "segments": statistics['n_segments'],
-        "numEvents": statistics['n_confirmed'],
-        "numMeasurements": statistics['n_measurements'],
-        "numValid": statistics['n_valid'],
+        "recordedAt": statistics["t_min"].isoformat(),
+        "recordedUntil": statistics["t_max"].isoformat(),
+        "duration": statistics["t"],
+        "length": statistics["d"],
+        "segments": statistics["n_segments"],
+        "numEvents": statistics["n_confirmed"],
+        "numMeasurements": statistics["n_measurements"],
+        "numValid": statistics["n_valid"],
     }
 
     for output_filename, data in [
@@ -137,8 +176,9 @@ def process(args):
         ("track.json", track_json),
         ("statistics.json", statistics_json),
     ]:
-        with open(os.path.join(args.output, output_filename), 'w') as fp:
+        with open(os.path.join(args.output, output_filename), "w") as fp:
             json.dump(data, fp, indent=4)
 
+
 if __name__ == "__main__":
     main()
@@ -39,10 +39,25 @@ queue.process('processTrack', async (job) => {
 
   // copy original file to processing dir
   const inputFilePath = path.join(inputDirectory, 'track.csv');
-  const originalFilePath = track.getOriginalFilePath()
+  const originalFilePath = track.getOriginalFilePath();
   console.log(`[${track.slug}] Copy ${originalFilePath} to ${inputFilePath}`);
   await fs.promises.copyFile(originalFilePath, inputFilePath);
 
+  // create track settings file
+  const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
+  console.log(`[${track.slug}] Create settings at ${settingsFilePath}`);
+  const settings = {
+    settingsGeneratedAt: new Date().getTime(),
+    filters: [
+      // TODO: Add actual privacy zones from user database
+      /* {
+        type: 'PrivacyZonesFilter',
+        config: { privacyZones: [{ longitude: 10, latitude: 10, radius: 250 }] },
+      }, */
+    ],
+  };
+  await fs.promises.writeFile(settingsFilePath, JSON.stringify(settings));
+
   // Create output directory
   const outputDirectory = path.join(PROCESSING_OUTPUT_DIR, filePath);
   await fs.promises.mkdir(outputDirectory, { recursive: true });
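One detail worth noting for consumers of this file: settingsGeneratedAt is produced by Date.getTime(), which returns milliseconds since the Unix epoch, not seconds. A Python reader therefore divides by 1000 (a sketch; the timestamp value is hypothetical):

    # Sketch: converting the worker's millisecond timestamp in Python.
    from datetime import datetime, timezone

    settings_generated_at = 1618000000000  # hypothetical value from track-settings.json
    generated = datetime.fromtimestamp(settings_generated_at / 1000, tz=timezone.utc)
    print(generated.isoformat())  # -> 2021-04-09T20:26:40+00:00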
@@ -54,6 +69,7 @@ queue.process('processTrack', async (job) => {
 
   // TODO: Generate track transformation settings (privacy zones etc)
   // const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
+  //
   const child = spawn(
     'python',
     [
|
@ -64,6 +80,8 @@ queue.process('processTrack', async (job) => {
|
||||||
outputDirectory,
|
outputDirectory,
|
||||||
'--path-cache',
|
'--path-cache',
|
||||||
OBS_FACE_CACHE_DIR,
|
OBS_FACE_CACHE_DIR,
|
||||||
|
'--settings',
|
||||||
|
settingsFilePath,
|
||||||
// '--anonymize-user-id', 'remove',
|
// '--anonymize-user-id', 'remove',
|
||||||
// '--anonymize-measurement-id', 'remove',
|
// '--anonymize-measurement-id', 'remove',
|
||||||
],
|
],
|
||||||
|
@@ -103,7 +121,7 @@ queue.process('processTrack', async (job) => {
 
     // Maybe we have found out the recording date, regenerate the automatic
     // title (if not yet manually set)
-    await track.autoGenerateTitle()
+    await track.autoGenerateTitle();
   } catch (err) {
     console.error('Processing failed:', err);
     track.processingLog += String(err) + '\n' + err.stack + '\n';