Remove duplicate events from the same track
parent c63fd49245
commit 057e2bcc6c
@@ -163,8 +163,11 @@ async def process_track(session, track, data_source):
     with open(target, "w") as fp:
         json.dump(data, fp, indent=4)
 
-    log.info("Import events into database...")
+    log.info("Clearing old track data...")
     await clear_track_data(session, track)
+    await session.commit()
+
+    log.info("Import events into database...")
     await import_overtaking_events(session, track, overtaking_events)
 
     log.info("Write track statistics and update status...")
@@ -207,7 +210,10 @@ async def clear_track_data(session, track):
 
 
 async def import_overtaking_events(session, track, overtaking_events):
-    event_models = []
+    # We use a dictionary to prevent per-track hash collisions, ignoring all
+    # but the first event of the same hash
+    event_models = {}
+
     for m in overtaking_events:
         hex_hash = hashlib.sha256(
             struct.pack(
@@ -215,26 +221,24 @@ async def import_overtaking_events(session, track, overtaking_events):
             )
         ).hexdigest()
 
-        event_models.append(
-            OvertakingEvent(
-                track_id=track.id,
-                hex_hash=hex_hash,
-                way_id=m.get("OSM_way_id"),
-                direction_reversed=m.get("OSM_way_orientation", 0) < 0,
-                geometry=json.dumps(
-                    {
-                        "type": "Point",
-                        "coordinates": [m["longitude"], m["latitude"]],
-                    }
-                ),
-                latitude=m["latitude"],
-                longitude=m["longitude"],
-                time=m["time"].astimezone(pytz.utc).replace(tzinfo=None),
-                distance_overtaker=m["distance_overtaker"],
-                distance_stationary=m["distance_stationary"],
-                course=m["course"],
-                speed=m["speed"],
-            )
-        )
+        event_models[hex_hash] = OvertakingEvent(
+            track_id=track.id,
+            hex_hash=hex_hash,
+            way_id=m.get("OSM_way_id"),
+            direction_reversed=m.get("OSM_way_orientation", 0) < 0,
+            geometry=json.dumps(
+                {
+                    "type": "Point",
+                    "coordinates": [m["longitude"], m["latitude"]],
+                }
+            ),
+            latitude=m["latitude"],
+            longitude=m["longitude"],
+            time=m["time"].astimezone(pytz.utc).replace(tzinfo=None),
+            distance_overtaker=m["distance_overtaker"],
+            distance_stationary=m["distance_stationary"],
+            course=m["course"],
+            speed=m["speed"],
+        )
 
-    session.add_all(event_models)
+    session.add_all(event_models.values())
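
The deduplication works by keying the dictionary on the event's SHA-256 hash, so two identical measurements in the same track collapse into a single OvertakingEvent before session.add_all() receives event_models.values(). A minimal, self-contained sketch of that pattern follows; the dedupe_by_hash helper and the packed fields (latitude, longitude, time as a plain number) are illustrative assumptions, since the real struct format string sits in an unchanged part of import_overtaking_events not shown in this diff.

    import hashlib
    import struct

    def dedupe_by_hash(events):
        """Collapse events that hash to the same value, mirroring the dict above.

        The "ddd" format and the chosen fields are assumptions for illustration,
        not the format string used by the real importer.
        """
        unique = {}
        for m in events:
            hex_hash = hashlib.sha256(
                struct.pack("ddd", m["latitude"], m["longitude"], m["time"])
            ).hexdigest()
            # Plain dict assignment lets a later duplicate overwrite an earlier
            # one; use unique.setdefault(hex_hash, m) if the first occurrence
            # should win.
            unique[hex_hash] = m
        return list(unique.values())

    if __name__ == "__main__":
        events = [
            {"latitude": 48.1, "longitude": 11.5, "time": 1.0},
            {"latitude": 48.1, "longitude": 11.5, "time": 1.0},  # duplicate
            {"latitude": 48.2, "longitude": 11.6, "time": 2.0},
        ]
        print(len(dedupe_by_hash(events)))  # -> 2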