Remove duplicate events from the same track

This commit is contained in:
Paul Bienkowski 2021-12-25 21:22:48 +01:00
parent c63fd49245
commit 057e2bcc6c

View file

@@ -163,8 +163,11 @@ async def process_track(session, track, data_source):
with open(target, "w") as fp: with open(target, "w") as fp:
json.dump(data, fp, indent=4) json.dump(data, fp, indent=4)
log.info("Import events into database...") log.info("Clearing old track data...")
await clear_track_data(session, track) await clear_track_data(session, track)
await session.commit()
log.info("Import events into database...")
await import_overtaking_events(session, track, overtaking_events) await import_overtaking_events(session, track, overtaking_events)
log.info("Write track statistics and update status...") log.info("Write track statistics and update status...")
@@ -207,7 +210,10 @@ async def clear_track_data(session, track):
async def import_overtaking_events(session, track, overtaking_events): async def import_overtaking_events(session, track, overtaking_events):
event_models = [] # We use a dictionary to prevent per-track hash collisions, ignoring all
# but the first event of the same hash
event_models = {}
for m in overtaking_events: for m in overtaking_events:
hex_hash = hashlib.sha256( hex_hash = hashlib.sha256(
struct.pack( struct.pack(
@@ -215,8 +221,7 @@ async def import_overtaking_events(session, track, overtaking_events):
) )
).hexdigest() ).hexdigest()
event_models.append( event_models[hex_hash] = OvertakingEvent(
OvertakingEvent(
track_id=track.id, track_id=track.id,
hex_hash=hex_hash, hex_hash=hex_hash,
way_id=m.get("OSM_way_id"), way_id=m.get("OSM_way_id"),
@@ -235,6 +240,5 @@ async def import_overtaking_events(session, track, overtaking_events):
course=m["course"], course=m["course"],
speed=m["speed"], speed=m["speed"],
) )
)
session.add_all(event_models) session.add_all(event_models.values())