import in chunks to avoid smaller systems choking
This commit is contained in:
parent
497e1b739a
commit
0233045959
1 changed files with 18 additions and 14 deletions
|
@ -71,10 +71,14 @@ async def import_osm(connection, filename, import_group=None):
|
||||||
|
|
||||||
# Pass 2: Import
|
# Pass 2: Import
|
||||||
log.info("Pass 2: Import roads")
|
log.info("Pass 2: Import roads")
|
||||||
|
amount = 0
|
||||||
|
for items in chunk(read_file(filename), 10000):
|
||||||
|
amount += 10000
|
||||||
|
log.info(f"...{amount}/{len(ids)} ({100*amount/len(ids)}%)")
|
||||||
async with cursor.copy(
|
async with cursor.copy(
|
||||||
"COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
|
"COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
|
||||||
) as copy:
|
) as copy:
|
||||||
for item in read_file(filename):
|
for item in items:
|
||||||
await copy.write_row(
|
await copy.write_row(
|
||||||
(
|
(
|
||||||
item.way_id,
|
item.way_id,
|
||||||
|
|
Loading…
Add table
Reference in a new issue