Skip to content

Commit

Permalink
EPG updates are potentially using a lot of memory to save in batches …
Browse files Browse the repository at this point in the history
…due to the diffing

This should fix that for systems with less memory.
  • Loading branch information
Josh5 committed Oct 27, 2024
1 parent f7444ca commit 6ecb241
Showing 1 changed file with 12 additions and 8 deletions.
20 changes: 12 additions & 8 deletions backend/epgs.py
Original file line number Diff line number Diff line change
Expand Up @@ -268,10 +268,12 @@ def parse_and_save_programmes():
)
)
logger.info("Saving new programmes list for EPG #%s from path - '%s'", epg_id, xmltv_file)
# Save all new
db.session.bulk_save_objects(items)
# Commit all updates to channel programmes
db.session.commit()
# Save all new items in batches of 50
batch_size = 50
for i in range(0, len(items), batch_size):
db.session.bulk_save_objects(items[i:i + batch_size], update_changed_only=False)
# Commit updates to channel programmes
db.session.commit()
logger.info("Successfully imported %s programmes from path - '%s'", len(items), xmltv_file)

await run_sync(parse_and_save_programmes)()
Expand Down Expand Up @@ -573,10 +575,12 @@ async def update_channel_epg_with_online_data(config):
db_programmes = db_programmes_query.all()
logger.info(" - Updating programme list for %s - %s.", channel_id, result.name)
programmes = await update_programmes_concurrently(settings, db_programmes, cache, lock)
# Save all new
db.session.bulk_save_objects(programmes)
# Commit all updates to channel programmes
db.session.commit()
# Save all new items in batches of 50
batch_size = 50
for i in range(0, len(programmes), batch_size):
db.session.bulk_save_objects(programmes[i:i + batch_size])
# Commit updates to channel programmes
db.session.commit()
execution_time = time.time() - start_time
logger.info("Updating online EPG data for configured channels took '%s' seconds", int(execution_time))

Expand Down

0 comments on commit 6ecb241

Please sign in to comment.