Added chunking code for API limits

pull/44/head
moorsey 2024-01-06 11:14:19 +00:00 committed by GitHub
parent 335cb3ae75
commit d2b094f816
1 changed file with 17 additions and 4 deletions


@@ -19,6 +19,18 @@ import os
 import re
 import json

+# chunking
+def sync_in_chunks(api):
+    chunk_size = 99  # Maximum commands per request
+    while api.queue:
+        chunk = api.queue[:chunk_size]  # Get the next chunk
+        api.queue = api.queue[chunk_size:]  # Remove the processed chunk from the queue
+        sync(api, chunk)  # Sync the current chunk
+        logging.info('Synced chunk of %d commands.', chunk_size)

 # Connect to SQLite database
@@ -491,8 +503,7 @@ def commit_labels_update(api, overview_task_ids, overview_task_labels):

 # Update tasks in batch with Todoist Sync API
-def sync(api):
+def sync(api, chunk):
 #    # This approach does not seem to work correctly.
 #    BASE_URL = "https://api.todoist.com"
 #    SYNC_VERSION = "v9"
@@ -510,7 +521,7 @@ def sync(api):
     }
     data = 'sync_token=' + api.sync_token + \
-        '&commands=' + json.dumps(api.queue)
+        '&commands=' + json.dumps(chunk)
     response = requests.post(
         'https://api.todoist.com/sync/v9/sync', headers=headers, data=data)
@@ -1520,8 +1531,10 @@ def main():
                                overview_task_labels)

     # Sync all queued up changes
+    # if api.queue:
+    #     sync(api)
     if api.queue:
-        sync(api)
+        sync_in_chunks(api)

     num_changes = len(api.queue)+len(api.overview_updated_ids)
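
For reference, below is a minimal self-contained sketch of the same chunked-sync pattern against the Todoist Sync v9 endpoint used in this commit. The function name sync_commands_in_chunks, the Bearer-token header, and the item_close example commands are illustrative assumptions rather than code from this repository; the 99-command chunk size mirrors the diff and keeps each request under the Sync API's documented limit of 100 commands per request.

    # Sketch only: chunk a command queue and POST each chunk to the Sync API.
    # Assumptions not taken from the diff: the API token is passed in directly,
    # and each command is a dict already shaped for the Todoist Sync v9 API.
    import json
    import uuid
    import requests

    SYNC_URL = 'https://api.todoist.com/sync/v9/sync'
    CHUNK_SIZE = 99  # stay under the Sync API's per-request command limit

    def sync_commands_in_chunks(token, commands, sync_token='*'):
        """POST the queued commands in chunks; return the last response body."""
        last_response = None
        while commands:
            chunk, commands = commands[:CHUNK_SIZE], commands[CHUNK_SIZE:]
            response = requests.post(
                SYNC_URL,
                headers={'Authorization': 'Bearer %s' % token},
                data={'sync_token': sync_token, 'commands': json.dumps(chunk)},
            )
            response.raise_for_status()
            last_response = response.json()
            # Carry the new sync token forward so later chunks build on earlier ones.
            sync_token = last_response.get('sync_token', sync_token)
        return last_response

    # Example: close two tasks in one run (hypothetical task IDs).
    commands = [
        {'type': 'item_close', 'uuid': str(uuid.uuid4()), 'args': {'id': task_id}}
        for task_id in ('6X7rM8997g3RQmvh', '6X7rfFVPjhvv84XG')
    ]
    # sync_commands_in_chunks('YOUR_API_TOKEN', commands)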