pull/44/merge
moorsey 2024-01-06 12:39:36 +00:00 committed by GitHub
commit 94d1427924
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 25 additions and 4 deletions

View File

@ -193,3 +193,13 @@ To build the docker container, check out the repository and run:
To run autodoist inside the docker container:
docker run -it autodoist:latest
Docker-compose example:
version: "3.7"
services:
autodoist:
image: ghcr.io/Hoffelhas/autodoist:latest
container_name: autodoist
command: -l=next -hf=2 -a=**apikey**
restart: unless-stopped

View File

@ -19,6 +19,18 @@ import os
import re
import json
# Chunking code to meet the Todoist Sync API limit of 100 commands per request.
def sync_in_chunks(api):
    """Drain api.queue by syncing it in chunks of at most 99 commands.

    The Todoist Sync API rejects requests carrying 100 or more commands,
    so the queued commands are sent in slices, each via sync().
    """
    chunk_size = 99  # Maximum commands per request (stay under the API's limit of 100).
    while api.queue:
        chunk = api.queue[:chunk_size]       # Get the next chunk.
        api.queue = api.queue[chunk_size:]   # Remove the processed chunk from the queue.
        sync(api, chunk)                     # Sync the current chunk.
        # Fix: the last chunk may hold fewer than chunk_size commands, so
        # log the actual number sent rather than the constant.
        logging.info('Synced chunk of %d commands.', len(chunk))
# Connect to SQLite database
@ -491,8 +503,7 @@ def commit_labels_update(api, overview_task_ids, overview_task_labels):
# Update tasks in batch with Todoist Sync API
def sync(api):
def sync(api, chunk):
# # This approach does not seem to work correctly.
# BASE_URL = "https://api.todoist.com"
# SYNC_VERSION = "v9"
@ -510,7 +521,7 @@ def sync(api):
}
data = 'sync_token=' + api.sync_token + \
'&commands=' + json.dumps(api.queue)
'&commands=' + json.dumps(chunk)
response = requests.post(
'https://api.todoist.com/sync/v9/sync', headers=headers, data=data)
@ -1521,7 +1532,7 @@ def main():
# Sync all queued up changes
if api.queue:
sync(api)
sync_in_chunks(api)
num_changes = len(api.queue)+len(api.overview_updated_ids)