mirror of https://github.com/Hoffelhas/autodoist
Implemented the Sync API and a commit queue so that task content updates are sent in batches and we don't run into the request limits. A minimal sketch of the batching flow is included after the file summary below.
parent b741b83b14
commit 8541f06d92

autodoist.py (248 changed lines)
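A minimal sketch of the batching flow this commit introduces, written in standalone form for clarity: the endpoint, headers, and command format are taken from the diff below, while the helper names queue_content_update and flush_queue are illustrative only. The actual code hangs the queue and sync token off the TodoistAPI object and derives the command uuid from time.perf_counter().

import json
import uuid

import requests

SYNC_URL = 'https://api.todoist.com/sync/v9/sync'  # Sync API v9 endpoint used in the diff


def queue_content_update(queue, task_id, content):
    # Append one Sync API command to a local queue instead of firing an
    # immediate per-task REST request.
    queue.append({
        "type": "item_update",
        "uuid": str(uuid.uuid4()),  # unique request id (the commit uses time.perf_counter())
        "args": {"id": task_id, "content": content},
    })


def flush_queue(token, sync_token, queue):
    # Send every queued command in one batched request. Passing a dict lets
    # requests form-encode the body; the commit builds the body string by hand
    # and URL-quotes task content instead.
    headers = {'Authorization': 'Bearer %s' % token}
    payload = {'sync_token': sync_token, 'commands': json.dumps(queue)}
    response = requests.post(SYNC_URL, headers=headers, data=payload)
    response.raise_for_status()
    return response.json()  # includes a fresh sync_token for the next partial sync

In the commit itself, commit_content_update() and commit_labels_update() fill api.queue while autodoist_magic() runs, and a single sync(api) call at the end of each main-loop iteration flushes the whole queue.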
@@ -7,6 +7,7 @@ from todoist_api_python.models import Project
from todoist_api_python.http_requests import get
from todoist_api_python.http_requests import post
from urllib.parse import urljoin
from urllib.parse import quote
import sys
import time
import requests
@@ -17,6 +18,8 @@ import time
import sqlite3
import os
import re
import json


# Connect to SQLite database

@@ -238,18 +241,6 @@ def make_wide(formatter, w=120, h=36):
        logging.error("Argparse help formatter failed, falling back.")
        return formatter

# Sync with Todoist API


def sync(api):
    try:
        logging.debug('Syncing the current state from the API')
        api.sync()
    except Exception as e:
        logging.exception(
            'Error trying to sync with Todoist API: %s' % str(e))
        quit()

# Simple query for yes/no answer

@@ -323,7 +314,7 @@ def verify_label_existance(api, label_name, prompt_mode):
            logging.info('Exiting Autodoist.')
            exit(1)

    return 0
    return labels

# Initialisation of Autodoist

@@ -379,9 +370,11 @@ def initialise_api(args):
        api_arguments['cache'] = None

    api = TodoistAPI(**api_arguments)

    logging.info("Autodoist has successfully connected to Todoist!\n")

    sync_api = initialise_sync_api(api)
    api.sync_token = sync_api['sync_token']  # Save SYNC API token to enable partial syncs

    # Check if labels exist

    # If labeling argument is used
@@ -390,16 +383,17 @@ def initialise_api(args):
        # Verify that the next action label exists; ask user if it needs to be created
        verify_label_existance(api, args.label, 1)

    # If regeneration mode is used, verify labels
    if args.regeneration is not None:
    # TODO: Disabled for now
    # # If regeneration mode is used, verify labels
    # if args.regeneration is not None:

        # Verify the existance of the regeneraton labels; force creation of label
        regen_labels_id = [verify_label_existance(
            api, regen_label, 2) for regen_label in args.regen_label_names]
    # # Verify the existance of the regeneraton labels; force creation of label
    # regen_labels_id = [verify_label_existance(
    #     api, regen_label, 2) for regen_label in args.regen_label_names]

    else:
        # Label functionality not needed
        regen_labels_id = [None, None, None]
    # else:
    #     # Label functionality not needed
    #     regen_labels_id = [None, None, None]

    return api
@@ -445,6 +439,87 @@ def get_all_data(api):

    return data


def initialise_sync_api(api):
    bearer_token = 'Bearer %s' % api._token

    headers = {
        'Authorization': bearer_token,
        'Content-Type': 'application/x-www-form-urlencoded',
    }
    data = 'sync_token=*&resource_types=["all"]'

    response = requests.post('https://api.todoist.com/sync/v9/sync', headers=headers, data=data)

    return json.loads(response.text)

# Commit task content change to queue


def commit_content_update(api, task_id, content):
    uuid = str(time.perf_counter())  # Create unique request id
    data = {"type": "item_update", "uuid": uuid,
            "args": {"id": task_id, "content": quote(content)}}
    api.queue.append(data)

    return api

# Ensure label updates are only issued once per task and commit to queue


def commit_labels_update(api, overview_task_ids, overview_task_labels):

    filtered_overview_ids = [
        k for k, v in overview_task_ids.items() if v != 0]

    for task_id in filtered_overview_ids:
        labels = overview_task_labels[task_id]

        # api.update_task(task_id=task_id, labels=labels) # Not using REST API, since we would get too many single requests
        uuid = str(time.perf_counter())  # Create unique request id
        data = {"type": "item_update", "uuid": uuid,
                "args": {"id": task_id, "labels": labels}}
        api.queue.append(data)

    return api


# Update tasks in batch with Todoist Sync API


def sync(api):
    # # This approach does not seem to work correctly.
    # BASE_URL = "https://api.todoist.com"
    # SYNC_VERSION = "v9"
    # SYNC_API = urljoin(BASE_URL, f"/sync/{SYNC_VERSION}/")
    # SYNC_ENDPOINT = "sync"
    # endpoint = urljoin(SYNC_API, SYNC_ENDPOINT)
    # task_data = post(api._session, endpoint, api._token, data=data)

    try:
        bearer_token = 'Bearer %s' % api._token

        headers = {
            'Authorization': bearer_token,
            'Content-Type': 'application/x-www-form-urlencoded',
        }

        data = 'sync_token=' + api.sync_token + '&commands=' + json.dumps(api.queue)

        response = requests.post(
            'https://api.todoist.com/sync/v9/sync', headers=headers, data=data)

        if response.status_code == 200:
            return response.json()

        response.raise_for_status()
        return response.ok

    except Exception as e:
        logging.exception(
            'Error trying to sync with Todoist API: %s' % str(e))
        quit()

# Find the type based on name suffix.
@@ -560,7 +635,8 @@ def get_task_type(args, connection, task, section, project):
            args, connection, task, 'task_type')

    if task_type is not None:
        logging.debug("Identified '%s > %s > %s' as %s type",project.name, section.name, task.content, task_type)
        logging.debug("Identified '%s > %s > %s' as %s type",
                      project.name, section.name, task.content, task_type)

    return task_type, task_type_changed
@@ -594,23 +670,11 @@ def remove_label(task, label, overview_task_ids, overview_task_labels):
        overview_task_ids[task.id] = -1
        overview_task_labels[task.id] = labels

# Ensure label updates are only issued once per task


def update_labels(api, overview_task_ids, overview_task_labels):
    filtered_overview_ids = [
        k for k, v in overview_task_ids.items() if v != 0]
    for task_id in filtered_overview_ids:
        labels = overview_task_labels[task_id]
        api.update_task(task_id=task_id, labels=labels)

    return filtered_overview_ids


# Check if header logic needs to be applied


def check_header(api, model, overview_updated_ids):
def check_header(api, model):
    header_all_in_level = False
    unheader_all_in_level = False
    regex_a = '(^[*]{2}\s*)(.*)'
@@ -630,7 +694,7 @@ def check_header(api, model, overview_updated_ids):
                unheader_all_in_level = True
                model.content = rb[2]  # Local record
                api.update_task(task_id=model.id, content=rb[2])
                overview_updated_ids.append(model.id)
                # overview_updated_ids.append(model.id)
        else:
            ra = re.search(regex_a, model.name)
            rb = re.search(regex_b, model.name)
@@ -639,54 +703,64 @@ def check_header(api, model, overview_updated_ids):
            if ra:
                header_all_in_level = True
                api.update_section(section_id=model.id, name=ra[2])
                overview_updated_ids.append(model.id)
                api.overview_updated_ids.append(model.id)
            if rb:
                unheader_all_in_level = True
                api.update_section(section_id=model.id, name=rb[2])
                overview_updated_ids.append(model.id)
                api.overview_updated_ids.append(model.id)

        elif isinstance(model, Project):
            if ra:
                header_all_in_level = True
                api.update_project(project_id=model.id, name=ra[2])
                overview_updated_ids.append(model.id)
                api.overview_updated_ids.append(model.id)
            if rb:
                unheader_all_in_level = True
                api.update_project(project_id=model.id, name=rb[2])
                overview_updated_ids.append(model.id)
                api.overview_updated_ids.append(model.id)
    except:
        logging.debug('check_header: no right model found')

    return header_all_in_level, unheader_all_in_level
    return api, header_all_in_level, unheader_all_in_level

# Logic for applying and removing headers


def modify_task_headers(api, task, section_tasks, overview_updated_ids, header_all_in_p, unheader_all_in_p, header_all_in_s, unheader_all_in_s, header_all_in_t, unheader_all_in_t):
def modify_task_headers(api, task, section_tasks, header_all_in_p, unheader_all_in_p, header_all_in_s, unheader_all_in_s, header_all_in_t, unheader_all_in_t):

    if any([header_all_in_p, header_all_in_s]):
        if task.content[:2] != '* ':
            api.update_task(task_id=task.id, content='* ' + task.content)
            overview_updated_ids.append(task.id)
            content = '* ' + task.content
            api = commit_content_update(api, task.id, content)
            # api.update_task(task_id=task.id, content='* ' + task.content)
            # overview_updated_ids.append(task.id)

    if any([unheader_all_in_p, unheader_all_in_s]):
        if task.content[:2] == '* ':
            api.update_task(task_id=task.id, content=task.content[2:])
            overview_updated_ids.append(task.id)
            content = task.content[2:]
            api = commit_content_update(api, task.id, content)
            # api.update_task(task_id=task.id, content=task.content[2:])
            # overview_updated_ids.append(task.id)

    if header_all_in_t:
        if task.content[:2] != '* ':
            api.update_task(task_id=task.id, content='* ' + task.content)
            overview_updated_ids.append(task.id)
            find_and_headerify_all_children(
                api, task, section_tasks, overview_updated_ids, 1)
            content = '* ' + task.content
            api = commit_content_update(api, task.id, content)
            # api.update_task(task_id=task.id, content='* ' + task.content)
            # overview_updated_ids.append(task.id)
            api = find_and_headerify_all_children(
                api, task, section_tasks, 1)

    if unheader_all_in_t:
        if task.content[:2] == '* ':
            api.update_task(task_id=task.id, content=task.content[2:])
            overview_updated_ids.append(task.id)
            find_and_headerify_all_children(
                api, task, section_tasks, overview_updated_ids, 2)
            content = task.content[2:]
            api = commit_content_update(api, task.id, content)
            # api.update_task(task_id=task.id, content=task.content[2:])
            # overview_updated_ids.append(task.id)
            api = find_and_headerify_all_children(
                api, task, section_tasks, 2)

    return api


# Check regen mode based on label name
@@ -727,7 +801,7 @@ def check_regen_mode(api, item, regen_labels_id):
# Recurring lists logic


def run_recurring_lists_logic(args, api, connection, overview_updated_ids, task, task_items, task_items_all, regen_labels_id):
def run_recurring_lists_logic(args, api, connection, task, task_items, task_items_all, regen_labels_id):

    if task.parent_id == 0:
        try:
@@ -810,7 +884,7 @@ def run_recurring_lists_logic(args, api, connection, overview_updated_ids, task,
                    # Update due-date to today
                    api.update_task(
                        task_id=task.id, due_date=today_str, due_string=task.due.string)
                    logging.info(
                    logging.debug(
                        "Update date on task: '%s'" % (task.content))

                    # Save the new date for reference us
@@ -860,7 +934,7 @@ def find_and_clean_all_children(task_ids, task, section_tasks):
    return task_ids


def find_and_headerify_all_children(api, task, section_tasks, overview_updated_ids, mode):
def find_and_headerify_all_children(api, task, section_tasks, mode):

    child_tasks = list(filter(lambda x: x.parent_id == task.id, section_tasks))

@@ -869,18 +943,22 @@ def find_and_headerify_all_children(api, task, section_tasks, overview_updated_i
        # Children found, go deeper
        if mode == 1:
            if child_task.content[:2] != '* ':
                api.update_task(task_id=child_task.id,
                                content='* ' + child_task.content)
                overview_updated_ids.append(child_task.id)
                api = commit_content_update(
                    api, child_task.id, '* ' + child_task.content)
                # api.update_task(task_id=child_task.id,
                #                 content='* ' + child_task.content)
                # overview_updated_ids.append(child_task.id)

        elif mode == 2:
            if child_task.content[:2] == '* ':
                api.update_task(task_id=child_task.id,
                                content=child_task.content[2:])
                overview_updated_ids.append(child_task.id)
                api = commit_content_update(
                    api, child_task.id, child_task.content[2:])
                # api.update_task(task_id=child_task.id,
                #                 content=child_task.content[2:])
                # overview_updated_ids.append(child_task.id)

        find_and_headerify_all_children(
            api, child_task, section_tasks, overview_updated_ids, mode)
            api, child_task, section_tasks, mode)

    return 0
@@ -892,10 +970,11 @@ def autodoist_magic(args, api, connection):
    # Preallocate dictionaries and other values
    overview_task_ids = {}
    overview_task_labels = {}
    overview_updated_ids = []
    next_action_label = args.label
    regen_labels_id = args.regen_label_names
    first_found = [False, False, False]
    api.queue = []
    api.overview_updated_ids = []

    # Get all projects info
    try:
@@ -913,8 +992,8 @@ def autodoist_magic(args, api, connection):
        db_check_existance(connection, project)

        # Check if we need to (un)header entire project
        header_all_in_p, unheader_all_in_p = check_header(
            api, project, overview_updated_ids)
        api, header_all_in_p, unheader_all_in_p = check_header(
            api, project)

        # Get project type
        if next_action_label is not None:
@@ -964,12 +1043,12 @@ def autodoist_magic(args, api, connection):

        # Reset
        first_found[0] = False
        disable_section_labelling = 0

        for section in sections:

            # Check if section labelling is disabled (useful for e.g. Kanban)
            if next_action_label is not None:
                disable_section_labelling = 0
                try:
                    if section.name.startswith('*') or section.name.endswith('*'):
                        disable_section_labelling = 1

@@ -980,8 +1059,8 @@ def autodoist_magic(args, api, connection):
            db_check_existance(connection, section)

            # Check if we need to (un)header entire secion
            header_all_in_s, unheader_all_in_s = check_header(
                api, section, overview_updated_ids)
            api, header_all_in_s, unheader_all_in_s = check_header(
                api, section)

            # Get section type
            if next_action_label:
@@ -1036,12 +1115,12 @@ def autodoist_magic(args, api, connection):
                    filter(lambda x: x.parent_id == task.id, non_completed_tasks))

                # Check if we need to (un)header entire task tree
                header_all_in_t, unheader_all_in_t = check_header(
                    api, task, overview_updated_ids)
                api, header_all_in_t, unheader_all_in_t = check_header(
                    api, task)

                # Modify headers where needed
                modify_task_headers(api, task, section_tasks, overview_updated_ids, header_all_in_p,
                                    unheader_all_in_p, header_all_in_s, unheader_all_in_s, header_all_in_t, unheader_all_in_t)
                api = modify_task_headers(api, task, section_tasks, header_all_in_p,
                                          unheader_all_in_p, header_all_in_s, unheader_all_in_s, header_all_in_t, unheader_all_in_t)

                # TODO: Check is regeneration is still needed, now that it's part of core Todoist. Disabled for now.
                # Logic for recurring lists
@@ -1057,7 +1136,7 @@ def autodoist_magic(args, api, connection):
                # If options turned on, start recurring lists logic #TODO: regeneration currently doesn't work, becaue TASK_ENDPOINT doesn't show completed tasks. Use workaround.
                if args.regeneration is not None or args.end:
                    run_recurring_lists_logic(
                        args, api, connection, overview_updated_ids, task, child_tasks, child_tasks_all, regen_labels_id)
                        args, api, connection, task, child_tasks, child_tasks_all, regen_labels_id)

                # If options turned on, start labelling logic
                if next_action_label is not None:
@@ -1352,7 +1431,7 @@ def autodoist_magic(args, api, connection):
                        first_found[0] = True

    # Return all ids and corresponding labels that need to be modified
    return overview_task_ids, overview_task_labels, overview_updated_ids
    return overview_task_ids, overview_task_labels

# Main

@@ -1395,7 +1474,7 @@ def main():
    args = parser.parse_args()

    # #TODO: Temporary disable this feature for now. Find a way to see completed tasks first, since REST API v2 lost this funcionality.
    args.regeneration = 0
    args.regeneration = None

    # Addition of regeneration labels
    args.regen_label_names = ('Regen_off', 'Regen_all',
@@ -1427,17 +1506,20 @@ def main():
    # Start main loop
    while True:
        start_time = time.time()
        # sync(api)

        # Evaluate projects, sections, and tasks
        overview_task_ids, overview_task_labels, overview_updated_ids = autodoist_magic(
        overview_task_ids, overview_task_labels = autodoist_magic(
            args, api, connection)

        # Commit next action label changes
        if args.label is not None:
            updated_ids = update_labels(api, overview_task_ids,
                                        overview_task_labels)
            num_changes = len(updated_ids)+len(overview_updated_ids)
            api = commit_labels_update(api, overview_task_ids,
                                       overview_task_labels)

        # Sync all queued up changes
        sync(api)

        num_changes = len(api.queue)+len(api.overview_updated_ids)

        if num_changes:
            if num_changes == 1: