-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathtmx_cli.py
More file actions
3009 lines (2448 loc) · 109 KB
/
tmx_cli.py
File metadata and controls
3009 lines (2448 loc) · 109 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python3
"""Thermomix/Cookidoo CLI - Wochenplan Management.
Rein Python, keine externen Dependencies (nur stdlib).
Nutzung:
python3 tmx_cli.py login # Einloggen
python3 tmx_cli.py plan show # Wochenplan anzeigen
python3 tmx_cli.py plan sync --since DATE # Sync von Cookidoo
python3 tmx_cli.py today # Heutige Rezepte
python3 tmx_cli.py search "Linsen" # Suche
"""
import argparse
import datetime as dt
import getpass
import json
import re
import ssl
import sys
import urllib.request
import urllib.error
import urllib.parse
from http.cookiejar import CookieJar
from pathlib import Path
from typing import Optional
# ─────────────────────────────────────────────────────────────────────────────
# Config & Paths
# ─────────────────────────────────────────────────────────────────────────────
SCRIPT_DIR = Path(__file__).parent
WEEKPLAN_JSON = SCRIPT_DIR / "cookidoo_weekplan_raw.json"
COOKIES_FILE = SCRIPT_DIR / "cookidoo_cookies.json"
CONFIG_FILE = Path.home() / ".tmx_config.json"
COOKIDOO_BASE = "https://cookidoo.de"
LOCALE = "de-DE"
# Algolia Search
ALGOLIA_APP_ID = "3TA8NT85XJ"
ALGOLIA_INDEX = "recipes-production-de"
SEARCH_TOKEN_FILE = SCRIPT_DIR / "cookidoo_search_token.json"
# Recipe Categories (ID -> German name) - Hardcoded fallback
CATEGORIES_FALLBACK = {
"vorspeisen": "VrkNavCategory-RPF-001",
"suppen": "VrkNavCategory-RPF-002",
"pasta": "VrkNavCategory-RPF-003",
"fleisch": "VrkNavCategory-RPF-004",
"fisch": "VrkNavCategory-RPF-005",
"vegetarisch": "VrkNavCategory-RPF-006",
"beilagen": "VrkNavCategory-RPF-008",
"desserts": "VrkNavCategory-RPF-011",
"herzhaft-backen": "VrkNavCategory-RPF-012",
"kuchen": "VrkNavCategory-RPF-013",
"brot": "VrkNavCategory-RPF-014",
"getraenke": "VrkNavCategory-RPF-015",
"grundrezepte": "VrkNavCategory-RPF-016",
"saucen": "VrkNavCategory-RPF-018",
"snacks": "VrkNavCategory-RPF-020",
}
CATEGORIES_CACHE_FILE = SCRIPT_DIR / "cookidoo_categories.json"
def load_categories() -> tuple[dict[str, str], bool]:
    """Return the category slug->ID mapping and whether it came from the cache.

    Reads CATEGORIES_CACHE_FILE when present; on a parse problem or an empty
    mapping, falls back to the hardcoded CATEGORIES_FALLBACK table.
    """
    if CATEGORIES_CACHE_FILE.exists():
        try:
            with open(CATEGORIES_CACHE_FILE, "r", encoding="utf-8") as f:
                cached = json.load(f).get("categories", {})
        except (json.JSONDecodeError, KeyError):
            cached = {}
        if cached:
            return cached, True
    return CATEGORIES_FALLBACK, False
def get_category_facets(api_key: str) -> list[str]:
    """Return every category ID known to the Algolia index (via facet buckets)."""
    endpoint = f"https://{ALGOLIA_APP_ID}-dsn.algolia.net/1/indexes/{ALGOLIA_INDEX}/query"
    payload = json.dumps({
        "query": "",
        "hitsPerPage": 0,  # only the facet counts are needed, no hits
        "facets": ["categories.id"],
    }).encode("utf-8")
    request = urllib.request.Request(
        endpoint,
        data=payload,
        headers={
            "X-Algolia-Application-Id": ALGOLIA_APP_ID,
            "X-Algolia-API-Key": api_key,
            "Content-Type": "application/json",
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            data = json.loads(resp.read().decode())
    except Exception as e:
        print(f" ❌ Facets-Abfrage fehlgeschlagen: {e}")
        return []
    return list(data.get("facets", {}).get("categories.id", {}))
def search_one_recipe_by_category(api_key: str, category_id: str) -> Optional[str]:
    """Return the ID of one arbitrary recipe in *category_id*, or None."""
    endpoint = f"https://{ALGOLIA_APP_ID}-dsn.algolia.net/1/indexes/{ALGOLIA_INDEX}/query"
    payload = json.dumps({
        "query": "",
        "hitsPerPage": 1,  # any single representative recipe suffices
        "filters": f"categories.id:{category_id}",
    }).encode("utf-8")
    request = urllib.request.Request(
        endpoint,
        data=payload,
        headers={
            "X-Algolia-Application-Id": ALGOLIA_APP_ID,
            "X-Algolia-API-Key": api_key,
            "Content-Type": "application/json",
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            data = json.loads(resp.read().decode())
    except Exception:
        return None
    hits = data.get("hits", [])
    return hits[0].get("id") if hits else None
def extract_category_name(recipe_data: dict, category_id: str) -> Optional[str]:
    """Return the title of the category matching *category_id*, or None."""
    return next(
        (
            cat.get("title")
            for cat in recipe_data.get("categories", [])
            if cat.get("id") == category_id
        ),
        None,
    )
def sync_categories(progress_callback=None) -> tuple[dict[str, str], list[str]]:
    """
    Sync the category table from Cookidoo:
      1. pull all category IDs from the Algolia facets,
      2. per category: find one recipe, fetch its details, read the category title,
      3. persist the resulting slug->ID mapping to CATEGORIES_CACHE_FILE.
    Returns (categories_dict, errors_list).
    """
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return {}, ["Nicht eingeloggt"]
    api_key = get_search_token(cookies)
    if not api_key:
        return {}, ["Konnte Such-Token nicht abrufen"]

    def report(message: str):
        # Progress output is optional; only forward when a callback was given.
        if progress_callback:
            progress_callback(message)

    report("Hole Kategorie-IDs aus Algolia...")
    category_ids = get_category_facets(api_key)
    if not category_ids:
        return {}, ["Keine Kategorien gefunden"]
    report(f"Gefunden: {len(category_ids)} Kategorien")

    mapping: dict[str, str] = {}
    problems: list[str] = []
    # Single-character transliteration table for building URL-friendly slugs.
    umlauts = str.maketrans({" ": "-", "ä": "ae", "ö": "oe", "ü": "ue", "ß": "ss"})
    for i, cat_id in enumerate(category_ids, 1):
        report(f"[{i}/{len(category_ids)}] {cat_id}...")
        recipe_id = search_one_recipe_by_category(api_key, cat_id)
        if not recipe_id:
            problems.append(f"{cat_id}: Kein Rezept gefunden")
            continue
        details = get_recipe_details(recipe_id)
        if not details or "error" in details:
            problems.append(f"{cat_id}: Rezeptdetails nicht abrufbar")
            continue
        cat_name = extract_category_name(details, cat_id)
        if not cat_name:
            problems.append(f"{cat_id}: Kategorie-Name nicht gefunden")
            continue
        # Derive the slug: lowercase, transliterate umlauts, drop everything
        # outside [a-z0-9-].
        cat_key = re.sub(r'[^a-z0-9-]', '', cat_name.lower().translate(umlauts))
        mapping[cat_key] = cat_id
        report(f" → {cat_name} ({cat_key})")

    if mapping:
        with open(CATEGORIES_CACHE_FILE, "w", encoding="utf-8") as f:
            json.dump(
                {
                    "timestamp": dt.datetime.now(dt.timezone.utc).isoformat(),
                    "categories": mapping,
                },
                f,
                ensure_ascii=False,
                indent=2,
            )
    return mapping, problems
# Module-level category tables, loaded once at import time (backward compat).
CATEGORIES, _ = load_categories()  # slug -> category ID
CATEGORY_NAMES = {v: k for k, v in CATEGORIES.items()}  # Reverse lookup: category ID -> slug
# ─────────────────────────────────────────────────────────────────────────────
# User Config Management
# ─────────────────────────────────────────────────────────────────────────────
def load_config() -> dict:
    """Read the user config from ~/.tmx_config.json; {} when missing/corrupt."""
    if CONFIG_FILE.exists():
        try:
            return json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
        except (json.JSONDecodeError, IOError):
            return {}
    return {}
def save_config(config: dict):
    """Write *config* to ~/.tmx_config.json (pretty-printed UTF-8 JSON)."""
    CONFIG_FILE.write_text(
        json.dumps(config, ensure_ascii=False, indent=2),
        encoding="utf-8",
    )
# ─────────────────────────────────────────────────────────────────────────────
# Cookie Management
# ─────────────────────────────────────────────────────────────────────────────
def load_cookies() -> dict[str, str]:
    """Load cookies from the JSON file as a name->value dict.

    The file uses Puppeteer's format: a list of objects with at least
    ``name`` and ``value`` keys; entries missing either are skipped.
    """
    if not COOKIES_FILE.exists():
        return {}
    with open(COOKIES_FILE, "r", encoding="utf-8") as f:
        entries = json.load(f)
    return {
        entry["name"]: entry["value"]
        for entry in entries
        if entry.get("name") and entry.get("value")
    }
def format_cookie_header(cookies: dict[str, str]) -> str:
    """Render a name->value dict as the value of an HTTP ``Cookie`` header."""
    pairs = [f"{name}={value}" for name, value in cookies.items()]
    return "; ".join(pairs)
def is_authenticated(cookies: dict[str, str]) -> bool:
    """True when either known auth cookie is present."""
    return any(name in cookies for name in ("v-authenticated", "_oauth2_proxy"))
def save_cookies_from_jar(jar: CookieJar):
    """Persist *jar* to COOKIES_FILE in Puppeteer-compatible JSON.

    Returns the serialized list of cookie dicts.
    """
    serialized = [
        {
            "name": cookie.name,
            "value": cookie.value,
            "domain": cookie.domain,
            "path": cookie.path,
            "expires": cookie.expires or -1,  # Puppeteer uses -1 for "no expiry"
            "httpOnly": cookie.has_nonstandard_attr("HttpOnly"),
            "secure": cookie.secure,
            "session": cookie.expires is None,
        }
        for cookie in jar
    ]
    with open(COOKIES_FILE, "w", encoding="utf-8") as f:
        json.dump(serialized, f, ensure_ascii=False, indent=2)
    return serialized
# ─────────────────────────────────────────────────────────────────────────────
# Login Flow (Vorwerk/Cidaas OAuth)
# ─────────────────────────────────────────────────────────────────────────────
class NoRedirectHandler(urllib.request.HTTPRedirectHandler):
    """Handler that captures redirects instead of following them."""
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        # Returning None makes urllib raise HTTPError for the redirect status,
        # so the caller can inspect the Location header itself.
        return None  # Don't follow redirects
def do_login(email: str, password: str) -> tuple[bool, str]:
    """
    Perform Cookidoo login via Vorwerk/Cidaas OAuth.

    Flow:
      1. Hit the OAuth start endpoint to obtain a ``requestId``.
      2. POST the credentials to the Cidaas login service.
      3. Follow the JS / meta-refresh redirect chain back to cookidoo.de.
      4. Verify auth cookies landed in the jar and persist them.

    Returns (success, message).
    """
    ctx = ssl.create_default_context()
    jar = CookieJar()
    # Opener that follows HTTP-level redirects automatically and stores cookies.
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(jar),
        urllib.request.HTTPSHandler(context=ctx),
    )
    headers_base = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
    }
    # Step 1: Start OAuth flow to get requestId
    print(" → Starte OAuth-Flow...")
    oauth_url = f"{COOKIDOO_BASE}/oauth2/start?market=de&ui_locales={LOCALE}&rd=/planning/{LOCALE}/my-week"
    req = urllib.request.Request(oauth_url, headers=headers_base)
    try:
        resp = opener.open(req, timeout=30)
        login_html = resp.read().decode("utf-8", errors="replace")
        login_url = resp.geturl()
    except urllib.error.HTTPError as e:
        return False, f"OAuth-Start fehlgeschlagen: HTTP {e.code}"
    except Exception as e:
        return False, f"OAuth-Start fehlgeschlagen: {e}"
    # Extract requestId: hidden form field first, query parameter as fallback.
    request_id_match = re.search(r'name="requestId"\s+value="([^"]+)"', login_html)
    if not request_id_match:
        request_id_match = re.search(r'requestId=([^&"]+)', login_url)
    if not request_id_match:
        return False, "Konnte requestId nicht finden"
    request_id = request_id_match.group(1)
    # Step 2: Submit login form
    print(" → Sende Anmeldedaten...")
    login_post_url = "https://ciam.prod.cookidoo.vorwerk-digital.com/login-srv/login"
    login_data = urllib.parse.urlencode({
        "requestId": request_id,
        "username": email,
        "password": password,
    }).encode("utf-8")
    login_headers = {
        **headers_base,
        "Content-Type": "application/x-www-form-urlencoded",
        "Origin": "https://eu.login.vorwerk.com",
        "Referer": login_url,
    }
    req = urllib.request.Request(login_post_url, data=login_data, headers=login_headers)
    try:
        resp = opener.open(req, timeout=30)
        result_html = resp.read().decode("utf-8", errors="replace")
        final_url = resp.geturl()
    except urllib.error.HTTPError as e:
        # FIX: also accept 301/308 as "redirect expected" (was 302/303/307 only).
        if e.code in (301, 302, 303, 307, 308):
            final_url = e.headers.get("Location", "")
            result_html = ""
        else:
            return False, f"Login fehlgeschlagen: HTTP {e.code}"
    except Exception as e:
        return False, f"Login fehlgeschlagen: {e}"
    # Step 3: Follow redirect chain back to Cookidoo
    print(" → Folge Redirects...")
    max_redirects = 10
    redirect_count = 0
    while redirect_count < max_redirects:
        # Check if we're back at Cookidoo with auth
        if "cookidoo.de" in final_url and "oauth2/start" not in final_url:
            req = urllib.request.Request(final_url, headers=headers_base)
            try:
                resp = opener.open(req, timeout=30)
                result_html = resp.read().decode("utf-8", errors="replace")
                final_url = resp.geturl()
                # Stop once the page confirms an authenticated session.
                if "is-authenticated" in result_html or "my-week" in final_url:
                    break
            except Exception:
                # Best effort: fall through and look for a scripted redirect.
                pass
        # Look for a JavaScript or <meta http-equiv="refresh"> redirect.
        redirect_match = re.search(r'location\.href\s*=\s*["\']([^"\']+)["\']', result_html)
        if not redirect_match:
            redirect_match = re.search(r'<meta[^>]+http-equiv="refresh"[^>]+url=([^"\'>\s]+)', result_html, re.I)
        if not redirect_match:
            break
        next_url = redirect_match.group(1)
        if not next_url.startswith("http"):
            # Resolve relative URLs against the current page.
            next_url = urllib.parse.urljoin(final_url, next_url)
        req = urllib.request.Request(next_url, headers=headers_base)
        try:
            resp = opener.open(req, timeout=30)
            result_html = resp.read().decode("utf-8", errors="replace")
            final_url = resp.geturl()
        except urllib.error.HTTPError as e:
            if e.code in (301, 302, 303, 307, 308):
                final_url = e.headers.get("Location", "")
            else:
                break
        except Exception:
            break
        redirect_count += 1
    # Step 4: Verify we got auth cookies
    auth_cookies = {c.name: c.value for c in jar if "cookidoo" in c.domain}
    if "v-authenticated" in auth_cookies or "_oauth2_proxy" in auth_cookies:
        save_cookies_from_jar(jar)
        cookie_count = len(jar)
        return True, f"Login erfolgreich! {cookie_count} Cookies gespeichert."
    # Check for known login-error markers. FIX: the haystack is lowercased, so
    # the needles must be lowercase too — the original "falsches Passwort"
    # (capital P) could never match.
    lowered = result_html.lower()
    if "falsches passwort" in lowered or "incorrect" in lowered:
        return False, "Falsches Passwort"
    if "nicht gefunden" in lowered or "not found" in lowered:
        return False, "E-Mail-Adresse nicht gefunden"
    return False, "Login fehlgeschlagen - keine Auth-Cookies erhalten"
# ─────────────────────────────────────────────────────────────────────────────
# HTTP Client
# ─────────────────────────────────────────────────────────────────────────────
def fetch(url: str, cookies: dict[str, str]) -> tuple[int, str]:
    """GET *url* with the given cookies; returns (status, body).

    On an HTTP error the body is "" and the status is the error code;
    on any other failure (timeout, network) the result is (0, "").
    """
    request_headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
    }
    if cookies:
        request_headers["Cookie"] = format_cookie_header(cookies)
    request = urllib.request.Request(url, headers=request_headers)
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            return resp.status, resp.read().decode("utf-8", errors="replace")
    except urllib.error.HTTPError as e:
        return e.code, ""
    except Exception as e:
        print(f"HTTP Error: {e}")
        return 0, ""
# ─────────────────────────────────────────────────────────────────────────────
# HTML Parser for Cookidoo Calendar (Regex-based)
# ─────────────────────────────────────────────────────────────────────────────
def parse_weekplan_html(html: str) -> list[dict]:
    """Parse calendar/week HTML and extract days with recipes using regex.

    Returns a list of day dicts:
        {"date", "dayName", "dayNumber", "isToday", "recipes"}
    where each recipe is {"id", "title", "url", "image"}.
    """
    days = []
    # Split the markup into per-day blocks via <plan-week-day date="..."> tags.
    # (A previously-compiled <li class="my-week__day"> pattern was dead code
    # — never used — and has been removed.)
    day_blocks = re.findall(
        r'<plan-week-day[^>]*date="([^"]+)"[^>]*>(.*?)</plan-week-day>',
        html,
        re.DOTALL
    )
    for date, block in day_blocks:
        # Extract day name and number
        day_short_match = re.search(r'class="my-week__day-short">([^<]+)<', block)
        day_num_match = re.search(r'class="my-week__day-number">([^<]+)<', block)
        is_today = 'my-week__today' in block or '>Heute<' in block
        day_name = day_short_match.group(1).strip() if day_short_match else ""
        day_number = day_num_match.group(1).strip() if day_num_match else ""
        # Extract recipes from this day
        recipes = []
        recipe_blocks = re.findall(
            r'<core-tile\s+data-recipe-id="([^"]+)"[^>]*>(.*?)</core-tile>',
            block,
            re.DOTALL
        )
        for recipe_id, recipe_block in recipe_blocks:
            # Title
            title_match = re.search(
                r'class="core-tile__description-text">([^<]+)<',
                recipe_block
            )
            title = title_match.group(1).strip() if title_match else None
            # Image (recipe photos are hosted on assets.tmecosys...)
            img_match = re.search(
                r'<img[^>]+src="(https://assets\.tmecosys[^"]+)"',
                recipe_block
            )
            image = img_match.group(1) if img_match else None
            # Tiles without a parsable title are skipped entirely.
            if title:
                recipes.append({
                    "id": recipe_id,
                    "title": title,
                    "url": f"{COOKIDOO_BASE}/recipes/recipe/{LOCALE}/{recipe_id}",
                    "image": image,
                })
        days.append({
            "date": date,
            "dayName": day_name,
            "dayNumber": day_number,
            "isToday": is_today,
            "recipes": recipes,
        })
    return days
# ─────────────────────────────────────────────────────────────────────────────
# Cookidoo API
# ─────────────────────────────────────────────────────────────────────────────
def fetch_week(cookies: dict, date: str, today: str) -> list[dict]:
    """Fetch one week of recipes starting from *date*; empty list on failure."""
    endpoint = f"{COOKIDOO_BASE}/planning/{LOCALE}/calendar/week?date={date}&today={today}"
    status, body = fetch(endpoint, cookies)
    if status != 200:
        print(f" ⚠ Fehler beim Laden der Woche {date}: HTTP {status}")
        return []
    # A login page instead of calendar markup means the session expired.
    got_login_page = "oauth2/start" in body or "login" in body.lower()[:500]
    return [] if got_login_page else parse_weekplan_html(body)
def sync_weekplan(since: str, days_count: int = 14) -> dict:
    """Sync the weekplan from Cookidoo, collecting *days_count* days from *since*.

    Returns {"timestamp", "sinceDate", "weekplan": {"days": [...]}} on success,
    or {"error": ...} when not logged in / the session expired.
    """
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return {"error": "Keine gültigen Cookies. Bitte zuerst einloggen."}
    today = dt.date.today().isoformat()
    try:
        start_date = dt.date.fromisoformat(since)
    except ValueError:
        # Malformed "since": fall back to today.
        start_date = dt.date.today()
    end_date = start_date + dt.timedelta(days=days_count)
    # Each calendar call returns ~7 days; over-fetch two extra weeks for safety.
    weeks_needed = (days_count // 7) + 2
    collected: dict[str, dict] = {}  # date -> day dict; also deduplicates
    for week_offset in range(weeks_needed):
        week_start = start_date + dt.timedelta(weeks=week_offset)
        if week_start > end_date:
            break
        week_date = week_start.isoformat()
        print(f" → Lade Woche ab {week_date}...")
        days = fetch_week(cookies, week_date, today)
        if not days:
            # An empty very first week means the session is gone; later empty
            # weeks simply mark the end of available data.
            if week_offset == 0:
                return {"error": "Session abgelaufen oder keine Daten. Bitte neu einloggen."}
            break
        for day in days:
            date = day.get("date")
            if not date or date in collected:
                continue
            # Keep only days inside the requested [start, end) window.
            if start_date <= dt.date.fromisoformat(date) < end_date:
                day["isToday"] = (date == today)
                collected[date] = day
    return {
        "timestamp": dt.datetime.now(dt.timezone.utc).isoformat(),
        "sinceDate": since,
        "weekplan": {"days": [collected[d] for d in sorted(collected)]},
    }
# ─────────────────────────────────────────────────────────────────────────────
# Cookidoo Recipe Search (Algolia)
# ─────────────────────────────────────────────────────────────────────────────
def get_search_token(cookies: dict[str, str]) -> Optional[str]:
    """Return an Algolia search API key, reusing a cached token when valid.

    The token endpoint's response is cached in SEARCH_TOKEN_FILE; a cached
    key is reused while its ``validUntil`` is more than 5 minutes away.
    Returns None when no token can be obtained.
    """
    # Check cached token
    if SEARCH_TOKEN_FILE.exists():
        try:
            with open(SEARCH_TOKEN_FILE, "r") as f:
                cached = json.load(f)
            # Check if still valid (with 5 min buffer)
            if cached.get("validUntil", 0) > dt.datetime.now().timestamp() + 300:
                return cached.get("apiKey")
        except (OSError, ValueError, AttributeError):
            # FIX: was a bare `except:` — now only swallows an unreadable or
            # corrupt cache (json.JSONDecodeError is a ValueError subclass;
            # AttributeError covers a non-dict payload) and refetches.
            pass
    # Fetch new token
    url = f"{COOKIDOO_BASE}/search/api/subscription/token"
    status, body = fetch(url, cookies)
    if status != 200:
        return None
    try:
        data = json.loads(body)
        # Cache the full response for subsequent calls.
        with open(SEARCH_TOKEN_FILE, "w") as f:
            json.dump(data, f)
        return data.get("apiKey")
    except (OSError, ValueError, AttributeError):
        # FIX: was a bare `except:` — malformed JSON or a write failure yields
        # None instead of silently hiding unrelated errors.
        return None
def search_recipes(
    query: str,
    limit: int = 10,
    max_time: Optional[int] = None,  # max total time in minutes
    difficulty: Optional[str] = None,  # easy, medium, advanced
    tm_version: Optional[str] = None,  # TM5, TM6, TM7
    category: Optional[str] = None,  # category key from CATEGORIES
) -> tuple[list[dict], int]:
    """
    Search Cookidoo recipes via Algolia.
    Returns (results, total_count); ([], 0) when not logged in or on errors.
    """
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return [], 0
    api_key = get_search_token(cookies)
    if not api_key:
        return [], 0
    # Assemble the Algolia filter expression from the optional constraints.
    clauses = []
    if max_time:
        clauses.append(f"totalTime <= {max_time * 60}")  # Algolia stores seconds
    if difficulty:
        clauses.append(f"difficulty:{difficulty}")
    if tm_version:
        clauses.append(f"tmversion:{tm_version}")
    if category:
        cat_id = CATEGORIES.get(category.lower())
        if cat_id:
            clauses.append(f"categories.id:{cat_id}")
    params = {"query": query, "hitsPerPage": limit}
    if clauses:
        params["filters"] = " AND ".join(clauses)
    endpoint = f"https://{ALGOLIA_APP_ID}-dsn.algolia.net/1/indexes/{ALGOLIA_INDEX}/query"
    request = urllib.request.Request(
        endpoint,
        data=json.dumps(params).encode("utf-8"),
        headers={
            "X-Algolia-Application-Id": ALGOLIA_APP_ID,
            "X-Algolia-API-Key": api_key,
            "Content-Type": "application/json",
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            data = json.loads(resp.read().decode())
    except Exception as e:
        print(f"Suche fehlgeschlagen: {e}")
        return [], 0
    results = []
    for hit in data.get("hits", []):
        recipe_id = hit.get("id", "")
        results.append({
            "id": recipe_id,
            "title": hit.get("title", "Unbekannt"),
            "url": f"{COOKIDOO_BASE}/recipes/recipe/{LOCALE}/{recipe_id}",
            "image": hit.get("image"),
            "totalTime": hit.get("totalTime"),  # in seconds
            "rating": hit.get("rating"),
            "description": hit.get("description"),
        })
    return results, data.get("nbHits", 0)
def format_time(seconds: Optional[int]) -> str:
    """Format a duration in seconds as a short human-readable German string."""
    if not seconds:
        return ""
    total_minutes = seconds // 60
    if total_minutes < 60:
        return f"{total_minutes} Min"
    hours, rest = divmod(total_minutes, 60)
    return f"{hours}h {rest}min" if rest else f"{hours}h"
def seconds_to_minutes(seconds: Optional[int]) -> Optional[int]:
    """Convert seconds to whole minutes; None for falsy input (None or 0)."""
    return seconds // 60 if seconds else None
# ─────────────────────────────────────────────────────────────────────────────
# Plan CRUD Operations
# ─────────────────────────────────────────────────────────────────────────────
def add_recipe_to_plan(recipe_id: str, date: str) -> tuple[bool, str]:
    """Add *recipe_id* to the plan on day *date*. Returns (ok, message)."""
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return False, "Nicht eingeloggt"
    payload = json.dumps({
        "recipeSource": "VORWERK",
        "recipeIds": [recipe_id],
        "dayKey": date,
    }).encode("utf-8")
    request = urllib.request.Request(
        f"{COOKIDOO_BASE}/planning/{LOCALE}/api/my-day",
        data=payload,
        headers={
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
            "Cookie": format_cookie_header(cookies),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "Origin": COOKIDOO_BASE,
            "Referer": f"{COOKIDOO_BASE}/planning/{LOCALE}/my-week",
        },
        method="PUT",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            result = json.loads(resp.read().decode())
        return True, result.get("message", "Rezept hinzugefügt")
    except urllib.error.HTTPError as e:
        # Success statuses can surface here too (e.g. an empty 204 body).
        if e.code in (200, 201, 204):
            return True, "Rezept hinzugefügt"
        return False, f"HTTP {e.code}"
    except Exception as e:
        return False, str(e)
def remove_recipe_from_plan(recipe_id: str, date: str) -> tuple[bool, str]:
    """Delete *recipe_id* from the plan on day *date*. Returns (ok, message)."""
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return False, "Nicht eingeloggt"
    endpoint = f"{COOKIDOO_BASE}/planning/{LOCALE}/api/my-day/{date}/recipes/{recipe_id}?recipeSource=VORWERK"
    request = urllib.request.Request(
        endpoint,
        headers={
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
            "Cookie": format_cookie_header(cookies),
            "Accept": "application/json",
            "Origin": COOKIDOO_BASE,
            "Referer": f"{COOKIDOO_BASE}/planning/{LOCALE}/my-week",
        },
        method="DELETE",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            result = json.loads(resp.read().decode())
        return True, result.get("message", "Rezept entfernt")
    except urllib.error.HTTPError as e:
        # Success statuses can surface here too (e.g. an empty 204 body).
        if e.code in (200, 204):
            return True, "Rezept entfernt"
        return False, f"HTTP {e.code}"
    except Exception as e:
        return False, str(e)
def move_recipe_in_plan(recipe_id: str, from_date: str, to_date: str) -> tuple[bool, str]:
    """Move a recipe between days: delete from *from_date*, add on *to_date*.

    Not atomic — if the add step fails, the recipe has already been removed.
    """
    ok, detail = remove_recipe_from_plan(recipe_id, from_date)
    if not ok:
        return False, f"Entfernen fehlgeschlagen: {detail}"
    ok, detail = add_recipe_to_plan(recipe_id, to_date)
    if not ok:
        return False, f"Hinzufügen fehlgeschlagen: {detail}"
    return True, "Rezept verschoben"
# ─────────────────────────────────────────────────────────────────────────────
# Shopping List
# ─────────────────────────────────────────────────────────────────────────────
def get_shopping_list() -> Optional[dict]:
    """Fetch the current shopping list JSON from Cookidoo.

    Returns the parsed payload, or None when not logged in or on any
    network/parse failure.
    """
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return None
    url = f"{COOKIDOO_BASE}/shopping/{LOCALE}"
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
        "Cookie": format_cookie_header(cookies),
        "Accept": "application/json",
    }
    ctx = ssl.create_default_context()
    req = urllib.request.Request(url, headers=headers)
    try:
        with urllib.request.urlopen(req, context=ctx, timeout=30) as resp:
            return json.loads(resp.read().decode())
    except (OSError, ValueError):
        # FIX: was a bare `except:`. OSError covers URLError/HTTPError and
        # socket problems; ValueError covers a non-JSON body (login page).
        return None
def add_recipes_to_shopping_list(recipe_ids: list[str]) -> tuple[bool, str]:
    """POST the given recipe IDs onto the shopping list. Returns (ok, message)."""
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return False, "Nicht eingeloggt"
    payload = json.dumps({"recipeIDs": recipe_ids}).encode("utf-8")
    request = urllib.request.Request(
        f"{COOKIDOO_BASE}/shopping/{LOCALE}/add-recipes",
        data=payload,
        headers={
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
            "Cookie": format_cookie_header(cookies),
            "Content-Type": "application/json",
            "Accept": "application/json",
            "Origin": COOKIDOO_BASE,
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            result = json.loads(resp.read().decode())
        return True, result.get("message", f"{len(recipe_ids)} Rezept(e) hinzugefügt")
    except urllib.error.HTTPError as e:
        return False, f"HTTP {e.code}"
    except Exception as e:
        return False, str(e)
def remove_recipe_from_shopping_list(recipe_id: str) -> tuple[bool, str]:
    """Remove one recipe from the shopping list. Returns (ok, message)."""
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return False, "Nicht eingeloggt"
    request = urllib.request.Request(
        f"{COOKIDOO_BASE}/shopping/{LOCALE}/recipe/{recipe_id}/remove",
        data=b"{}",  # endpoint expects a (possibly empty) JSON body
        headers={
            "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36",
            "Cookie": format_cookie_header(cookies),
            "Content-Type": "application/json",
            "Accept": "application/json",
        },
        method="DELETE",
    )
    try:
        with urllib.request.urlopen(request, context=ssl.create_default_context(), timeout=30) as resp:
            result = json.loads(resp.read().decode())
        return True, result.get("message", "Rezept entfernt")
    except urllib.error.HTTPError as e:
        return False, f"HTTP {e.code}"
    except Exception as e:
        return False, str(e)
def clear_shopping_list() -> tuple[bool, str]:
    """Clear the entire shopping list (recipes and additional items).

    Issues a DELETE against the shopping endpoint. Returns (ok, message).
    """
    cookies = load_cookies()
    if not is_authenticated(cookies):
        return False, "Nicht eingeloggt"
    # FIX: use the shared base/locale constants instead of a hardcoded URL so
    # this endpoint stays consistent with the rest of the module.
    url = f"{COOKIDOO_BASE}/shopping/{LOCALE}"
    headers = {
        "Cookie": format_cookie_header(cookies),
        "Accept": "application/json",
    }
    try:
        req = urllib.request.Request(url, method="DELETE", headers=headers)
        ctx = ssl.create_default_context()
        # FIX: timeout added — every other request in this module bounds at 30s.
        with urllib.request.urlopen(req, context=ctx, timeout=30) as resp:
            if resp.status == 200:
                return True, "Einkaufsliste geleert"
            return False, f"Unerwarteter Status: {resp.status}"
    except urllib.error.HTTPError as e:
        return False, f"HTTP-Fehler: {e.code}"
    except Exception as e:
        return False, str(e)
def add_custom_item_to_shopping_list(item_name: str) -> tuple[bool, str]:
"""Add a custom item (not from a recipe) to the shopping list."""
cookies = load_cookies()
if not is_authenticated(cookies):
return False, "Nicht eingeloggt"
url = "https://cookidoo.de/shopping/de-DE/additional-item"
headers = {
"Cookie": format_cookie_header(cookies),
"Accept": "application/json",
"Content-Type": "application/json",
}
payload = json.dumps({"itemValue": item_name}).encode("utf-8")
try:
req = urllib.request.Request(url, data=payload, method="POST", headers=headers)
ctx = ssl.create_default_context()