diff --git a/db/migrations/versions/20260101_120000_add_task_and_calendar.py b/db/migrations/versions/20260101_120000_add_task_and_calendar.py
new file mode 100644
index 0000000..3f054b1
--- /dev/null
+++ b/db/migrations/versions/20260101_120000_add_task_and_calendar.py
@@ -0,0 +1,170 @@
+"""Add task, calendar_event, and calendar_accounts tables
+
+Revision ID: g3b4c5d6e7f8
+Revises: add_exclude_folder_ids
+Create Date: 2026-01-01 12:00:00.000000
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+
+# revision identifiers, used by Alembic.
+revision: str = "g3b4c5d6e7f8"
+down_revision: Union[str, None] = "add_exclude_folder_ids"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ # Create calendar_accounts table (source for syncing)
+ op.create_table(
+ "calendar_accounts",
+ sa.Column("id", sa.BigInteger(), nullable=False),
+ sa.Column("name", sa.Text(), nullable=False),
+ sa.Column("calendar_type", sa.Text(), nullable=False),
+ sa.Column("caldav_url", sa.Text(), nullable=True),
+ sa.Column("caldav_username", sa.Text(), nullable=True),
+ sa.Column("caldav_password", sa.Text(), nullable=True),
+ sa.Column("google_account_id", sa.BigInteger(), nullable=True),
+ sa.Column(
+ "calendar_ids",
+ postgresql.ARRAY(sa.Text()),
+ server_default="{}",
+ nullable=False,
+ ),
+ sa.Column(
+ "tags",
+ postgresql.ARRAY(sa.Text()),
+ server_default="{}",
+ nullable=False,
+ ),
+ sa.Column("check_interval", sa.Integer(), server_default="15", nullable=False),
+ sa.Column("sync_past_days", sa.Integer(), server_default="30", nullable=False),
+ sa.Column("sync_future_days", sa.Integer(), server_default="90", nullable=False),
+ sa.Column("last_sync_at", sa.DateTime(timezone=True), nullable=True),
+ sa.Column("sync_error", sa.Text(), nullable=True),
+ sa.Column("active", sa.Boolean(), server_default="true", nullable=False),
+ sa.Column(
+ "created_at",
+ sa.DateTime(timezone=True),
+ server_default=sa.func.now(),
+ nullable=False,
+ ),
+ sa.Column(
+ "updated_at",
+ sa.DateTime(timezone=True),
+ server_default=sa.func.now(),
+ nullable=False,
+ ),
+ sa.ForeignKeyConstraint(
+ ["google_account_id"], ["google_accounts.id"], ondelete="SET NULL"
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ sa.CheckConstraint("calendar_type IN ('caldav', 'google')"),
+ )
+ op.create_index(
+ "calendar_accounts_active_idx",
+ "calendar_accounts",
+ ["active", "last_sync_at"],
+ unique=False,
+ )
+ op.create_index(
+ "calendar_accounts_type_idx",
+ "calendar_accounts",
+ ["calendar_type"],
+ unique=False,
+ )
+
+ # Create task table
+ op.create_table(
+ "task",
+ sa.Column("id", sa.BigInteger(), nullable=False),
+ sa.Column("task_title", sa.Text(), nullable=False),
+ sa.Column("due_date", sa.DateTime(timezone=True), nullable=True),
+ sa.Column("priority", sa.Text(), nullable=True),
+ sa.Column("status", sa.Text(), server_default="pending", nullable=False),
+ sa.Column("recurrence", sa.Text(), nullable=True),
+ sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True),
+ sa.Column("source_item_id", sa.BigInteger(), nullable=True),
+ sa.ForeignKeyConstraint(["id"], ["source_item.id"], ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(
+ ["source_item_id"], ["source_item.id"], ondelete="SET NULL"
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ sa.CheckConstraint(
+ "status IN ('pending', 'in_progress', 'done', 'cancelled')",
+ name="task_status_check",
+ ),
+ sa.CheckConstraint(
+ "priority IS NULL OR priority IN ('low', 'medium', 'high', 'urgent')",
+ name="task_priority_check",
+ ),
+ )
+ op.create_index("task_due_date_idx", "task", ["due_date"], unique=False)
+ op.create_index("task_status_idx", "task", ["status"], unique=False)
+ op.create_index("task_priority_idx", "task", ["priority"], unique=False)
+ op.create_index("task_source_item_idx", "task", ["source_item_id"], unique=False)
+
+ # Create calendar_event table
+ op.create_table(
+ "calendar_event",
+ sa.Column("id", sa.BigInteger(), nullable=False),
+ sa.Column("event_title", sa.Text(), nullable=False),
+ sa.Column("start_time", sa.DateTime(timezone=True), nullable=False),
+ sa.Column("end_time", sa.DateTime(timezone=True), nullable=True),
+ sa.Column("all_day", sa.Boolean(), server_default="false", nullable=False),
+ sa.Column("location", sa.Text(), nullable=True),
+ sa.Column("recurrence_rule", sa.Text(), nullable=True),
+ sa.Column("calendar_account_id", sa.BigInteger(), nullable=True),
+ sa.Column("calendar_name", sa.Text(), nullable=True),
+ sa.Column("external_id", sa.Text(), nullable=True),
+ sa.Column(
+ "event_metadata",
+ postgresql.JSONB(astext_type=sa.Text()),
+ server_default="{}",
+ nullable=True,
+ ),
+ sa.ForeignKeyConstraint(["id"], ["source_item.id"], ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(
+ ["calendar_account_id"], ["calendar_accounts.id"], ondelete="SET NULL"
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index("calendar_event_start_idx", "calendar_event", ["start_time"], unique=False)
+ op.create_index("calendar_event_end_idx", "calendar_event", ["end_time"], unique=False)
+ op.create_index("calendar_event_account_idx", "calendar_event", ["calendar_account_id"], unique=False)
+ op.create_index("calendar_event_calendar_idx", "calendar_event", ["calendar_name"], unique=False)
+ op.create_index(
+ "calendar_event_external_idx",
+ "calendar_event",
+ ["calendar_account_id", "external_id"],
+ unique=True,
+ postgresql_where=sa.text("external_id IS NOT NULL"),
+ )
+
+
+def downgrade() -> None:
+ # Drop calendar_event
+ op.drop_index("calendar_event_external_idx", table_name="calendar_event")
+ op.drop_index("calendar_event_calendar_idx", table_name="calendar_event")
+ op.drop_index("calendar_event_account_idx", table_name="calendar_event")
+ op.drop_index("calendar_event_end_idx", table_name="calendar_event")
+ op.drop_index("calendar_event_start_idx", table_name="calendar_event")
+ op.drop_table("calendar_event")
+
+ # Drop task
+ op.drop_index("task_source_item_idx", table_name="task")
+ op.drop_index("task_priority_idx", table_name="task")
+ op.drop_index("task_status_idx", table_name="task")
+ op.drop_index("task_due_date_idx", table_name="task")
+ op.drop_table("task")
+
+ # Drop calendar_accounts
+ op.drop_index("calendar_accounts_type_idx", table_name="calendar_accounts")
+ op.drop_index("calendar_accounts_active_idx", table_name="calendar_accounts")
+ op.drop_table("calendar_accounts")
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 31ef6b0..0da0603 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -206,7 +206,7 @@ services:
<<: *worker-base
environment:
<<: *worker-env
- QUEUES: "backup,blogs,comic,discord,ebooks,email,forums,github,google,people,photo_embed,maintenance,notes,scheduler"
+ QUEUES: "backup,blogs,calendar,comic,discord,ebooks,email,forums,github,google,people,photo_embed,maintenance,notes,scheduler"
ingest-hub:
<<: *worker-base
diff --git a/frontend/src/App.css b/frontend/src/App.css
index 7359d3e..f7ed210 100644
--- a/frontend/src/App.css
+++ b/frontend/src/App.css
@@ -2029,4 +2029,191 @@ a.folder-item-name:hover {
background: #fffbeb;
color: #b45309;
border: 1px solid #f59e0b;
+}
+
+/* === Calendar Panel Events Section === */
+
+.calendar-account-card {
+ background: #f8fafc;
+ border: 1px solid #e2e8f0;
+ border-radius: 8px;
+ padding: 1rem;
+}
+
+.calendar-events-section {
+ margin-top: 1rem;
+ padding-top: 1rem;
+ border-top: 1px solid #e2e8f0;
+}
+
+.calendar-events-section h5 {
+ font-size: 0.9rem;
+ color: #4a5568;
+ font-weight: 500;
+ margin-bottom: 0.75rem;
+}
+
+.no-events {
+ color: #718096;
+ font-size: 0.875rem;
+ font-style: italic;
+}
+
+.calendar-groups {
+ display: flex;
+ flex-direction: column;
+ gap: 0.5rem;
+}
+
+.calendar-group {
+ background: white;
+ border: 1px solid #e2e8f0;
+ border-radius: 6px;
+ overflow: hidden;
+}
+
+.calendar-group-header {
+ display: flex;
+ align-items: center;
+ gap: 0.5rem;
+ width: 100%;
+ padding: 0.75rem 1rem;
+ background: #f8fafc;
+ border: none;
+ cursor: pointer;
+ text-align: left;
+ font-size: 0.875rem;
+ transition: background-color 0.15s;
+}
+
+.calendar-group-header:hover {
+ background: #edf2f7;
+}
+
+.calendar-group-header.expanded {
+ border-bottom: 1px solid #e2e8f0;
+}
+
+.calendar-expand-icon {
+ color: #718096;
+ font-size: 0.75rem;
+ width: 12px;
+ transition: transform 0.15s;
+}
+
+.calendar-group-name {
+ flex: 1;
+ font-weight: 500;
+ color: #2d3748;
+}
+
+.calendar-event-count {
+ background: #edf2f7;
+ color: #4a5568;
+ padding: 0.125rem 0.5rem;
+ border-radius: 10px;
+ font-size: 0.75rem;
+ font-weight: 500;
+}
+
+.calendar-events-list {
+ max-height: 300px;
+ overflow-y: auto;
+}
+
+.calendar-event-item {
+ display: flex;
+ gap: 1rem;
+ padding: 0.625rem 1rem;
+ border-bottom: 1px solid #f0f4f8;
+ transition: background-color 0.15s;
+}
+
+.calendar-event-item:last-child {
+ border-bottom: none;
+}
+
+.calendar-event-item:hover {
+ background: #f8fafc;
+}
+
+.calendar-event-item.all-day {
+ background: #f0f9ff;
+}
+
+.calendar-event-item.all-day:hover {
+ background: #e0f2fe;
+}
+
+.event-date-col {
+ display: flex;
+ flex-direction: column;
+ gap: 0.125rem;
+ min-width: 100px;
+ flex-shrink: 0;
+}
+
+.event-date {
+ font-size: 0.8rem;
+ font-weight: 500;
+ color: #4a5568;
+}
+
+.event-time {
+ font-size: 0.75rem;
+ color: #718096;
+}
+
+.event-time.all-day-badge {
+ color: #3182ce;
+ font-weight: 500;
+}
+
+.event-info-col {
+ display: flex;
+ flex-direction: column;
+ gap: 0.25rem;
+ flex: 1;
+ min-width: 0;
+}
+
+.event-info-col .event-title {
+ font-size: 0.875rem;
+ color: #2d3748;
+ font-weight: 500;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.event-location {
+ font-size: 0.75rem;
+ color: #718096;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ white-space: nowrap;
+}
+
+.event-recurring-badge {
+ display: inline-block;
+ background: #e9d8fd;
+ color: #6b46c1;
+ padding: 0.125rem 0.375rem;
+ border-radius: 3px;
+ font-size: 0.65rem;
+ font-weight: 500;
+ width: fit-content;
+}
+
+@media (max-width: 768px) {
+ .calendar-event-item {
+ flex-direction: column;
+ gap: 0.375rem;
+ }
+
+ .event-date-col {
+ flex-direction: row;
+ gap: 0.5rem;
+ min-width: auto;
+ }
}
\ No newline at end of file
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index 4d9dee8..ad67054 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -4,7 +4,7 @@ import './App.css'
import { useAuth } from '@/hooks/useAuth'
import { useOAuth } from '@/hooks/useOAuth'
-import { Loading, LoginPrompt, AuthError, Dashboard, Search, Sources } from '@/components'
+import { Loading, LoginPrompt, AuthError, Dashboard, Search, Sources, Calendar } from '@/components'
// AuthWrapper handles redirects based on auth state
const AuthWrapper = () => {
@@ -102,6 +102,14 @@ const AuthWrapper = () => {
)
} />
+            <Route path="/calendar" element={
+              isAuthenticated ? (
+                <Calendar />
+              ) : (
+                <Navigate to="/" replace />
+              )
+            } />
+
{/* Default redirect */}
diff --git a/frontend/src/components/Dashboard.jsx b/frontend/src/components/Dashboard.jsx
index 2856859..c59a620 100644
--- a/frontend/src/components/Dashboard.jsx
+++ b/frontend/src/components/Dashboard.jsx
@@ -31,6 +31,11 @@ const Dashboard = ({ onLogout }) => {
Manage email, GitHub, RSS feeds, and Google Drive
+
+ Calendar
+ View upcoming events from your calendars
+
+
console.log(await listNotes())}>
📝 Notes
Create and manage your notes
diff --git a/frontend/src/components/calendar/Calendar.css b/frontend/src/components/calendar/Calendar.css
new file mode 100644
index 0000000..c4daa82
--- /dev/null
+++ b/frontend/src/components/calendar/Calendar.css
@@ -0,0 +1,423 @@
+.calendar-view {
+ width: 100%;
+ max-width: 1200px;
+ margin: 0 auto;
+ padding: 1.5rem;
+ min-height: calc(100vh - 3rem);
+ display: flex;
+ flex-direction: column;
+ box-sizing: border-box;
+}
+
+.calendar-header {
+ display: flex;
+ align-items: center;
+ justify-content: space-between;
+ margin-bottom: 1rem;
+ flex-shrink: 0;
+}
+
+.calendar-header h1 {
+ margin: 0;
+ font-size: 1.5rem;
+ font-weight: 500;
+ color: #333;
+}
+
+.back-btn {
+ padding: 0.5rem 1rem;
+ background: #f0f0f0;
+ color: #333;
+ text-decoration: none;
+ border-radius: 6px;
+ font-size: 0.9rem;
+ transition: background 0.2s;
+}
+
+.back-btn:hover {
+ background: #e0e0e0;
+}
+
+.calendar-nav {
+ display: flex;
+ gap: 0.5rem;
+ align-items: center;
+}
+
+.nav-btn {
+ width: 32px;
+ height: 32px;
+ border: 1px solid #ddd;
+ background: white;
+ border-radius: 6px;
+ cursor: pointer;
+ font-size: 1rem;
+ color: #555;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ transition: all 0.2s;
+}
+
+.nav-btn:hover {
+ background: #f5f5f5;
+ border-color: #ccc;
+}
+
+.today-btn {
+ padding: 0.4rem 1rem;
+ border: 1px solid #ddd;
+ background: white;
+ border-radius: 6px;
+ cursor: pointer;
+ font-size: 0.85rem;
+ color: #555;
+ transition: all 0.2s;
+}
+
+.today-btn:hover {
+ background: #f5f5f5;
+ border-color: #ccc;
+}
+
+.calendar-error {
+ background: #fee;
+ border: 1px solid #fcc;
+ border-radius: 8px;
+ padding: 1rem;
+ margin-bottom: 1rem;
+ text-align: center;
+}
+
+.calendar-error p {
+ margin: 0 0 0.5rem 0;
+ color: #c00;
+}
+
+.calendar-error button {
+ padding: 0.4rem 1rem;
+ background: #c00;
+ color: white;
+ border: none;
+ border-radius: 4px;
+ cursor: pointer;
+}
+
+.calendar-grid {
+ display: grid;
+ grid-template-columns: repeat(7, 1fr);
+ grid-template-rows: auto repeat(6, minmax(90px, 1fr));
+ width: 100%;
+ min-height: 600px;
+ border: 1px solid #e0e0e0;
+ border-radius: 8px;
+ overflow: hidden;
+ background: white;
+}
+
+.calendar-day-header {
+ padding: 0.75rem 0.5rem;
+ text-align: center;
+ font-size: 0.75rem;
+ font-weight: 600;
+ color: #888;
+ text-transform: uppercase;
+ background: #fafafa;
+ border-bottom: 1px solid #e0e0e0;
+}
+
+.calendar-cell {
+ border-right: 1px solid #f0f0f0;
+ border-bottom: 1px solid #f0f0f0;
+ padding: 0.25rem;
+ display: flex;
+ flex-direction: column;
+ overflow: hidden;
+ background: white;
+}
+
+.calendar-cell:nth-child(7n) {
+ border-right: none;
+}
+
+.calendar-cell.other-month {
+ background: #fafafa;
+}
+
+.calendar-cell.other-month .cell-date {
+ color: #bbb;
+}
+
+.calendar-cell.today {
+ background: #f0f7ff;
+}
+
+.calendar-cell.today .cell-date {
+ background: #4a90d9;
+ color: white;
+ border-radius: 50%;
+ width: 24px;
+ height: 24px;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+}
+
+.cell-date {
+ font-size: 0.85rem;
+ font-weight: 500;
+ color: #333;
+ padding: 0.25rem;
+ margin-bottom: 0.25rem;
+ flex-shrink: 0;
+}
+
+.cell-events {
+ flex: 1;
+ overflow: hidden;
+ display: flex;
+ flex-direction: column;
+ gap: 2px;
+}
+
+.event-item {
+ font-size: 0.7rem;
+ padding: 2px 4px;
+ border-radius: 3px;
+ background: #e8f0fe;
+ color: #1a73e8;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: ellipsis;
+ cursor: pointer;
+ line-height: 1.3;
+ transition: opacity 0.15s;
+}
+
+.event-item:hover {
+ opacity: 0.8;
+}
+
+.event-item.all-day {
+ background: #1a73e8;
+ color: white;
+}
+
+.event-time {
+ font-weight: 600;
+ margin-right: 3px;
+ font-size: 0.65rem;
+}
+
+.event-title {
+ overflow: hidden;
+ text-overflow: ellipsis;
+}
+
+.more-events {
+ font-size: 0.7rem;
+ color: #666;
+ padding: 2px 4px;
+ cursor: pointer;
+}
+
+.more-events:hover {
+ color: #333;
+}
+
+.loading-overlay {
+ position: fixed;
+ top: 50%;
+ left: 50%;
+ transform: translate(-50%, -50%);
+ background: rgba(255, 255, 255, 0.95);
+ padding: 1.5rem 2.5rem;
+ border-radius: 8px;
+ box-shadow: 0 4px 20px rgba(0, 0, 0, 0.15);
+ font-size: 0.95rem;
+ color: #555;
+}
+
+.calendar-footer {
+ margin-top: 1rem;
+ text-align: center;
+ flex-shrink: 0;
+}
+
+.config-link {
+ color: #666;
+ font-size: 0.85rem;
+ text-decoration: none;
+}
+
+.config-link:hover {
+ color: #333;
+ text-decoration: underline;
+}
+
+@media (max-width: 768px) {
+ .calendar-view {
+ padding: 1rem;
+ height: auto;
+ min-height: 100vh;
+ }
+
+ .calendar-header {
+ flex-wrap: wrap;
+ gap: 0.75rem;
+ }
+
+ .calendar-header h1 {
+ order: 2;
+ width: 100%;
+ text-align: center;
+ font-size: 1.25rem;
+ }
+
+ .back-btn {
+ order: 1;
+ }
+
+ .calendar-nav {
+ order: 3;
+ margin-left: auto;
+ }
+
+ .calendar-grid {
+ min-height: 500px;
+ }
+
+ .calendar-day-header {
+ padding: 0.5rem 0.25rem;
+ font-size: 0.65rem;
+ }
+
+ .cell-date {
+ font-size: 0.75rem;
+ }
+
+ .event-item {
+ font-size: 0.6rem;
+ padding: 1px 3px;
+ }
+
+ .event-time {
+ display: none;
+ }
+}
+
+/* Event Detail Modal */
+.event-modal-overlay {
+ position: fixed;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: rgba(0, 0, 0, 0.4);
+ display: flex;
+ align-items: center;
+ justify-content: center;
+ z-index: 1000;
+ padding: 1rem;
+}
+
+.event-modal {
+ background: white;
+ border-radius: 12px;
+ width: 100%;
+ max-width: 450px;
+ max-height: 80vh;
+ overflow: auto;
+ box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2);
+}
+
+.event-modal-header {
+ display: flex;
+ justify-content: space-between;
+ align-items: flex-start;
+ padding: 1.25rem 1.5rem;
+ border-bottom: 1px solid #e0e0e0;
+ gap: 1rem;
+}
+
+.event-modal-header h2 {
+ margin: 0;
+ font-size: 1.25rem;
+ font-weight: 600;
+ color: #333;
+ line-height: 1.4;
+}
+
+.event-modal-header .modal-close {
+ background: none;
+ border: none;
+ font-size: 1.75rem;
+ color: #888;
+ cursor: pointer;
+ padding: 0;
+ line-height: 1;
+ flex-shrink: 0;
+}
+
+.event-modal-header .modal-close:hover {
+ color: #333;
+}
+
+.event-modal-content {
+ padding: 1.25rem 1.5rem;
+}
+
+.event-detail {
+ display: flex;
+ flex-direction: column;
+ gap: 0.25rem;
+ padding: 0.75rem 0;
+ border-bottom: 1px solid #f0f0f0;
+}
+
+.event-detail:last-child {
+ border-bottom: none;
+}
+
+.detail-label {
+ font-size: 0.75rem;
+ font-weight: 500;
+ color: #888;
+ text-transform: uppercase;
+ letter-spacing: 0.5px;
+}
+
+.detail-value {
+ font-size: 0.95rem;
+ color: #333;
+}
+
+.detail-value.recurring-badge {
+ display: inline-block;
+ background: #e9d8fd;
+ color: #6b46c1;
+ padding: 0.25rem 0.5rem;
+ border-radius: 4px;
+ font-size: 0.8rem;
+ font-weight: 500;
+ width: fit-content;
+}
+
+@media (max-width: 768px) {
+ .event-modal {
+ max-width: 100%;
+ margin: 0.5rem;
+ }
+
+ .event-modal-header {
+ padding: 1rem 1.25rem;
+ }
+
+ .event-modal-header h2 {
+ font-size: 1.1rem;
+ }
+
+ .event-modal-content {
+ padding: 1rem 1.25rem;
+ }
+}
diff --git a/frontend/src/components/calendar/Calendar.tsx b/frontend/src/components/calendar/Calendar.tsx
new file mode 100644
index 0000000..e16f95a
--- /dev/null
+++ b/frontend/src/components/calendar/Calendar.tsx
@@ -0,0 +1,275 @@
+import { useState, useEffect, useCallback, useMemo } from 'react'
+import { Link } from 'react-router-dom'
+import { useSources, CalendarEvent } from '@/hooks/useSources'
+import './Calendar.css'
+
+const DAYS_OF_WEEK = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+const MONTH_NAMES = [
+ 'January', 'February', 'March', 'April', 'May', 'June',
+ 'July', 'August', 'September', 'October', 'November', 'December'
+]
+
+interface DayCell {
+ date: Date
+ isCurrentMonth: boolean
+ isToday: boolean
+ events: CalendarEvent[]
+}
+
+const Calendar = () => {
+ const { getUpcomingEvents } = useSources()
+ const [events, setEvents] = useState<CalendarEvent[]>([])
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState<string | null>(null)
+ const [currentDate, setCurrentDate] = useState(new Date())
+ const [selectedEvent, setSelectedEvent] = useState<CalendarEvent | null>(null)
+
+ const loadEvents = useCallback(async (date: Date) => {
+ setLoading(true)
+ setError(null)
+ try {
+ // Calculate range for the month view (include overflow days)
+ const year = date.getFullYear()
+ const month = date.getMonth()
+ // Start from first day of previous month (for overflow)
+ const startDate = new Date(year, month - 1, 1)
+ // End at last day of next month (for overflow)
+ const endDate = new Date(year, month + 2, 0)
+
+ const data = await getUpcomingEvents({
+ startDate: startDate.toISOString(),
+ endDate: endDate.toISOString(),
+ limit: 200,
+ })
+ setEvents(data)
+ } catch (e) {
+ setError(e instanceof Error ? e.message : 'Failed to load events')
+ } finally {
+ setLoading(false)
+ }
+ }, [getUpcomingEvents])
+
+ useEffect(() => {
+ loadEvents(currentDate)
+ }, [loadEvents, currentDate])
+
+ // Generate calendar grid for current month view
+ const calendarDays = useMemo((): DayCell[] => {
+ const year = currentDate.getFullYear()
+ const month = currentDate.getMonth()
+
+ // First day of the month
+ const firstDay = new Date(year, month, 1)
+ // Last day of the month
+ const lastDay = new Date(year, month + 1, 0)
+
+ // Get the day of week for first day (0 = Sunday, convert to Monday start)
+ let startDayOfWeek = firstDay.getDay()
+ startDayOfWeek = startDayOfWeek === 0 ? 6 : startDayOfWeek - 1 // Convert to Monday = 0
+
+ const days: DayCell[] = []
+ const today = new Date()
+ today.setHours(0, 0, 0, 0)
+
+ // Add days from previous month to fill the first week
+ const prevMonth = new Date(year, month, 0)
+ for (let i = startDayOfWeek - 1; i >= 0; i--) {
+ const date = new Date(year, month - 1, prevMonth.getDate() - i)
+ days.push({
+ date,
+ isCurrentMonth: false,
+ isToday: date.getTime() === today.getTime(),
+ events: getEventsForDate(date, events),
+ })
+ }
+
+ // Add days of current month
+ for (let day = 1; day <= lastDay.getDate(); day++) {
+ const date = new Date(year, month, day)
+ days.push({
+ date,
+ isCurrentMonth: true,
+ isToday: date.getTime() === today.getTime(),
+ events: getEventsForDate(date, events),
+ })
+ }
+
+ // Add days from next month to complete the grid (6 rows)
+ const remainingDays = 42 - days.length // 6 weeks * 7 days
+ for (let day = 1; day <= remainingDays; day++) {
+ const date = new Date(year, month + 1, day)
+ days.push({
+ date,
+ isCurrentMonth: false,
+ isToday: date.getTime() === today.getTime(),
+ events: getEventsForDate(date, events),
+ })
+ }
+
+ return days
+ }, [currentDate, events])
+
+ const goToPreviousMonth = () => {
+ setCurrentDate(new Date(currentDate.getFullYear(), currentDate.getMonth() - 1, 1))
+ }
+
+ const goToNextMonth = () => {
+ setCurrentDate(new Date(currentDate.getFullYear(), currentDate.getMonth() + 1, 1))
+ }
+
+ const goToToday = () => {
+ setCurrentDate(new Date())
+ }
+
+ const formatEventTime = (event: CalendarEvent) => {
+ if (event.all_day) return ''
+ const date = new Date(event.start_time)
+ return date.toLocaleTimeString('en-US', { hour: 'numeric', minute: '2-digit' }).replace(' ', '')
+ }
+
+  return (
+    <div className="calendar-view">
+      <div className="calendar-header">
+        <Link to="/" className="back-btn">Back</Link>
+        <h1>{MONTH_NAMES[currentDate.getMonth()]} {currentDate.getFullYear()}</h1>
+        <div className="calendar-nav">
+          <button className="nav-btn" onClick={goToPreviousMonth}>‹</button>
+          <button className="today-btn" onClick={goToToday}>Today</button>
+          <button className="nav-btn" onClick={goToNextMonth}>›</button>
+        </div>
+      </div>
+
+      {error && (
+        <div className="calendar-error">
+          <p>{error}</p>
+          <button onClick={() => loadEvents(currentDate)}>Retry</button>
+        </div>
+      )}
+
+      <div className="calendar-grid">
+        {/* Day headers */}
+        {DAYS_OF_WEEK.map(day => (
+          <div key={day} className="calendar-day-header">{day}</div>
+        ))}
+
+        {/* Calendar cells */}
+        {calendarDays.map((day, index) => (
+          <div
+            key={index}
+            className={`calendar-cell ${day.isCurrentMonth ? '' : 'other-month'} ${day.isToday ? 'today' : ''}`}
+          >
+            <div className="cell-date">{day.date.getDate()}</div>
+            <div className="cell-events">
+              {day.events.slice(0, 4).map((event, eventIndex) => (
+                <div
+                  key={eventIndex}
+                  className={`event-item ${event.all_day ? 'all-day' : ''}`}
+                  onClick={(e) => {
+                    e.stopPropagation()
+                    setSelectedEvent(event)
+                  }}
+                >
+                  {!event.all_day && (
+                    <span className="event-time">{formatEventTime(event)}</span>
+                  )}
+                  <span className="event-title">{event.event_title}</span>
+                </div>
+              ))}
+              {day.events.length > 4 && (
+                <div className="more-events">+{day.events.length - 4} more</div>
+              )}
+            </div>
+          </div>
+        ))}
+      </div>
+
+      {loading && <div className="loading-overlay">Loading events...</div>}
+
+      {/* Event Detail Modal */}
+      {selectedEvent && (
+        <div className="event-modal-overlay" onClick={() => setSelectedEvent(null)}>
+          <div className="event-modal" onClick={(e) => e.stopPropagation()}>
+            <div className="event-modal-header">
+              <h2>{selectedEvent.event_title}</h2>
+              <button className="modal-close" onClick={() => setSelectedEvent(null)}>×</button>
+            </div>
+            <div className="event-modal-content">
+              <div className="event-detail">
+                <span className="detail-label">Date</span>
+                <span className="detail-value">
+                  {new Date(selectedEvent.start_time).toLocaleDateString('en-US', {
+                    weekday: 'long',
+                    year: 'numeric',
+                    month: 'long',
+                    day: 'numeric'
+                  })}
+                </span>
+              </div>
+
+              <div className="event-detail">
+                <span className="detail-label">Time</span>
+                <span className="detail-value">
+                  {selectedEvent.all_day ? 'All day' : (
+                    <>
+                      {new Date(selectedEvent.start_time).toLocaleTimeString('en-US', {
+                        hour: 'numeric',
+                        minute: '2-digit'
+                      })}
+                      {selectedEvent.end_time && (
+                        <> – {new Date(selectedEvent.end_time).toLocaleTimeString('en-US', {
+                          hour: 'numeric',
+                          minute: '2-digit'
+                        })}</>
+                      )}
+                    </>
+                  )}
+                </span>
+              </div>
+
+              {selectedEvent.location && (
+                <div className="event-detail">
+                  <span className="detail-label">Location</span>
+                  <span className="detail-value">{selectedEvent.location}</span>
+                </div>
+              )}
+
+              {selectedEvent.calendar_name && (
+                <div className="event-detail">
+                  <span className="detail-label">Calendar</span>
+                  <span className="detail-value">{selectedEvent.calendar_name}</span>
+                </div>
+              )}
+
+              {selectedEvent.recurrence_rule && (
+                <div className="event-detail">
+                  <span className="detail-label">Repeats</span>
+                  <span className="detail-value recurring-badge">Recurring event</span>
+                </div>
+              )}
+            </div>
+          </div>
+        </div>
+      )}
+
+      <div className="calendar-footer">
+        <Link to="/sources" className="config-link">Configure calendar accounts</Link>
+      </div>
+    </div>
+  )
+}
+
+function getEventsForDate(date: Date, events: CalendarEvent[]): CalendarEvent[] {
+ // Compare calendar days in local time so events land on the day the user actually sees
+ return events.filter(event => {
+ const eventDate = new Date(event.start_time)
+ return eventDate.getFullYear() === date.getFullYear() &&
+ eventDate.getMonth() === date.getMonth() &&
+ eventDate.getDate() === date.getDate()
+ }).sort((a, b) => {
+ // All-day events first, then by time
+ if (a.all_day && !b.all_day) return -1
+ if (!a.all_day && b.all_day) return 1
+ return new Date(a.start_time).getTime() - new Date(b.start_time).getTime()
+ })
+}
+
+export default Calendar
diff --git a/frontend/src/components/calendar/index.ts b/frontend/src/components/calendar/index.ts
new file mode 100644
index 0000000..7c09be7
--- /dev/null
+++ b/frontend/src/components/calendar/index.ts
@@ -0,0 +1 @@
+export { default } from './Calendar'
diff --git a/frontend/src/components/index.js b/frontend/src/components/index.js
index eaf565a..a53642f 100644
--- a/frontend/src/components/index.js
+++ b/frontend/src/components/index.js
@@ -2,5 +2,6 @@ export { default as Loading } from './Loading'
export { default as Dashboard } from './Dashboard'
export { default as Search } from './search'
export { default as Sources } from './sources'
+export { default as Calendar } from './calendar'
export { default as LoginPrompt } from './auth/LoginPrompt'
export { default as AuthError } from './auth/AuthError'
\ No newline at end of file
diff --git a/frontend/src/components/sources/Sources.tsx b/frontend/src/components/sources/Sources.tsx
index b56521e..56998c7 100644
--- a/frontend/src/components/sources/Sources.tsx
+++ b/frontend/src/components/sources/Sources.tsx
@@ -1,6 +1,6 @@
import { useState, useEffect, useCallback } from 'react'
import { Link } from 'react-router-dom'
-import { useSources, EmailAccount, ArticleFeed, GithubAccount, GoogleAccount, GoogleFolder, GoogleOAuthConfig, DriveItem, BrowseResponse, GoogleFolderCreate } from '@/hooks/useSources'
+import { useSources, EmailAccount, ArticleFeed, GithubAccount, GoogleAccount, GoogleFolder, GoogleOAuthConfig, DriveItem, BrowseResponse, GoogleFolderCreate, CalendarAccount } from '@/hooks/useSources'
import {
SourceCard,
Modal,
@@ -15,7 +15,7 @@ import {
ConfirmDialog,
} from './shared'
-type TabType = 'email' | 'feeds' | 'github' | 'google'
+type TabType = 'email' | 'feeds' | 'github' | 'google' | 'calendar'
const Sources = () => {
 const [activeTab, setActiveTab] = useState<TabType>('email')
@@ -52,6 +52,12 @@ const Sources = () => {
>
Google Drive
+
@@ -59,6 +65,7 @@ const Sources = () => {
{activeTab === 'feeds' && }
{activeTab === 'github' && }
{activeTab === 'google' && }
+ {activeTab === 'calendar' && <CalendarPanel />}
)
@@ -1640,6 +1647,407 @@ const ExclusionBrowser = ({ accountId, folder, onSave, onCancel }: ExclusionBrow
)
}
+// === Calendar Panel ===
+
+import { CalendarEvent } from '@/hooks/useSources'
+
+interface GroupedEvents {
+ [calendarName: string]: CalendarEvent[]
+}
+
+const CalendarPanel = () => {
+ const {
+ listCalendarAccounts, createCalendarAccount, updateCalendarAccount,
+ deleteCalendarAccount, syncCalendarAccount, listGoogleAccounts, getUpcomingEvents
+ } = useSources()
+ const [accounts, setAccounts] = useState<CalendarAccount[]>([])
+ const [googleAccounts, setGoogleAccounts] = useState<GoogleAccount[]>([])
+ const [events, setEvents] = useState<CalendarEvent[]>([])
+ const [expandedCalendars, setExpandedCalendars] = useState<Set<string>>(new Set())
+ const [loading, setLoading] = useState(true)
+ const [error, setError] = useState<string | null>(null)
+ const [showForm, setShowForm] = useState(false)
+ const [editingAccount, setEditingAccount] = useState<CalendarAccount | null>(null)
+
+ const loadData = useCallback(async () => {
+ setLoading(true)
+ setError(null)
+ try {
+ const [calendarData, googleData, eventsData] = await Promise.all([
+ listCalendarAccounts(),
+ listGoogleAccounts(),
+ getUpcomingEvents({ days: 365, limit: 200 })
+ ])
+ setAccounts(calendarData)
+ setGoogleAccounts(googleData)
+ setEvents(eventsData)
+ } catch (e) {
+ setError(e instanceof Error ? e.message : 'Failed to load accounts')
+ } finally {
+ setLoading(false)
+ }
+ }, [listCalendarAccounts, listGoogleAccounts, getUpcomingEvents])
+
+ useEffect(() => { loadData() }, [loadData])
+
+ const handleCreate = async (data: any) => {
+ await createCalendarAccount(data)
+ setShowForm(false)
+ loadData()
+ }
+
+ const handleUpdate = async (data: any) => {
+ if (editingAccount) {
+ await updateCalendarAccount(editingAccount.id, data)
+ setEditingAccount(null)
+ loadData()
+ }
+ }
+
+ const handleDelete = async (id: number) => {
+ await deleteCalendarAccount(id)
+ loadData()
+ }
+
+ const handleToggleActive = async (account: CalendarAccount) => {
+ await updateCalendarAccount(account.id, { active: !account.active })
+ loadData()
+ }
+
+ const handleSync = async (id: number) => {
+ await syncCalendarAccount(id)
+ loadData()
+ }
+
+ const toggleCalendar = (calendarName: string) => {
+ const newExpanded = new Set(expandedCalendars)
+ if (newExpanded.has(calendarName)) {
+ newExpanded.delete(calendarName)
+ } else {
+ newExpanded.add(calendarName)
+ }
+ setExpandedCalendars(newExpanded)
+ }
+
+ // Group events by calendar name
+ const groupedEvents: GroupedEvents = events.reduce((acc, event) => {
+ const calName = event.calendar_name || 'Unknown'
+ if (!acc[calName]) acc[calName] = []
+ acc[calName].push(event)
+ return acc
+ }, {} as GroupedEvents)
+
+ const formatEventDate = (dateStr: string) => {
+ const date = new Date(dateStr)
+ return date.toLocaleDateString('en-US', { month: 'short', day: 'numeric', year: 'numeric' })
+ }
+
+ const formatEventTime = (dateStr: string) => {
+ const date = new Date(dateStr)
+ return date.toLocaleTimeString('en-US', { hour: 'numeric', minute: '2-digit' })
+ }
+
+ if (loading) return
+ if (error) return
+
+ return (
+
+
+
Calendar Accounts
+
+
+
+ {accounts.length === 0 ? (
+
setShowForm(true)}
+ />
+ ) : (
+
+ {accounts.map(account => (
+
+
+
+
{account.name}
+
+ {account.calendar_type === 'google'
+ ? `Google Calendar (${account.google_account?.email || 'linked'})`
+ : `CalDAV: ${account.caldav_url}`
+ }
+
+
+
+ handleToggleActive(account)} />
+ handleSync(account.id)} disabled={!account.active} label="Sync" />
+
+
+
+
+
+
+ Type: {account.calendar_type === 'google' ? 'Google Calendar' : 'CalDAV'}
+
+ {account.sync_error && (
+ Error: {account.sync_error}
+ )}
+
+
+ {/* Events grouped by calendar */}
+
+
Calendars & Events
+ {Object.keys(groupedEvents).length === 0 ? (
+
No events synced yet
+ ) : (
+
+ {Object.entries(groupedEvents).map(([calendarName, calEvents]) => (
+
+
+ {expandedCalendars.has(calendarName) && (
+
+ {calEvents.map((event, idx) => (
+
+
+ {formatEventDate(event.start_time)}
+ {!event.all_day && (
+ {formatEventTime(event.start_time)}
+ )}
+ {event.all_day && All day}
+
+
+ {event.event_title}
+ {event.location && {event.location}}
+ {event.recurrence_rule && Recurring}
+
+
+ ))}
+
+ )}
+
+ ))}
+
+ )}
+
+
+ ))}
+
+ )}
+
+ {showForm && (
+ setShowForm(false)}
+ />
+ )}
+
+ {editingAccount && (
+ setEditingAccount(null)}
+ />
+ )}
+
+ )
+}
+
+interface CalendarFormProps {
+ account?: CalendarAccount
+ googleAccounts: GoogleAccount[]
+ onSubmit: (data: any) => Promise<void>
+ onCancel: () => void
+}
+
+const CalendarForm = ({ account, googleAccounts, onSubmit, onCancel }: CalendarFormProps) => {
+ const [formData, setFormData] = useState({
+ name: account?.name || '',
+ calendar_type: account?.calendar_type || 'caldav' as 'caldav' | 'google',
+ caldav_url: account?.caldav_url || '',
+ caldav_username: account?.caldav_username || '',
+ caldav_password: '',
+ google_account_id: account?.google_account_id || undefined as number | undefined,
+ tags: account?.tags || [],
+ check_interval: account?.check_interval || 15,
+ sync_past_days: account?.sync_past_days || 30,
+ sync_future_days: account?.sync_future_days || 90,
+ })
+ const [submitting, setSubmitting] = useState(false)
+ const [error, setError] = useState<string | null>(null)
+
+ const handleSubmit = async (e: React.FormEvent) => {
+ e.preventDefault()
+ setSubmitting(true)
+ setError(null)
+ try {
+ const data: any = {
+ name: formData.name,
+ calendar_type: formData.calendar_type,
+ tags: formData.tags,
+ check_interval: formData.check_interval,
+ sync_past_days: formData.sync_past_days,
+ sync_future_days: formData.sync_future_days,
+ }
+
+ if (formData.calendar_type === 'caldav') {
+ data.caldav_url = formData.caldav_url
+ data.caldav_username = formData.caldav_username
+ if (formData.caldav_password) {
+ data.caldav_password = formData.caldav_password
+ }
+ } else {
+ data.google_account_id = formData.google_account_id
+ }
+
+ await onSubmit(data)
+ } catch (e) {
+ setError(e instanceof Error ? e.message : 'Failed to save')
+ } finally {
+ setSubmitting(false)
+ }
+ }
+
+ return (
+
+
+
+ )
+}
+
interface GoogleFolderFormProps {
accountId: number
 onSubmit: (data: any) => Promise<void>
diff --git a/frontend/src/hooks/useSources.ts b/frontend/src/hooks/useSources.ts
index 90f14c5..1c54aa1 100644
--- a/frontend/src/hooks/useSources.ts
+++ b/frontend/src/hooks/useSources.ts
@@ -227,12 +227,79 @@ export interface BrowseResponse {
next_page_token: string | null
}
+// Types for Calendar Accounts
+export interface CalendarGoogleAccountInfo {
+ id: number
+ name: string
+ email: string
+}
+
+export interface CalendarAccount {
+ id: number
+ name: string
+ calendar_type: 'caldav' | 'google'
+ caldav_url: string | null
+ caldav_username: string | null
+ google_account_id: number | null
+ google_account: CalendarGoogleAccountInfo | null
+ calendar_ids: string[]
+ tags: string[]
+ check_interval: number
+ sync_past_days: number
+ sync_future_days: number
+ last_sync_at: string | null
+ sync_error: string | null
+ active: boolean
+ created_at: string
+ updated_at: string
+}
+
+export interface CalendarAccountCreate {
+ name: string
+ calendar_type: 'caldav' | 'google'
+ caldav_url?: string
+ caldav_username?: string
+ caldav_password?: string
+ google_account_id?: number
+ calendar_ids?: string[]
+ tags?: string[]
+ check_interval?: number
+ sync_past_days?: number
+ sync_future_days?: number
+}
+
+export interface CalendarAccountUpdate {
+ name?: string
+ caldav_url?: string
+ caldav_username?: string
+ caldav_password?: string
+ google_account_id?: number
+ calendar_ids?: string[]
+ tags?: string[]
+ check_interval?: number
+ sync_past_days?: number
+ sync_future_days?: number
+ active?: boolean
+}
+
// Task response
export interface TaskResponse {
task_id: string
status: string
}
+// Calendar Event
+export interface CalendarEvent {
+ id: number
+ event_title: string
+ start_time: string
+ end_time: string | null
+ all_day: boolean
+ location: string | null
+ calendar_name: string | null
+ recurrence_rule: string | null
+}
+
export const useSources = () => {
const { apiCall } = useAuth()
@@ -525,6 +592,64 @@ export const useSources = () => {
if (!response.ok) throw new Error('Failed to delete Google OAuth config')
}, [apiCall])
+ // === Calendar Accounts ===
+
+ const listCalendarAccounts = useCallback(async (): Promise<CalendarAccount[]> => {
+ const response = await apiCall('/calendar-accounts')
+ if (!response.ok) throw new Error('Failed to fetch calendar accounts')
+ return response.json()
+ }, [apiCall])
+
+ const createCalendarAccount = useCallback(async (data: CalendarAccountCreate): Promise<CalendarAccount> => {
+ const response = await apiCall('/calendar-accounts', {
+ method: 'POST',
+ body: JSON.stringify(data),
+ })
+ if (!response.ok) {
+ const error = await response.json()
+ throw new Error(error.detail || 'Failed to create calendar account')
+ }
+ return response.json()
+ }, [apiCall])
+
+ const updateCalendarAccount = useCallback(async (id: number, data: CalendarAccountUpdate): Promise<CalendarAccount> => {
+ const response = await apiCall(`/calendar-accounts/${id}`, {
+ method: 'PATCH',
+ body: JSON.stringify(data),
+ })
+ if (!response.ok) {
+ const error = await response.json()
+ throw new Error(error.detail || 'Failed to update calendar account')
+ }
+ return response.json()
+ }, [apiCall])
+
+ const deleteCalendarAccount = useCallback(async (id: number): Promise<void> => {
+ const response = await apiCall(`/calendar-accounts/${id}`, { method: 'DELETE' })
+ if (!response.ok) throw new Error('Failed to delete calendar account')
+ }, [apiCall])
+
+ const syncCalendarAccount = useCallback(async (id: number, forceFull = false): Promise<TaskResponse> => {
+ const response = await apiCall(`/calendar-accounts/${id}/sync?force_full=${forceFull}`, { method: 'POST' })
+ if (!response.ok) throw new Error('Failed to sync calendar account')
+ return response.json()
+ }, [apiCall])
+
+ const getUpcomingEvents = useCallback(async (
+ options: { days?: number; limit?: number; startDate?: string; endDate?: string } = {}
+ ): Promise<CalendarEvent[]> => {
+ const { days = 7, limit = 100, startDate, endDate } = options
+ let url = `/calendar-accounts/events/upcoming?limit=${limit}`
+ if (startDate && endDate) {
+ url += `&start_date=${encodeURIComponent(startDate)}&end_date=${encodeURIComponent(endDate)}`
+ } else {
+ url += `&days=${days}`
+ }
+ const response = await apiCall(url)
+ if (!response.ok) throw new Error('Failed to fetch upcoming events')
+ return response.json()
+ }, [apiCall])
+
return {
// Email
listEmailAccounts,
@@ -564,5 +689,13 @@ export const useSources = () => {
getGoogleOAuthConfig,
uploadGoogleOAuthConfig,
deleteGoogleOAuthConfig,
+ // Calendar Accounts
+ listCalendarAccounts,
+ createCalendarAccount,
+ updateCalendarAccount,
+ deleteCalendarAccount,
+ syncCalendarAccount,
+ // Calendar Events
+ getUpcomingEvents,
}
}
diff --git a/frontend/vite.config.js b/frontend/vite.config.js
index bc2735a..9efeece 100644
--- a/frontend/vite.config.js
+++ b/frontend/vite.config.js
@@ -20,6 +20,7 @@ export default defineConfig({
'/auth': 'http://localhost:8000',
'/health': 'http://localhost:8000',
'/email-accounts': 'http://localhost:8000',
+ '/calendar-accounts': 'http://localhost:8000',
'/article-feeds': 'http://localhost:8000',
'/github': 'http://localhost:8000',
'/google-drive': 'http://localhost:8000',
diff --git a/requirements/requirements-ingesters.txt b/requirements/requirements-ingesters.txt
index fb04079..bc18dc3 100644
--- a/requirements/requirements-ingesters.txt
+++ b/requirements/requirements-ingesters.txt
@@ -1,2 +1,3 @@
discord.py==2.3.2
-uvicorn==0.29.0
\ No newline at end of file
+uvicorn==0.29.0
+caldav
\ No newline at end of file
diff --git a/requirements/requirements-workers.txt b/requirements/requirements-workers.txt
index e59798b..62380e2 100644
--- a/requirements/requirements-workers.txt
+++ b/requirements/requirements-workers.txt
@@ -1,2 +1,3 @@
boto3
-awscli==1.42.64
\ No newline at end of file
+awscli==1.42.64
+caldav
\ No newline at end of file
diff --git a/src/memory/api/MCP/base.py b/src/memory/api/MCP/base.py
index 719c4e0..7ffcc2c 100644
--- a/src/memory/api/MCP/base.py
+++ b/src/memory/api/MCP/base.py
@@ -13,6 +13,7 @@ from memory.api.MCP.servers.core import core_mcp
from memory.api.MCP.servers.github import github_mcp
from memory.api.MCP.servers.meta import meta_mcp
from memory.api.MCP.servers.meta import set_auth_provider as set_meta_auth
+from memory.api.MCP.servers.organizer import organizer_mcp
from memory.api.MCP.servers.people import people_mcp
from memory.api.MCP.servers.schedule import schedule_mcp
from memory.api.MCP.servers.schedule import set_auth_provider as set_schedule_auth
@@ -163,6 +164,7 @@ set_meta_auth(get_current_user)
# Tools will be prefixed with their server name (e.g., core_search_knowledge_base)
mcp.mount(core_mcp, prefix="core")
mcp.mount(github_mcp, prefix="github")
+mcp.mount(organizer_mcp, prefix="organizer")
mcp.mount(people_mcp, prefix="people")
mcp.mount(schedule_mcp, prefix="schedule")
mcp.mount(books_mcp, prefix="books")
diff --git a/src/memory/api/MCP/oauth_provider.py b/src/memory/api/MCP/oauth_provider.py
index db5b6f4..7c64646 100644
--- a/src/memory/api/MCP/oauth_provider.py
+++ b/src/memory/api/MCP/oauth_provider.py
@@ -172,7 +172,7 @@ class SimpleOAuthProvider(OAuthProvider):
return None
- def get_client(self, client_id: str) -> OAuthClientInformationFull | None:
+ async def get_client(self, client_id: str) -> OAuthClientInformationFull | None:
"""Get OAuth client information."""
with make_session() as session:
client = session.get(OAuthClientInformation, client_id)
diff --git a/src/memory/api/MCP/servers/__init__.py b/src/memory/api/MCP/servers/__init__.py
index 68122c0..85fd44f 100644
--- a/src/memory/api/MCP/servers/__init__.py
+++ b/src/memory/api/MCP/servers/__init__.py
@@ -2,6 +2,7 @@
from memory.api.MCP.servers.core import core_mcp
from memory.api.MCP.servers.github import github_mcp
+from memory.api.MCP.servers.organizer import organizer_mcp
from memory.api.MCP.servers.people import people_mcp
from memory.api.MCP.servers.schedule import schedule_mcp
from memory.api.MCP.servers.books import books_mcp
@@ -10,6 +11,7 @@ from memory.api.MCP.servers.meta import meta_mcp
__all__ = [
"core_mcp",
"github_mcp",
+ "organizer_mcp",
"people_mcp",
"schedule_mcp",
"books_mcp",
diff --git a/src/memory/api/MCP/servers/organizer.py b/src/memory/api/MCP/servers/organizer.py
new file mode 100644
index 0000000..25a9234
--- /dev/null
+++ b/src/memory/api/MCP/servers/organizer.py
@@ -0,0 +1,45 @@
+"""
+MCP subserver for organizational tools: calendar, todos, reminders.
+"""
+
+import logging
+
+from fastmcp import FastMCP
+
+from memory.common.calendar import get_events_in_range, parse_date_range
+from memory.common.db.connection import make_session
+
+logger = logging.getLogger(__name__)
+
+organizer_mcp = FastMCP("memory-organizer")
+
+
+@organizer_mcp.tool()
+async def get_upcoming_events(
+ start_date: str | None = None,
+ end_date: str | None = None,
+ days: int = 7,
+ limit: int = 50,
+) -> list[dict]:
+ """
+ Get calendar events within a time span.
+ Use to check the user's schedule, find meetings, or plan around events.
+ Automatically expands recurring events to show all occurrences in the range.
+
+ Args:
+ start_date: ISO format start date (e.g., "2024-01-15" or "2024-01-15T09:00:00Z").
+ Defaults to now if not provided.
+ end_date: ISO format end date. Defaults to start_date + days if not provided.
+ days: Number of days from start_date if end_date not specified (default 7, max 365)
+ limit: Maximum number of events to return (default 50, max 200)
+
+ Returns: List of events with id, event_title, start_time, end_time, all_day,
+ location, calendar_name, recurrence_rule. Sorted by start_time.
+ """
+ days = min(max(days, 1), 365)
+ limit = min(max(limit, 1), 200)
+
+ range_start, range_end = parse_date_range(start_date, end_date, days)
+
+ with make_session() as session:
+ return get_events_in_range(session, range_start, range_end, limit)
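For clarity, this is the shape each returned event is expected to take, per the docstring above; the values below are purely illustrative, not taken from real data.

```python
# Illustrative only: one element of the list returned by get_upcoming_events.
example_event = {
    "id": 42,
    "event_title": "Weekly sync",
    "start_time": "2026-01-05T10:00:00+00:00",
    "end_time": "2026-01-05T10:30:00+00:00",
    "all_day": False,
    "location": "Meeting room 2",
    "calendar_name": "Work",
    "recurrence_rule": "FREQ=WEEKLY;BYDAY=MO",
}
```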
diff --git a/src/memory/api/app.py b/src/memory/api/app.py
index 077da8b..c3bd5bf 100644
--- a/src/memory/api/app.py
+++ b/src/memory/api/app.py
@@ -27,6 +27,7 @@ from memory.api.google_drive import router as google_drive_router
from memory.api.email_accounts import router as email_accounts_router
from memory.api.article_feeds import router as article_feeds_router
from memory.api.github_sources import router as github_sources_router
+from memory.api.calendar_accounts import router as calendar_accounts_router
from memory.api.MCP.base import mcp
logger = logging.getLogger(__name__)
@@ -157,6 +158,7 @@ app.include_router(google_drive_router)
app.include_router(email_accounts_router)
app.include_router(article_feeds_router)
app.include_router(github_sources_router)
+app.include_router(calendar_accounts_router)
# Add health check to MCP server instead of main app
diff --git a/src/memory/api/article_feeds.py b/src/memory/api/article_feeds.py
index 876cd41..e838ed6 100644
--- a/src/memory/api/article_feeds.py
+++ b/src/memory/api/article_feeds.py
@@ -59,7 +59,9 @@ def feed_to_response(feed: ArticleFeed) -> ArticleFeedResponse:
description=cast(str | None, feed.description),
tags=list(feed.tags or []),
check_interval=cast(int, feed.check_interval),
- last_checked_at=feed.last_checked_at.isoformat() if feed.last_checked_at else None,
+ last_checked_at=feed.last_checked_at.isoformat()
+ if feed.last_checked_at
+ else None,
active=cast(bool, feed.active),
created_at=feed.created_at.isoformat() if feed.created_at else "",
updated_at=feed.updated_at.isoformat() if feed.updated_at else "",
@@ -171,13 +173,16 @@ def trigger_sync(
db: Session = Depends(get_session),
):
"""Manually trigger a sync for an article feed."""
- from memory.workers.tasks.blogs import sync_article_feed
+ from memory.common.celery_app import app, SYNC_ARTICLE_FEED
feed = db.get(ArticleFeed, feed_id)
if not feed:
raise HTTPException(status_code=404, detail="Feed not found")
- task = sync_article_feed.delay(feed_id)
+ task = app.send_task(
+ SYNC_ARTICLE_FEED,
+ args=[feed_id],
+ )
return {"task_id": task.id, "status": "scheduled"}
diff --git a/src/memory/api/calendar_accounts.py b/src/memory/api/calendar_accounts.py
new file mode 100644
index 0000000..9014014
--- /dev/null
+++ b/src/memory/api/calendar_accounts.py
@@ -0,0 +1,303 @@
+"""API endpoints for Calendar Account management."""
+
+from typing import Literal, cast
+
+from fastapi import APIRouter, Depends, HTTPException, Query
+from pydantic import BaseModel
+from sqlalchemy.orm import Session
+
+from memory.api.auth import get_current_user
+from memory.common.calendar import get_events_in_range, parse_date_range
+from memory.common.db.connection import get_session
+from memory.common.db.models import User
+from memory.common.db.models.sources import CalendarAccount, GoogleAccount
+
+router = APIRouter(prefix="/calendar-accounts", tags=["calendar-accounts"])
+
+
+class CalendarAccountCreate(BaseModel):
+ name: str
+ calendar_type: Literal["caldav", "google"]
+ # CalDAV fields
+ caldav_url: str | None = None
+ caldav_username: str | None = None
+ caldav_password: str | None = None
+ # Google Calendar fields
+ google_account_id: int | None = None
+ # Common fields
+ calendar_ids: list[str] = []
+ tags: list[str] = []
+ check_interval: int = 15 # Minutes
+ sync_past_days: int = 30
+ sync_future_days: int = 90
+
+
+class CalendarAccountUpdate(BaseModel):
+ name: str | None = None
+ caldav_url: str | None = None
+ caldav_username: str | None = None
+ caldav_password: str | None = None
+ google_account_id: int | None = None
+ calendar_ids: list[str] | None = None
+ tags: list[str] | None = None
+ check_interval: int | None = None
+ sync_past_days: int | None = None
+ sync_future_days: int | None = None
+ active: bool | None = None
+
+
+class GoogleAccountInfo(BaseModel):
+ id: int
+ name: str
+ email: str
+
+
+class CalendarEventResponse(BaseModel):
+ id: int
+ event_title: str
+ start_time: str
+ end_time: str | None
+ all_day: bool
+ location: str | None
+ calendar_name: str | None
+ recurrence_rule: str | None
+
+
+class CalendarAccountResponse(BaseModel):
+ id: int
+ name: str
+ calendar_type: str
+ caldav_url: str | None
+ caldav_username: str | None
+ google_account_id: int | None
+ google_account: GoogleAccountInfo | None
+ calendar_ids: list[str]
+ tags: list[str]
+ check_interval: int
+ sync_past_days: int
+ sync_future_days: int
+ last_sync_at: str | None
+ sync_error: str | None
+ active: bool
+ created_at: str
+ updated_at: str
+
+
+def account_to_response(account: CalendarAccount) -> CalendarAccountResponse:
+ """Convert a CalendarAccount model to a response model."""
+ google_info = None
+ if account.google_account:
+ google_info = GoogleAccountInfo(
+ id=cast(int, account.google_account.id),
+ name=cast(str, account.google_account.name),
+ email=cast(str, account.google_account.email),
+ )
+
+ return CalendarAccountResponse(
+ id=cast(int, account.id),
+ name=cast(str, account.name),
+ calendar_type=cast(str, account.calendar_type),
+ caldav_url=cast(str | None, account.caldav_url),
+ caldav_username=cast(str | None, account.caldav_username),
+ google_account_id=cast(int | None, account.google_account_id),
+ google_account=google_info,
+ calendar_ids=list(account.calendar_ids or []),
+ tags=list(account.tags or []),
+ check_interval=cast(int, account.check_interval),
+ sync_past_days=cast(int, account.sync_past_days),
+ sync_future_days=cast(int, account.sync_future_days),
+ last_sync_at=account.last_sync_at.isoformat() if account.last_sync_at else None,
+ sync_error=cast(str | None, account.sync_error),
+ active=cast(bool, account.active),
+ created_at=account.created_at.isoformat() if account.created_at else "",
+ updated_at=account.updated_at.isoformat() if account.updated_at else "",
+ )
+
+
+@router.get("")
+def list_accounts(
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+) -> list[CalendarAccountResponse]:
+ """List all calendar accounts."""
+ accounts = db.query(CalendarAccount).all()
+ return [account_to_response(account) for account in accounts]
+
+
+@router.post("")
+def create_account(
+ data: CalendarAccountCreate,
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+) -> CalendarAccountResponse:
+ """Create a new calendar account."""
+ # Validate based on type
+ if data.calendar_type == "caldav":
+ if not data.caldav_url or not data.caldav_username or not data.caldav_password:
+ raise HTTPException(
+ status_code=400,
+ detail="CalDAV accounts require caldav_url, caldav_username, and caldav_password",
+ )
+ elif data.calendar_type == "google":
+ if not data.google_account_id:
+ raise HTTPException(
+ status_code=400,
+ detail="Google Calendar accounts require google_account_id",
+ )
+ # Verify the Google account exists
+ google_account = db.get(GoogleAccount, data.google_account_id)
+ if not google_account:
+ raise HTTPException(status_code=400, detail="Google account not found")
+
+ account = CalendarAccount(
+ name=data.name,
+ calendar_type=data.calendar_type,
+ caldav_url=data.caldav_url,
+ caldav_username=data.caldav_username,
+ caldav_password=data.caldav_password,
+ google_account_id=data.google_account_id,
+ calendar_ids=data.calendar_ids,
+ tags=data.tags,
+ check_interval=data.check_interval,
+ sync_past_days=data.sync_past_days,
+ sync_future_days=data.sync_future_days,
+ )
+ db.add(account)
+ db.commit()
+ db.refresh(account)
+
+ return account_to_response(account)
+
+
+@router.get("/{account_id}")
+def get_account(
+ account_id: int,
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+) -> CalendarAccountResponse:
+ """Get a single calendar account."""
+ account = db.get(CalendarAccount, account_id)
+ if not account:
+ raise HTTPException(status_code=404, detail="Account not found")
+ return account_to_response(account)
+
+
+@router.patch("/{account_id}")
+def update_account(
+ account_id: int,
+ updates: CalendarAccountUpdate,
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+) -> CalendarAccountResponse:
+ """Update a calendar account."""
+ account = db.get(CalendarAccount, account_id)
+ if not account:
+ raise HTTPException(status_code=404, detail="Account not found")
+
+ if updates.name is not None:
+ account.name = updates.name
+ if updates.caldav_url is not None:
+ account.caldav_url = updates.caldav_url
+ if updates.caldav_username is not None:
+ account.caldav_username = updates.caldav_username
+ if updates.caldav_password is not None:
+ account.caldav_password = updates.caldav_password
+ if updates.google_account_id is not None:
+ # Verify the Google account exists
+ google_account = db.get(GoogleAccount, updates.google_account_id)
+ if not google_account:
+ raise HTTPException(status_code=400, detail="Google account not found")
+ account.google_account_id = updates.google_account_id
+ if updates.calendar_ids is not None:
+ account.calendar_ids = updates.calendar_ids
+ if updates.tags is not None:
+ account.tags = updates.tags
+ if updates.check_interval is not None:
+ account.check_interval = updates.check_interval
+ if updates.sync_past_days is not None:
+ account.sync_past_days = updates.sync_past_days
+ if updates.sync_future_days is not None:
+ account.sync_future_days = updates.sync_future_days
+ if updates.active is not None:
+ account.active = updates.active
+
+ db.commit()
+ db.refresh(account)
+
+ return account_to_response(account)
+
+
+@router.delete("/{account_id}")
+def delete_account(
+ account_id: int,
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+):
+ """Delete a calendar account."""
+ account = db.get(CalendarAccount, account_id)
+ if not account:
+ raise HTTPException(status_code=404, detail="Account not found")
+
+ db.delete(account)
+ db.commit()
+
+ return {"status": "deleted"}
+
+
+@router.post("/{account_id}/sync")
+def trigger_sync(
+ account_id: int,
+ force_full: bool = False,
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+):
+ """Manually trigger a sync for a calendar account."""
+ from memory.common.celery_app import app, SYNC_CALENDAR_ACCOUNT
+
+ account = db.get(CalendarAccount, account_id)
+ if not account:
+ raise HTTPException(status_code=404, detail="Account not found")
+
+ task = app.send_task(
+ SYNC_CALENDAR_ACCOUNT,
+ args=[account_id],
+ kwargs={"force_full": force_full},
+ )
+
+ return {"task_id": task.id, "status": "scheduled"}
+
+
+@router.get("/events/upcoming")
+def get_upcoming_events(
+ days: int = Query(default=7, ge=1, le=365),
+ limit: int = Query(default=10, ge=1, le=200),
+ start_date: str | None = Query(default=None, description="ISO format start date"),
+ end_date: str | None = Query(default=None, description="ISO format end date"),
+ user: User = Depends(get_current_user),
+ db: Session = Depends(get_session),
+) -> list[CalendarEventResponse]:
+ """Get calendar events within a date range.
+
+ If start_date/end_date provided, uses those. Otherwise uses days from now.
+ Expands recurring events to show future occurrences.
+ """
+ try:
+ range_start, range_end = parse_date_range(start_date, end_date, days)
+ except ValueError as e:
+ raise HTTPException(status_code=400, detail=str(e))
+
+ events = get_events_in_range(db, range_start, range_end, limit)
+
+ return [
+ CalendarEventResponse(
+ id=e["id"],
+ event_title=e["event_title"],
+ start_time=e["start_time"],
+ end_time=e["end_time"],
+ all_day=e["all_day"],
+ location=e["location"],
+ calendar_name=e["calendar_name"],
+ recurrence_rule=e["recurrence_rule"],
+ )
+ for e in events
+ ]
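A minimal sketch of exercising the new endpoints from Python; the base URL and bearer-token header are assumptions, so adapt them to however `get_current_user` actually authenticates in this deployment.

```python
# Hedged sketch: create a CalDAV account, trigger a sync, list upcoming events.
import requests

BASE = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer <token>"}  # auth mechanism is an assumption

account = requests.post(f"{BASE}/calendar-accounts", headers=HEADERS, json={
    "name": "Personal",
    "calendar_type": "caldav",
    "caldav_url": "https://cal.example.com/dav/",
    "caldav_username": "me",
    "caldav_password": "secret",
}).json()

requests.post(f"{BASE}/calendar-accounts/{account['id']}/sync",
              params={"force_full": True}, headers=HEADERS)

events = requests.get(f"{BASE}/calendar-accounts/events/upcoming",
                      params={"days": 14, "limit": 50}, headers=HEADERS).json()
for ev in events:
    print(ev["start_time"], ev["event_title"])
```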
diff --git a/src/memory/api/email_accounts.py b/src/memory/api/email_accounts.py
index 28a04f4..bc414cb 100644
--- a/src/memory/api/email_accounts.py
+++ b/src/memory/api/email_accounts.py
@@ -192,13 +192,17 @@ def trigger_sync(
db: Session = Depends(get_session),
):
"""Manually trigger a sync for an email account."""
- from memory.workers.tasks.email import sync_account
+ from memory.common.celery_app import app, SYNC_ACCOUNT
account = db.get(EmailAccount, account_id)
if not account:
raise HTTPException(status_code=404, detail="Account not found")
- task = sync_account.delay(account_id, since_date=since_date)
+ task = app.send_task(
+ SYNC_ACCOUNT,
+ args=[account_id],
+ kwargs={"since_date": since_date},
+ )
return {"task_id": task.id, "status": "scheduled"}
diff --git a/src/memory/api/github_sources.py b/src/memory/api/github_sources.py
index 731f036..1b18f8d 100644
--- a/src/memory/api/github_sources.py
+++ b/src/memory/api/github_sources.py
@@ -124,7 +124,9 @@ def repo_to_response(repo: GithubRepo) -> GithubRepoResponse:
check_interval=cast(int, repo.check_interval),
full_sync_interval=cast(int, repo.full_sync_interval),
last_sync_at=repo.last_sync_at.isoformat() if repo.last_sync_at else None,
- last_full_sync_at=repo.last_full_sync_at.isoformat() if repo.last_full_sync_at else None,
+ last_full_sync_at=repo.last_full_sync_at.isoformat()
+ if repo.last_full_sync_at
+ else None,
active=cast(bool, repo.active),
created_at=repo.created_at.isoformat() if repo.created_at else "",
)
@@ -432,7 +434,7 @@ def trigger_repo_sync(
db: Session = Depends(get_session),
):
"""Manually trigger a sync for a repo."""
- from memory.workers.tasks.github import sync_github_repo
+ from memory.common.celery_app import app, SYNC_GITHUB_REPO
repo = (
db.query(GithubRepo)
@@ -442,6 +444,10 @@ def trigger_repo_sync(
if not repo:
raise HTTPException(status_code=404, detail="Repo not found")
- task = sync_github_repo.delay(repo_id, force_full=force_full)
+ task = app.send_task(
+ SYNC_GITHUB_REPO,
+ args=[repo_id],
+ kwargs={"force_full": force_full},
+ )
return {"task_id": task.id, "status": "scheduled"}
diff --git a/src/memory/api/google_drive.py b/src/memory/api/google_drive.py
index 57ad533..c5e69db 100644
--- a/src/memory/api/google_drive.py
+++ b/src/memory/api/google_drive.py
@@ -3,7 +3,6 @@
import json
import os
import secrets
-from datetime import datetime, timedelta, timezone
from typing import cast
# Allow Google to return additional scopes (like 'openid') without raising an error
@@ -17,7 +16,11 @@ from sqlalchemy.orm import Session
from memory.common import settings
from memory.common.db.connection import get_session, make_session
from memory.common.db.models import User
-from memory.common.db.models.sources import GoogleAccount, GoogleFolder, GoogleOAuthConfig
+from memory.common.db.models.sources import (
+ GoogleAccount,
+ GoogleFolder,
+ GoogleOAuthConfig,
+)
from memory.api.auth import get_current_user
router = APIRouter(prefix="/google-drive", tags=["google-drive"])
@@ -25,7 +28,11 @@ router = APIRouter(prefix="/google-drive", tags=["google-drive"])
def get_oauth_config(session: Session) -> GoogleOAuthConfig:
"""Get the OAuth config from database, falling back to env vars if not found."""
- config = session.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == "default").first()
+ config = (
+ session.query(GoogleOAuthConfig)
+ .filter(GoogleOAuthConfig.name == "default")
+ .first()
+ )
if config:
return config
@@ -116,6 +123,7 @@ class OAuthConfigResponse(BaseModel):
# Browse endpoint models
class DriveItem(BaseModel):
"""A file or folder in Google Drive."""
+
id: str
name: str
mime_type: str
@@ -126,6 +134,7 @@ class DriveItem(BaseModel):
class BrowseResponse(BaseModel):
"""Response from browsing a Google Drive folder."""
+
folder_id: str
folder_name: str
parent_id: str | None = None
@@ -151,15 +160,21 @@ async def upload_oauth_config(
raise HTTPException(status_code=400, detail=f"Invalid JSON file: {e}")
# Check if config already exists
- existing = db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == name).first()
+ existing = (
+ db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == name).first()
+ )
if existing:
# Update existing config
creds = json_data.get("web") or json_data.get("installed") or json_data
existing.client_id = creds["client_id"]
existing.client_secret = creds["client_secret"]
existing.project_id = creds.get("project_id")
- existing.auth_uri = creds.get("auth_uri", "https://accounts.google.com/o/oauth2/auth")
- existing.token_uri = creds.get("token_uri", "https://oauth2.googleapis.com/token")
+ existing.auth_uri = creds.get(
+ "auth_uri", "https://accounts.google.com/o/oauth2/auth"
+ )
+ existing.token_uri = creds.get(
+ "token_uri", "https://oauth2.googleapis.com/token"
+ )
existing.redirect_uris = creds.get("redirect_uris", [])
existing.javascript_origins = creds.get("javascript_origins", [])
db.commit()
@@ -188,7 +203,9 @@ def get_config(
db: Session = Depends(get_session),
) -> OAuthConfigResponse | None:
"""Get current OAuth configuration (without secrets)."""
- config = db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == "default").first()
+ config = (
+ db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == "default").first()
+ )
if not config:
return None
@@ -208,7 +225,9 @@ def delete_config(
db: Session = Depends(get_session),
):
"""Delete OAuth configuration."""
- config = db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == "default").first()
+ config = (
+ db.query(GoogleOAuthConfig).filter(GoogleOAuthConfig.name == "default").first()
+ )
if not config:
raise HTTPException(status_code=404, detail="Config not found")
@@ -426,11 +445,15 @@ def browse_folder(
else:
# Get folder info for a specific folder
try:
- folder_info = service.files().get(
- fileId=folder_id,
- fields="name, parents",
- supportsAllDrives=True,
- ).execute()
+ folder_info = (
+ service.files()
+ .get(
+ fileId=folder_id,
+ fields="name, parents",
+ supportsAllDrives=True,
+ )
+ .execute()
+ )
folder_name = folder_info.get("name", folder_id)
parents = folder_info.get("parents", [])
parent_id = parents[0] if parents else None
@@ -439,16 +462,20 @@ def browse_folder(
query = f"'{folder_id}' in parents and trashed=false"
try:
- response = service.files().list(
- q=query,
- spaces="drive",
- fields="nextPageToken, files(id, name, mimeType, size, modifiedTime)",
- pageToken=page_token,
- pageSize=page_size,
- orderBy="folder,name", # Folders first, then by name
- includeItemsFromAllDrives=True,
- supportsAllDrives=True,
- ).execute()
+ response = (
+ service.files()
+ .list(
+ q=query,
+ spaces="drive",
+ fields="nextPageToken, files(id, name, mimeType, size, modifiedTime)",
+ pageToken=page_token,
+ pageSize=page_size,
+ orderBy="folder,name", # Folders first, then by name
+ includeItemsFromAllDrives=True,
+ supportsAllDrives=True,
+ )
+ .execute()
+ )
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to list folder: {e}")
@@ -457,25 +484,29 @@ def browse_folder(
# Add "Shared with me" as a virtual folder when at root
if folder_id == "root":
- items.append(DriveItem(
- id="shared",
- name="Shared with me",
- mime_type="application/vnd.google-apps.folder",
- is_folder=True,
- size=None,
- modified_at=None,
- ))
+ items.append(
+ DriveItem(
+ id="shared",
+ name="Shared with me",
+ mime_type="application/vnd.google-apps.folder",
+ is_folder=True,
+ size=None,
+ modified_at=None,
+ )
+ )
for file in response.get("files", []):
is_folder = file["mimeType"] == "application/vnd.google-apps.folder"
- items.append(DriveItem(
- id=file["id"],
- name=file["name"],
- mime_type=file["mimeType"],
- is_folder=is_folder,
- size=file.get("size"),
- modified_at=file.get("modifiedTime"),
- ))
+ items.append(
+ DriveItem(
+ id=file["id"],
+ name=file["name"],
+ mime_type=file["mimeType"],
+ is_folder=is_folder,
+ size=file.get("size"),
+ modified_at=file.get("modifiedTime"),
+ )
+ )
return BrowseResponse(
folder_id=folder_id,
@@ -586,9 +617,7 @@ def update_folder(
include_shared=cast(bool, folder.include_shared),
tags=cast(list[str], folder.tags) or [],
check_interval=cast(int, folder.check_interval),
- last_sync_at=(
- folder.last_sync_at.isoformat() if folder.last_sync_at else None
- ),
+ last_sync_at=(folder.last_sync_at.isoformat() if folder.last_sync_at else None),
active=cast(bool, folder.active),
exclude_folder_ids=cast(list[str], folder.exclude_folder_ids) or [],
)
@@ -629,7 +658,7 @@ def trigger_sync(
db: Session = Depends(get_session),
):
"""Manually trigger a sync for a folder."""
- from memory.workers.tasks.google_drive import sync_google_folder
+ from memory.common.celery_app import app, SYNC_GOOGLE_FOLDER
folder = (
db.query(GoogleFolder)
@@ -643,7 +672,11 @@ def trigger_sync(
if not folder:
raise HTTPException(status_code=404, detail="Folder not found")
- task = sync_google_folder.delay(folder.id, force_full=force_full)
+ task = app.send_task(
+ SYNC_GOOGLE_FOLDER,
+ args=[folder.id],
+ kwargs={"force_full": force_full},
+ )
return {"task_id": task.id, "status": "scheduled"}
diff --git a/src/memory/common/calendar.py b/src/memory/common/calendar.py
new file mode 100644
index 0000000..ce557e9
--- /dev/null
+++ b/src/memory/common/calendar.py
@@ -0,0 +1,168 @@
+"""
+Common calendar utilities for event expansion and querying.
+"""
+
+from datetime import datetime, timedelta, timezone
+from typing import TypedDict
+
+from dateutil.rrule import rrulestr
+from sqlalchemy.orm import Session
+
+from memory.common.db.models import CalendarEvent
+
+
+class EventDict(TypedDict):
+ id: int
+ event_title: str
+ start_time: str
+ end_time: str | None
+ all_day: bool
+ location: str | None
+ calendar_name: str | None
+ recurrence_rule: str | None
+
+
+def expand_recurring_event(
+ event: CalendarEvent,
+ start_range: datetime,
+ end_range: datetime,
+) -> list[tuple[datetime, datetime | None]]:
+ """Expand a recurring event into occurrences within the given range.
+
+ Returns list of (start_time, end_time) tuples for each occurrence.
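+
+    For example (illustrative): a 09:00-09:15 standup recurring on weekdays,
+    expanded over a Monday-Friday window, yields five (start, end) pairs, one
+    per weekday, each keeping the original 15-minute duration.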
+ """
+ if not event.recurrence_rule or not event.start_time:
+ return []
+
+ try:
+ rule = rrulestr(
+ f"RRULE:{event.recurrence_rule}",
+ dtstart=event.start_time,
+ )
+
+ duration = None
+ if event.end_time and event.start_time:
+ duration = event.end_time - event.start_time
+
+ occurrences = []
+ for occ_start in rule.between(start_range, end_range, inc=True):
+ occ_end = occ_start + duration if duration else None
+ occurrences.append((occ_start, occ_end))
+
+ return occurrences
+ except Exception:
+ return []
+
+
+def event_to_dict(
+ event: CalendarEvent,
+ start_time: datetime | None = None,
+ end_time: datetime | None = None,
+) -> EventDict:
+ """Convert a CalendarEvent to a dictionary.
+
+ If start_time/end_time are provided, they override the event's times
+ (used for recurring event occurrences).
+ """
+ st = start_time or event.start_time
+ et = end_time or event.end_time
+
+ return EventDict(
+ id=event.id, # type: ignore
+ event_title=event.event_title or "", # type: ignore
+ start_time=st.isoformat() if st else "",
+ end_time=et.isoformat() if et else None,
+ all_day=event.all_day or False, # type: ignore
+ location=event.location, # type: ignore
+ calendar_name=event.calendar_name, # type: ignore
+ recurrence_rule=event.recurrence_rule, # type: ignore
+ )
+
+
+def get_events_in_range(
+ session: Session,
+ start_date: datetime,
+ end_date: datetime,
+ limit: int = 200,
+) -> list[EventDict]:
+ """Get all calendar events (including expanded recurring) in a date range.
+
+ Args:
+ session: Database session
+ start_date: Start of the date range (inclusive)
+ end_date: End of the date range (inclusive)
+ limit: Maximum number of events to return
+
+ Returns:
+ List of event dictionaries, sorted by start_time
+ """
+ # Get non-recurring events in range
+ non_recurring = (
+ session.query(CalendarEvent)
+ .filter(
+ CalendarEvent.start_time >= start_date,
+ CalendarEvent.start_time <= end_date,
+ CalendarEvent.recurrence_rule.is_(None),
+ )
+ .all()
+ )
+
+ # Get all recurring events (they might have occurrences in range)
+ recurring = (
+ session.query(CalendarEvent)
+ .filter(CalendarEvent.recurrence_rule.isnot(None))
+ .all()
+ )
+
+ results: list[tuple[datetime, EventDict]] = []
+
+ # Add non-recurring events
+ for e in non_recurring:
+ if e.start_time:
+ results.append((e.start_time, event_to_dict(e)))
+
+ # Expand recurring events
+ for e in recurring:
+ for occ_start, occ_end in expand_recurring_event(e, start_date, end_date):
+ results.append((occ_start, event_to_dict(e, occ_start, occ_end)))
+
+ # Sort by start time and apply limit
+ results.sort(key=lambda x: x[0])
+ return [r[1] for r in results[:limit]]
+
+
+def parse_date_range(
+ start_date: str | None = None,
+ end_date: str | None = None,
+ days: int = 7,
+) -> tuple[datetime, datetime]:
+ """Parse date range from string inputs.
+
+ Args:
+ start_date: ISO format start date (defaults to now)
+ end_date: ISO format end date (defaults to start + days)
+ days: Number of days if end_date not specified
+
+ Returns:
+        Tuple of (start_datetime, end_datetime); naive inputs are assumed UTC
+
+ Raises:
+ ValueError: If date format is invalid
+ """
+    if start_date:
+        try:
+            range_start = datetime.fromisoformat(start_date.replace("Z", "+00:00"))
+        except ValueError:
+            raise ValueError(f"Invalid start_date format: {start_date}")
+    else:
+        range_start = datetime.now(timezone.utc)
+
+    if end_date:
+        try:
+            range_end = datetime.fromisoformat(end_date.replace("Z", "+00:00"))
+        except ValueError:
+            raise ValueError(f"Invalid end_date format: {end_date}")
+    else:
+        range_end = range_start + timedelta(days=days)
+
+    # Date-only inputs parse as naive datetimes; assume UTC so comparisons with
+    # timezone-aware event times (and rrule expansion) remain valid.
+    if range_start.tzinfo is None:
+        range_start = range_start.replace(tzinfo=timezone.utc)
+    if range_end.tzinfo is None:
+        range_end = range_end.replace(tzinfo=timezone.utc)
+
+    return range_start, range_end
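+
+
+# Illustrative examples (made-up dates):
+#   parse_date_range("2024-01-15", None, days=10)
+#   -> (midnight 2024-01-15, midnight 2024-01-25)
+#   parse_date_range(None, None) covers the next 7 days starting from now.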
diff --git a/src/memory/common/celery_app.py b/src/memory/common/celery_app.py
index b08f340..fa14e60 100644
--- a/src/memory/common/celery_app.py
+++ b/src/memory/common/celery_app.py
@@ -19,6 +19,7 @@ GITHUB_ROOT = "memory.workers.tasks.github"
PEOPLE_ROOT = "memory.workers.tasks.people"
PROACTIVE_ROOT = "memory.workers.tasks.proactive"
GOOGLE_ROOT = "memory.workers.tasks.google_drive"
+CALENDAR_ROOT = "memory.workers.tasks.calendar"
ADD_DISCORD_MESSAGE = f"{DISCORD_ROOT}.add_discord_message"
EDIT_DISCORD_MESSAGE = f"{DISCORD_ROOT}.edit_discord_message"
PROCESS_DISCORD_MESSAGE = f"{DISCORD_ROOT}.process_discord_message"
@@ -83,6 +84,11 @@ SYNC_GOOGLE_FOLDER = f"{GOOGLE_ROOT}.sync_google_folder"
SYNC_GOOGLE_DOC = f"{GOOGLE_ROOT}.sync_google_doc"
SYNC_ALL_GOOGLE_ACCOUNTS = f"{GOOGLE_ROOT}.sync_all_google_accounts"
+# Calendar tasks
+SYNC_CALENDAR_ACCOUNT = f"{CALENDAR_ROOT}.sync_calendar_account"
+SYNC_CALENDAR_EVENT = f"{CALENDAR_ROOT}.sync_calendar_event"
+SYNC_ALL_CALENDARS = f"{CALENDAR_ROOT}.sync_all_calendars"
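+# These names are meant for dispatch-by-name from the API layer, e.g.
+#   app.send_task(SYNC_CALENDAR_ACCOUNT, args=[account_id], kwargs={"force_full": True})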
+
def get_broker_url() -> str:
protocol = settings.CELERY_BROKER_TYPE
@@ -142,6 +148,7 @@ app.conf.update(
f"{PEOPLE_ROOT}.*": {"queue": f"{settings.CELERY_QUEUE_PREFIX}-people"},
f"{PROACTIVE_ROOT}.*": {"queue": f"{settings.CELERY_QUEUE_PREFIX}-discord"},
f"{GOOGLE_ROOT}.*": {"queue": f"{settings.CELERY_QUEUE_PREFIX}-google"},
+ f"{CALENDAR_ROOT}.*": {"queue": f"{settings.CELERY_QUEUE_PREFIX}-calendar"},
},
beat_schedule={
"sync-github-repos-hourly": {
@@ -156,6 +163,10 @@ app.conf.update(
"task": SYNC_ALL_GOOGLE_ACCOUNTS,
"schedule": crontab(minute=30), # Every hour at :30
},
+ "sync-calendars-hourly": {
+ "task": SYNC_ALL_CALENDARS,
+ "schedule": crontab(minute=45), # Every hour at :45
+ },
},
)
diff --git a/src/memory/common/db/models/__init__.py b/src/memory/common/db/models/__init__.py
index f9f324c..afcd641 100644
--- a/src/memory/common/db/models/__init__.py
+++ b/src/memory/common/db/models/__init__.py
@@ -23,6 +23,8 @@ from memory.common.db.models.source_items import (
MiscDoc,
Note,
GoogleDoc,
+ Task,
+ CalendarEvent,
MailMessagePayload,
EmailAttachmentPayload,
AgentObservationPayload,
@@ -32,6 +34,8 @@ from memory.common.db.models.source_items import (
NotePayload,
ForumPostPayload,
GoogleDocPayload,
+ TaskPayload,
+ CalendarEventPayload,
)
from memory.common.db.models.discord import (
DiscordServer,
@@ -62,6 +66,7 @@ from memory.common.db.models.sources import (
GoogleOAuthConfig,
GoogleAccount,
GoogleFolder,
+ CalendarAccount,
)
from memory.common.db.models.users import (
User,
@@ -89,6 +94,8 @@ Payload = (
| MailMessagePayload
| PersonPayload
| GoogleDocPayload
+ | TaskPayload
+ | CalendarEventPayload
)
__all__ = [
@@ -114,6 +121,10 @@ __all__ = [
"Note",
"GoogleDoc",
"GoogleDocPayload",
+ "Task",
+ "TaskPayload",
+ "CalendarEvent",
+ "CalendarEventPayload",
# Observations
"ObservationContradiction",
"ReactionPattern",
@@ -123,6 +134,10 @@ __all__ = [
# People
"Person",
"PersonPayload",
+ # Calendar
+ "CalendarAccount",
+ "CalendarEvent",
+ "CalendarEventPayload",
# Sources
"Book",
"ArticleFeed",
@@ -132,6 +147,9 @@ __all__ = [
"GoogleOAuthConfig",
"GoogleAccount",
"GoogleFolder",
+ "CalendarAccount",
+ "CalendarEvent",
+ "CalendarEventPayload",
"DiscordServer",
"DiscordChannel",
"DiscordUser",
diff --git a/src/memory/common/db/models/source_items.py b/src/memory/common/db/models/source_items.py
index 98370f3..ae3d404 100644
--- a/src/memory/common/db/models/source_items.py
+++ b/src/memory/common/db/models/source_items.py
@@ -1302,3 +1302,226 @@ class GoogleDoc(SourceItem):
@classmethod
def get_collections(cls) -> list[str]:
return ["doc"]
+
+
+class TaskPayload(SourceItemPayload):
+ title: Annotated[str, "Title of the task"]
+ due_date: Annotated[str | None, "Due date in ISO format"]
+ priority: Annotated[str | None, "Priority level: low, medium, high, urgent"]
+ status: Annotated[str, "Status: pending, in_progress, done, cancelled"]
+ recurrence: Annotated[str | None, "Recurrence rule (RRULE format)"]
+ source_item_id: Annotated[int | None, "Source item that spawned this task"]
+
+
+class Task(SourceItem):
+ """Explicit task/todo item."""
+
+ __tablename__ = "task"
+
+ id = Column(
+ BigInteger, ForeignKey("source_item.id", ondelete="CASCADE"), primary_key=True
+ )
+
+ task_title = Column(Text, nullable=False)
+ due_date = Column(DateTime(timezone=True), nullable=True)
+ priority = Column(Text, nullable=True) # low, medium, high, urgent
+ status = Column(Text, nullable=False, server_default="pending")
+ recurrence = Column(Text, nullable=True) # RRULE format for habits
+ completed_at = Column(DateTime(timezone=True), nullable=True)
+
+ # Link to source that spawned this task (email, note, etc.)
+ source_item_id = Column(
+ BigInteger, ForeignKey("source_item.id", ondelete="SET NULL"), nullable=True
+ )
+ source_item = relationship(
+ "SourceItem", foreign_keys=[source_item_id], backref="spawned_tasks"
+ )
+
+ __mapper_args__ = {
+ "polymorphic_identity": "task",
+ "inherit_condition": id == SourceItem.id,
+ }
+
+ __table_args__ = (
+ CheckConstraint(
+ "status IN ('pending', 'in_progress', 'done', 'cancelled')",
+ name="task_status_check",
+ ),
+ CheckConstraint(
+ "priority IS NULL OR priority IN ('low', 'medium', 'high', 'urgent')",
+ name="task_priority_check",
+ ),
+ Index("task_due_date_idx", "due_date"),
+ Index("task_status_idx", "status"),
+ Index("task_priority_idx", "priority"),
+ Index("task_source_item_idx", "source_item_id"),
+ )
+
+ def __init__(self, **kwargs):
+ if not kwargs.get("modality"):
+ kwargs["modality"] = "task"
+ super().__init__(**kwargs)
+
+ def as_payload(self) -> TaskPayload:
+ return TaskPayload(
+ **super().as_payload(),
+ title=cast(str, self.task_title),
+            due_date=self.due_date.isoformat() if self.due_date else None,
+ priority=cast(str | None, self.priority),
+ status=cast(str, self.status),
+ recurrence=cast(str | None, self.recurrence),
+ source_item_id=cast(int | None, self.source_item_id),
+ )
+
+ @property
+ def display_contents(self) -> dict:
+ return {
+ "title": self.task_title,
+ "description": self.content,
+ "due_date": self.due_date and self.due_date.isoformat(),
+ "priority": self.priority,
+ "status": self.status,
+ "recurrence": self.recurrence,
+ "tags": self.tags,
+ }
+
+ def _chunk_contents(self) -> Sequence[extract.DataChunk]:
+ parts = [cast(str, self.task_title)]
+ if self.content:
+ parts.append(cast(str, self.content))
+ if self.due_date:
+ parts.append(f"Due: {self.due_date.isoformat()}")
+ text = "\n\n".join(parts)
+ return extract.extract_text(text, modality="task")
+
+ @classmethod
+ def get_collections(cls) -> list[str]:
+ return ["task"]
+
+ @property
+ def title(self) -> str | None:
+ return cast(str | None, self.task_title)
+
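+# Illustrative Task construction (made-up values):
+#   Task(
+#       task_title="File quarterly report",
+#       due_date=datetime(2026, 4, 15, tzinfo=timezone.utc),
+#       priority="high",
+#       status="pending",
+#       tags=["finance"],
+#   )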
+
+class CalendarEventPayload(SourceItemPayload):
+ event_title: Annotated[str, "Title of the event"]
+ start_time: Annotated[str, "Start time in ISO format"]
+ end_time: Annotated[str | None, "End time in ISO format"]
+ all_day: Annotated[bool, "Whether this is an all-day event"]
+ location: Annotated[str | None, "Event location"]
+ recurrence_rule: Annotated[str | None, "Recurrence rule (RRULE format)"]
+ calendar_account_id: Annotated[int | None, "Calendar account this event belongs to"]
+ calendar_name: Annotated[str | None, "Name of the calendar"]
+ external_id: Annotated[str | None, "External calendar ID for sync"]
+ event_metadata: Annotated[dict | None, "Additional metadata (attendees, links, etc.)"]
+
+
+class CalendarEvent(SourceItem):
+ """Calendar event from external calendar sources (CalDAV, Google, etc.)."""
+
+ __tablename__ = "calendar_event"
+
+ id = Column(
+ BigInteger, ForeignKey("source_item.id", ondelete="CASCADE"), primary_key=True
+ )
+
+ # Core event fields
+ event_title = Column(Text, nullable=False)
+ start_time = Column(DateTime(timezone=True), nullable=False)
+ end_time = Column(DateTime(timezone=True), nullable=True)
+ all_day = Column(Boolean, default=False, nullable=False)
+ location = Column(Text, nullable=True)
+ recurrence_rule = Column(Text, nullable=True) # RRULE format
+
+ # Sync metadata
+ calendar_account_id = Column(
+ BigInteger, ForeignKey("calendar_accounts.id", ondelete="SET NULL"), nullable=True
+ )
+ calendar_name = Column(Text, nullable=True)
+ external_id = Column(Text, nullable=True) # For dedup/sync
+
+ # Relationship
+ calendar_account = relationship("CalendarAccount", foreign_keys=[calendar_account_id])
+
+ # Flexible metadata (attendees, meeting links, conference info, etc.)
+ event_metadata = Column(JSONB, default=dict)
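+    # Illustrative shape, as populated by the calendar sync workers:
+    #   {"attendees": ["alice@example.com"], "meeting_link": "https://meet.example.com/abc"}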
+
+ __mapper_args__ = {
+ "polymorphic_identity": "calendar_event",
+ }
+
+ __table_args__ = (
+ Index("calendar_event_start_idx", "start_time"),
+ Index("calendar_event_end_idx", "end_time"),
+ Index("calendar_event_account_idx", "calendar_account_id"),
+ Index("calendar_event_calendar_idx", "calendar_name"),
+ Index(
+ "calendar_event_external_idx",
+ "calendar_account_id",
+ "external_id",
+ unique=True,
+ postgresql_where="external_id IS NOT NULL",
+ ),
+ )
+
+ def __init__(self, **kwargs):
+ if not kwargs.get("modality"):
+ kwargs["modality"] = "calendar"
+ super().__init__(**kwargs)
+
+ def as_payload(self) -> CalendarEventPayload:
+ return CalendarEventPayload(
+ **super().as_payload(),
+ event_title=cast(str, self.event_title),
+ start_time=cast(datetime, self.start_time).isoformat(),
+            end_time=self.end_time.isoformat() if self.end_time else None,
+ all_day=cast(bool, self.all_day),
+ location=cast(str | None, self.location),
+ recurrence_rule=cast(str | None, self.recurrence_rule),
+ calendar_account_id=cast(int | None, self.calendar_account_id),
+ calendar_name=cast(str | None, self.calendar_name),
+ external_id=cast(str | None, self.external_id),
+ event_metadata=cast(dict | None, self.event_metadata),
+ )
+
+ @property
+ def display_contents(self) -> dict:
+ return {
+ "title": self.event_title,
+ "description": self.content,
+ "start_time": cast(datetime, self.start_time).isoformat(),
+ "end_time": self.end_time and self.end_time.isoformat(),
+ "all_day": self.all_day,
+ "location": self.location,
+ "calendar": self.calendar_name,
+ "attendees": (self.event_metadata or {}).get("attendees"),
+ "tags": self.tags,
+ }
+
+ def _chunk_contents(self) -> Sequence[extract.DataChunk]:
+ parts = [cast(str, self.event_title)]
+
+ if self.content:
+ parts.append(cast(str, self.content))
+
+ if self.location:
+ parts.append(f"Location: {self.location}")
+
+ metadata = cast(dict | None, self.event_metadata) or {}
+ if attendees := metadata.get("attendees"):
+ if isinstance(attendees, list):
+ parts.append(f"Attendees: {', '.join(str(a) for a in attendees)}")
+
+ if meeting_link := metadata.get("meeting_link"):
+ parts.append(f"Meeting link: {meeting_link}")
+
+ text = "\n\n".join(parts)
+ return extract.extract_text(text, modality="calendar")
+
+ @classmethod
+ def get_collections(cls) -> list[str]:
+ return ["calendar"]
+
+ @property
+ def title(self) -> str | None:
+ return cast(str | None, self.event_title)
diff --git a/src/memory/common/db/models/sources.py b/src/memory/common/db/models/sources.py
index 096179b..c97b8b9 100644
--- a/src/memory/common/db/models/sources.py
+++ b/src/memory/common/db/models/sources.py
@@ -376,3 +376,56 @@ class GoogleFolder(Base):
UniqueConstraint("account_id", "folder_id", name="unique_folder_per_account"),
Index("google_folders_active_idx", "active", "last_sync_at"),
)
+
+
+class CalendarAccount(Base):
+ """Calendar source for syncing events (CalDAV, Google Calendar, etc.)."""
+
+ __tablename__ = "calendar_accounts"
+
+ id = Column(BigInteger, primary_key=True)
+ name = Column(Text, nullable=False) # Display name
+
+ # Calendar type
+ calendar_type = Column(Text, nullable=False) # 'caldav', 'google'
+
+ # For CalDAV (Radicale, etc.)
+ caldav_url = Column(Text, nullable=True) # CalDAV server URL
+ caldav_username = Column(Text, nullable=True)
+ caldav_password = Column(Text, nullable=True)
+
+ # For Google Calendar - link to existing GoogleAccount
+ google_account_id = Column(
+ BigInteger, ForeignKey("google_accounts.id", ondelete="SET NULL"), nullable=True
+ )
+
+ # Which calendars to sync (empty = all)
+ calendar_ids = Column(ARRAY(Text), nullable=False, server_default="{}")
+
+ # Tags to apply to all events from this account
+ tags = Column(ARRAY(Text), nullable=False, server_default="{}")
+
+ # Sync configuration
+ check_interval = Column(Integer, nullable=False, server_default="15") # Minutes
+ sync_past_days = Column(Integer, nullable=False, server_default="30") # How far back
+ sync_future_days = Column(Integer, nullable=False, server_default="90") # How far ahead
+ last_sync_at = Column(DateTime(timezone=True), nullable=True)
+ sync_error = Column(Text, nullable=True)
+
+ # Status
+ active = Column(Boolean, nullable=False, server_default="true")
+ created_at = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now()
+ )
+ updated_at = Column(
+ DateTime(timezone=True), nullable=False, server_default=func.now()
+ )
+
+ # Relationships
+ google_account = relationship("GoogleAccount", foreign_keys=[google_account_id])
+
+ __table_args__ = (
+ CheckConstraint("calendar_type IN ('caldav', 'google')"),
+ Index("calendar_accounts_active_idx", "active", "last_sync_at"),
+ Index("calendar_accounts_type_idx", "calendar_type"),
+ )
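+
+
+# Illustrative example (made-up values) of a CalDAV account limited to two
+# named calendars:
+#   CalendarAccount(
+#       name="Personal",
+#       calendar_type="caldav",
+#       caldav_url="https://radicale.example.com/user/",
+#       caldav_username="user",
+#       caldav_password="...",
+#       calendar_ids=["work", "family"],
+#       tags=["calendar"],
+#   )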
diff --git a/src/memory/common/settings.py b/src/memory/common/settings.py
index e85d992..1e62653 100644
--- a/src/memory/common/settings.py
+++ b/src/memory/common/settings.py
@@ -38,7 +38,7 @@ DB_URL = os.getenv("DATABASE_URL", make_db_url())
REDIS_HOST = os.getenv("REDIS_HOST", "redis")
REDIS_PORT = os.getenv("REDIS_PORT", "6379")
REDIS_DB = os.getenv("REDIS_DB", "0")
-REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", None)
+REDIS_PASSWORD = os.getenv("REDIS_PASSWORD") or None # Treat empty string as None
if REDIS_PASSWORD:
REDIS_URL = f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}"
else:
@@ -180,7 +180,9 @@ ENABLE_BM25_SEARCH = boolean_env("ENABLE_BM25_SEARCH", True)
ENABLE_SEARCH_SCORING = boolean_env("ENABLE_SEARCH_SCORING", True)
ENABLE_HYDE_EXPANSION = boolean_env("ENABLE_HYDE_EXPANSION", True)
HYDE_TIMEOUT = float(os.getenv("HYDE_TIMEOUT", "3.0"))
-ENABLE_QUERY_ANALYSIS = boolean_env("ENABLE_QUERY_ANALYSIS", True) # Runs in parallel with HyDE
+ENABLE_QUERY_ANALYSIS = boolean_env(
+ "ENABLE_QUERY_ANALYSIS", True
+) # Runs in parallel with HyDE
ENABLE_RERANKING = boolean_env("ENABLE_RERANKING", True)
RERANK_MODEL = os.getenv("RERANK_MODEL", "rerank-2-lite")
MAX_PREVIEW_LENGTH = int(os.getenv("MAX_PREVIEW_LENGTH", DEFAULT_CHUNK_TOKENS * 16))
@@ -248,7 +250,9 @@ S3_BACKUP_INTERVAL = int(
# Google OAuth settings
GOOGLE_CLIENT_ID = os.getenv("GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.getenv("GOOGLE_CLIENT_SECRET", "")
-GOOGLE_REDIRECT_URI = os.getenv("GOOGLE_REDIRECT_URI", f"{SERVER_URL}/auth/callback/google")
+GOOGLE_REDIRECT_URI = os.getenv(
+ "GOOGLE_REDIRECT_URI", f"{SERVER_URL}/auth/callback/google"
+)
GOOGLE_SCOPES = [
"https://www.googleapis.com/auth/drive.readonly",
"https://www.googleapis.com/auth/userinfo.email",
diff --git a/src/memory/workers/tasks/__init__.py b/src/memory/workers/tasks/__init__.py
index 7e9cd0f..d1ca430 100644
--- a/src/memory/workers/tasks/__init__.py
+++ b/src/memory/workers/tasks/__init__.py
@@ -5,6 +5,7 @@ Import sub-modules so Celery can register their @app.task decorators.
from memory.workers.tasks import (
backup,
blogs,
+ calendar,
comic,
discord,
ebook,
@@ -23,6 +24,7 @@ from memory.workers.tasks import (
__all__ = [
"backup",
"blogs",
+ "calendar",
"comic",
"discord",
"ebook",
diff --git a/src/memory/workers/tasks/calendar.py b/src/memory/workers/tasks/calendar.py
new file mode 100644
index 0000000..3238bf3
--- /dev/null
+++ b/src/memory/workers/tasks/calendar.py
@@ -0,0 +1,520 @@
+"""Celery tasks for calendar syncing (CalDAV, Google Calendar)."""
+
+import hashlib
+import logging
+from datetime import datetime, timedelta, timezone
+from typing import Any, TypedDict, cast
+
+import caldav
+from sqlalchemy.orm import Session
+
+from memory.common.celery_app import (
+ SYNC_ALL_CALENDARS,
+ SYNC_CALENDAR_ACCOUNT,
+ SYNC_CALENDAR_EVENT,
+ app,
+)
+from memory.common.db.connection import make_session
+from memory.common.db.models import CalendarEvent
+from memory.common.db.models.sources import CalendarAccount
+from memory.parsers.google_drive import refresh_credentials
+from memory.workers.tasks.content_processing import (
+ create_task_result,
+ process_content_item,
+ safe_task_execution,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class EventData(TypedDict, total=False):
+ """Structured event data for calendar sync.
+
+ Required fields: title, start_time
+ """
+
+ title: str # Required
+ start_time: datetime # Required
+ end_time: datetime | None
+ all_day: bool
+ description: str
+ location: str | None
+ external_id: str | None
+ calendar_name: str
+ recurrence_rule: str | None
+ attendees: list[str]
+ meeting_link: str | None
+
+
+# -----------------------------------------------------------------------------
+# Helper functions
+# -----------------------------------------------------------------------------
+
+
+def _get_ical_component(event: Any) -> Any:
+ """Get the VEVENT component from a caldav event using icalendar."""
+ ical = event.icalendar_instance
+ for component in ical.walk():
+ if component.name == "VEVENT":
+ return component
+ return None
+
+
+def _get_vevent_attr(vevent: Any, attr: str, default: Any = None) -> Any:
+ """Safely get an attribute from an icalendar VEVENT component.
+
+ For rrule attributes, converts the rrule object to its string representation.
+ """
+ component = _get_ical_component(vevent)
+ if component is None:
+ return default
+
+ value = component.get(attr)
+ if value is None:
+ return default
+
+ # For date/datetime properties, extract the actual value
+ if hasattr(value, "dt"):
+ return value.dt
+
+ # rrule is a special case - it's an object that needs string conversion
+ if attr == "rrule" and value is not None:
+ return str(value.to_ical().decode("utf-8"))
+
+ return value
+
+
+def _create_event_hash(event_data: EventData) -> bytes:
+ """Create a hash for deduplication based on event content."""
+ content = (
+ f"{event_data.get('title', '')}"
+ f"{event_data.get('start_time', '')}"
+ f"{event_data.get('description', '')}"
+ )
+ return hashlib.sha256(content.encode()).digest()
+
+
+def _serialize_event_data(event_data: EventData) -> dict[str, Any]:
+ """Serialize event data for Celery task passing (datetime -> ISO string)."""
+ serialized: dict[str, Any] = dict(event_data)
+ if isinstance(serialized.get("start_time"), datetime):
+ serialized["start_time"] = serialized["start_time"].isoformat()
+ if isinstance(serialized.get("end_time"), datetime):
+ serialized["end_time"] = serialized["end_time"].isoformat()
+ return serialized
+
+
+def _deserialize_event_data(data: dict[str, Any]) -> EventData:
+ """Deserialize event data from Celery task (ISO string -> datetime)."""
+ result = dict(data)
+ if isinstance(result.get("start_time"), str):
+ result["start_time"] = datetime.fromisoformat(result["start_time"])
+ if isinstance(result.get("end_time"), str):
+ result["end_time"] = datetime.fromisoformat(result["end_time"])
+ return cast(EventData, result)
+
+
+def _ensure_timezone(dt: datetime | None) -> datetime | None:
+ """Ensure datetime has timezone info, defaulting to UTC."""
+ if dt is None:
+ return None
+ return dt if dt.tzinfo else dt.replace(tzinfo=timezone.utc)
+
+
+def _create_calendar_event(
+    account: CalendarAccount, event_data: EventData
+) -> CalendarEvent:
+ """Create a CalendarEvent model from parsed event data."""
+ account_tags = cast(list[str], account.tags) or []
+
+ metadata: dict[str, Any] = {}
+ if event_data.get("attendees"):
+ metadata["attendees"] = event_data["attendees"]
+ if event_data.get("meeting_link"):
+ metadata["meeting_link"] = event_data["meeting_link"]
+
+ return CalendarEvent(
+ modality="calendar",
+ sha256=_create_event_hash(event_data),
+ content=event_data.get("description", ""),
+ event_title=event_data["title"],
+ start_time=event_data["start_time"],
+ end_time=event_data.get("end_time"),
+ all_day=event_data.get("all_day", False),
+ location=event_data.get("location"),
+ recurrence_rule=event_data.get("recurrence_rule"),
+ calendar_account_id=account.id,
+ calendar_name=event_data.get("calendar_name"),
+ external_id=event_data.get("external_id"),
+ event_metadata=metadata,
+ tags=account_tags,
+ )
+
+
+def _update_existing_event(existing: CalendarEvent, event_data: EventData) -> None:
+ """Update an existing CalendarEvent with new data."""
+ existing.event_title = event_data["title"]
+ existing.start_time = event_data["start_time"]
+ existing.end_time = event_data.get("end_time")
+ existing.all_day = event_data.get("all_day", False)
+ existing.location = event_data.get("location")
+ existing.content = event_data.get("description", "")
+ existing.recurrence_rule = event_data.get("recurrence_rule")
+
+    # Copy before mutating so SQLAlchemy sees a new value and persists the change
+    metadata = dict(existing.event_metadata or {})
+ if event_data.get("attendees"):
+ metadata["attendees"] = event_data["attendees"]
+ if event_data.get("meeting_link"):
+ metadata["meeting_link"] = event_data["meeting_link"]
+ existing.event_metadata = metadata
+
+
+# -----------------------------------------------------------------------------
+# CalDAV parsing
+# -----------------------------------------------------------------------------
+
+
+def _parse_caldav_event(vevent: Any, calendar_name: str) -> EventData:
+ """Parse a CalDAV VEVENT into EventData format."""
+ summary = _get_vevent_attr(vevent, "summary", "Untitled Event")
+ dtstart = _get_vevent_attr(vevent, "dtstart")
+ dtend = _get_vevent_attr(vevent, "dtend")
+
+ if dtstart is None:
+ raise ValueError("Calendar event missing required start time (dtstart)")
+
+ # All-day events use date objects, timed events use datetime
+ all_day = not hasattr(dtstart, "hour")
+
+ if all_day:
+        start_time = datetime.combine(dtstart, datetime.min.time()).replace(
+            tzinfo=timezone.utc
+        )
+ end_time = (
+ datetime.combine(dtend, datetime.min.time()).replace(tzinfo=timezone.utc)
+ if dtend
+ else None
+ )
+ else:
+ start_time = dtstart if dtstart.tzinfo else dtstart.replace(tzinfo=timezone.utc)
+ end_time = _ensure_timezone(dtend)
+
+ # Parse attendees
+ attendees: list[str] = []
+ raw_attendees = _get_vevent_attr(vevent, "attendee")
+ if raw_attendees:
+        attendee_list = (
+            raw_attendees if isinstance(raw_attendees, list) else [raw_attendees]
+        )
+ attendees = [str(a).replace("mailto:", "") for a in attendee_list]
+
+ return EventData(
+ title=str(summary),
+ start_time=start_time,
+ end_time=end_time,
+ all_day=all_day,
+ description=str(_get_vevent_attr(vevent, "description", "")),
+ location=_get_vevent_attr(vevent, "location"),
+ external_id=_get_vevent_attr(vevent, "uid"),
+ calendar_name=calendar_name,
+ recurrence_rule=_get_vevent_attr(vevent, "rrule"),
+ attendees=attendees,
+ )
+
+
+def _fetch_caldav_events(
+ url: str,
+ username: str,
+ password: str,
+ calendar_ids: list[str],
+ since: datetime,
+ until: datetime,
+) -> list[EventData]:
+ """Fetch events from a CalDAV server.
+
+ Fetches ALL events (not date-filtered) to preserve recurring events with RRULE.
+ Recurring events are expanded at query time, not sync time.
+ """
+ client = caldav.DAVClient(url=url, username=username, password=password)
+ principal = client.principal()
+ events: list[EventData] = []
+
+ for calendar in principal.calendars():
+ calendar_name = calendar.name or "Unknown"
+
+ if calendar_ids and calendar.id not in calendar_ids:
+ continue
+
+ try:
+ # Fetch ALL events to get recurring events with RRULE intact
+ # We expand recurring events at query time, not sync time
+ vevents = calendar.events()
+ for vevent in vevents:
+ try:
+ events.append(_parse_caldav_event(vevent, calendar_name))
+ except Exception as e:
+ logger.error(f"Error parsing CalDAV event from {calendar_name}: {e}")
+ except Exception as e:
+ logger.error(f"Error fetching events from calendar {calendar_name}: {e}")
+
+ return events
+
+
+# -----------------------------------------------------------------------------
+# Google Calendar parsing
+# -----------------------------------------------------------------------------
+
+
+def _parse_google_event(event: dict[str, Any], calendar_name: str) -> EventData:
+ """Parse a Google Calendar event into EventData format."""
+ start = event.get("start", {})
+ end = event.get("end", {})
+ all_day = "date" in start
+
+ if all_day:
+ start_time = datetime.fromisoformat(start["date"]).replace(tzinfo=timezone.utc)
+ end_time = (
+ datetime.fromisoformat(end["date"]).replace(tzinfo=timezone.utc)
+ if end.get("date")
+ else None
+ )
+ else:
+ start_time = datetime.fromisoformat(start["dateTime"].replace("Z", "+00:00"))
+ end_time = (
+ datetime.fromisoformat(end["dateTime"].replace("Z", "+00:00"))
+ if end.get("dateTime")
+ else None
+ )
+
+ # Extract attendee emails
+ attendees = [a["email"] for a in event.get("attendees", []) if a.get("email")]
+
+ # Extract meeting link from hangoutLink or conferenceData
+ meeting_link = event.get("hangoutLink")
+ if not meeting_link and "conferenceData" in event:
+ for ep in event["conferenceData"].get("entryPoints", []):
+ if ep.get("entryPointType") == "video":
+ meeting_link = ep.get("uri")
+ break
+
+    # Google returns recurrence entries like "RRULE:FREQ=..." (possibly with
+    # EXDATE/RDATE lines); keep the first RRULE and strip the prefix so it
+    # matches the bare format stored for CalDAV events.
+    rules = [r for r in event.get("recurrence", []) if r.startswith("RRULE:")]
+    recurrence_rule = rules[0].removeprefix("RRULE:") if rules else None
+
+ return EventData(
+ title=event.get("summary", "Untitled Event"),
+ start_time=start_time,
+ end_time=end_time,
+ all_day=all_day,
+ description=event.get("description", ""),
+ location=event.get("location"),
+ external_id=event.get("id"),
+ calendar_name=calendar_name,
+ recurrence_rule=recurrence_rule,
+ attendees=attendees,
+ meeting_link=meeting_link,
+ )
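+
+# Minimal input this handles (illustrative, following the Calendar API event
+# resource):
+#   {"summary": "Standup",
+#    "start": {"dateTime": "2024-01-15T09:00:00Z"},
+#    "end": {"dateTime": "2024-01-15T09:15:00Z"},
+#    "attendees": [{"email": "alice@example.com"}],
+#    "recurrence": ["RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"]}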
+
+
+def _fetch_google_calendar_events(
+ account: CalendarAccount,
+ calendar_ids: list[str],
+ since: datetime,
+ until: datetime,
+ session: Session,
+) -> list[EventData]:
+ """Fetch events from Google Calendar using existing GoogleAccount."""
+ google_account = account.google_account
+ if not google_account:
+ raise ValueError("Google Calendar account requires linked GoogleAccount")
+
+ credentials = refresh_credentials(google_account, session)
+
+ try:
+ from googleapiclient.discovery import build
+ except ImportError as e:
+ raise ImportError("google-api-python-client not installed") from e
+
+ service = build("calendar", "v3", credentials=credentials)
+ events: list[EventData] = []
+
+ time_min = since.isoformat()
+ time_max = until.isoformat()
+
+ # Determine which calendars to sync
+ calendars_to_sync = calendar_ids
+ if not calendars_to_sync:
+ try:
+ calendar_list = service.calendarList().list().execute()
+ calendars_to_sync = [cal["id"] for cal in calendar_list.get("items", [])]
+ except Exception as e:
+ logger.error(f"Error fetching calendar list, falling back to primary: {e}")
+ calendars_to_sync = ["primary"]
+
+ for calendar_id in calendars_to_sync:
+ try:
+ # Get calendar display name
+ try:
+ cal_info = service.calendars().get(calendarId=calendar_id).execute()
+ calendar_name = cal_info.get("summary", calendar_id)
+ except Exception:
+ calendar_name = calendar_id
+
+ events_result = (
+ service.events()
+ .list(
+ calendarId=calendar_id,
+ timeMin=time_min,
+ timeMax=time_max,
+ singleEvents=True,
+ orderBy="startTime",
+ )
+ .execute()
+ )
+
+ for event in events_result.get("items", []):
+ try:
+ events.append(_parse_google_event(event, calendar_name))
+ except Exception as e:
+ logger.error(f"Error parsing Google event: {e}")
+
+ except Exception as e:
+ logger.error(f"Error fetching events from calendar {calendar_id}: {e}")
+
+ return events
+
+
+# -----------------------------------------------------------------------------
+# Celery tasks
+# -----------------------------------------------------------------------------
+
+
+@app.task(name=SYNC_CALENDAR_EVENT)
+@safe_task_execution
+def sync_calendar_event(
+    account_id: int, event_data_raw: dict[str, Any]
+) -> dict[str, Any]:
+ """Sync a single calendar event."""
+ event_data = _deserialize_event_data(event_data_raw)
+ logger.info(f"Syncing calendar event: {event_data.get('title')}")
+
+ with make_session() as session:
+ account = session.get(CalendarAccount, account_id)
+ if not account:
+ return {"status": "error", "error": "Account not found"}
+
+ # Check for existing event by external_id
+ external_id = event_data.get("external_id")
+ existing = None
+ if external_id:
+ existing = (
+ session.query(CalendarEvent)
+ .filter(
+ CalendarEvent.calendar_account_id == account_id,
+ CalendarEvent.external_id == external_id,
+ )
+ .first()
+ )
+
+ if existing:
+ _update_existing_event(existing, event_data)
+ session.commit()
+ return create_task_result(existing, "updated")
+
+ calendar_event = _create_calendar_event(account, event_data)
+ return process_content_item(calendar_event, session)
+
+
+@app.task(name=SYNC_CALENDAR_ACCOUNT)
+@safe_task_execution
+def sync_calendar_account(account_id: int, force_full: bool = False) -> dict[str, Any]:
+ """Sync all events from a calendar account."""
+ logger.info(f"Syncing calendar account {account_id}")
+
+ with make_session() as session:
+ account = session.get(CalendarAccount, account_id)
+ if not account or not cast(bool, account.active):
+ return {"status": "error", "error": "Account not found or inactive"}
+
+ now = datetime.now(timezone.utc)
+ last_sync = cast(datetime | None, account.last_sync_at)
+
+ # Skip if recently synced (unless force_full)
+ if last_sync and not force_full:
+ check_interval = cast(int, account.check_interval)
+ if now - last_sync < timedelta(minutes=check_interval):
+ return {"status": "skipped_recent_check", "account_id": account_id}
+
+ # Calculate sync window
+ sync_past = cast(int, account.sync_past_days)
+ sync_future = cast(int, account.sync_future_days)
+ since = now - timedelta(days=sync_past)
+ until = now + timedelta(days=sync_future)
+
+ calendar_type = cast(str, account.calendar_type)
+ calendar_ids = cast(list[str], account.calendar_ids) or []
+
+ try:
+ if calendar_type == "caldav":
+ caldav_url = cast(str, account.caldav_url)
+ caldav_username = cast(str, account.caldav_username)
+ caldav_password = cast(str, account.caldav_password)
+
+ if not all([caldav_url, caldav_username, caldav_password]):
+ return {"status": "error", "error": "CalDAV credentials incomplete"}
+
+ events = _fetch_caldav_events(
+ caldav_url, caldav_username, caldav_password, calendar_ids, since, until
+ )
+ elif calendar_type == "google":
+ events = _fetch_google_calendar_events(
+ account, calendar_ids, since, until, session
+ )
+ else:
+ return {"status": "error", "error": f"Unknown calendar type: {calendar_type}"}
+
+ # Queue sync tasks for each event
+ task_ids = []
+ for event_data in events:
+ try:
+ serialized = _serialize_event_data(event_data)
+ task = sync_calendar_event.delay(account.id, serialized)
+ task_ids.append(task.id)
+ except Exception as e:
+ logger.error(f"Error queuing event {event_data.get('title')}: {e}")
+
+ account.last_sync_at = now
+ account.sync_error = None
+ session.commit()
+
+ except Exception as e:
+ account.sync_error = str(e)
+ session.commit()
+ raise
+
+ return {
+ "status": "completed",
+ "sync_type": "full" if force_full else "incremental",
+ "account_id": account_id,
+ "account_name": account.name,
+ "calendar_type": calendar_type,
+ "events_synced": len(task_ids),
+ "task_ids": task_ids,
+ }
+
+
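+# Fan-out sketch: sync_all_calendars queues one sync_calendar_account task per
+# active account, and each of those queues one sync_calendar_event task per
+# fetched event, so a bad event only fails its own task.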
+@app.task(name=SYNC_ALL_CALENDARS)
+def sync_all_calendars(force_full: bool = False) -> list[dict[str, Any]]:
+ """Trigger sync for all active calendar accounts."""
+ with make_session() as session:
+ active_accounts = (
+ session.query(CalendarAccount).filter(CalendarAccount.active).all()
+ )
+
+ results = [
+ {
+ "account_id": account.id,
+ "account_name": account.name,
+ "calendar_type": account.calendar_type,
+ "task_id": sync_calendar_account.delay(account.id, force_full=force_full).id,
+ }
+ for account in active_accounts
+ ]
+
+ logger.info(
+ f"Scheduled {'full' if force_full else 'incremental'} sync "
+ f"for {len(results)} active calendar accounts"
+ )
+ return results
diff --git a/tests/memory/common/test_calendar.py b/tests/memory/common/test_calendar.py
new file mode 100644
index 0000000..89163a2
--- /dev/null
+++ b/tests/memory/common/test_calendar.py
@@ -0,0 +1,442 @@
+"""Tests for common calendar utilities."""
+
+import pytest
+from datetime import datetime, timedelta, timezone
+
+from memory.common.calendar import (
+ expand_recurring_event,
+ event_to_dict,
+ get_events_in_range,
+ parse_date_range,
+ EventDict,
+)
+from memory.common.db.models import CalendarEvent
+from memory.common.db.models.sources import CalendarAccount
+
+
+@pytest.fixture
+def calendar_account(db_session) -> CalendarAccount:
+ """Create a calendar account for testing."""
+ account = CalendarAccount(
+ name="Test Calendar",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="testuser",
+ caldav_password="testpass",
+ active=True,
+ )
+ db_session.add(account)
+ db_session.commit()
+ return account
+
+
+@pytest.fixture
+def simple_event(db_session, calendar_account) -> CalendarEvent:
+ """Create a simple non-recurring event."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"0" * 32,
+ event_title="Team Meeting",
+ start_time=datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ end_time=datetime(2024, 1, 15, 11, 0, 0, tzinfo=timezone.utc),
+ all_day=False,
+ location="Conference Room A",
+ calendar_name="Work",
+ calendar_account_id=calendar_account.id,
+ recurrence_rule=None,
+ )
+ db_session.add(event)
+ db_session.commit()
+ return event
+
+
+@pytest.fixture
+def all_day_event(db_session, calendar_account) -> CalendarEvent:
+ """Create an all-day event."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"1" * 32,
+ event_title="Holiday",
+ start_time=datetime(2024, 1, 20, 0, 0, 0, tzinfo=timezone.utc),
+ end_time=datetime(2024, 1, 21, 0, 0, 0, tzinfo=timezone.utc),
+ all_day=True,
+ location=None,
+ calendar_name="Holidays",
+ calendar_account_id=calendar_account.id,
+ recurrence_rule=None,
+ )
+ db_session.add(event)
+ db_session.commit()
+ return event
+
+
+@pytest.fixture
+def recurring_event(db_session, calendar_account) -> CalendarEvent:
+ """Create a recurring event (daily on weekdays)."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"2" * 32,
+ event_title="Daily Standup",
+ start_time=datetime(2024, 1, 1, 9, 0, 0, tzinfo=timezone.utc),
+ end_time=datetime(2024, 1, 1, 9, 15, 0, tzinfo=timezone.utc),
+ all_day=False,
+ location="Zoom",
+ calendar_name="Work",
+ calendar_account_id=calendar_account.id,
+ recurrence_rule="FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR",
+ )
+ db_session.add(event)
+ db_session.commit()
+ return event
+
+
+@pytest.fixture
+def weekly_recurring_event(db_session, calendar_account) -> CalendarEvent:
+ """Create a weekly recurring event."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"3" * 32,
+ event_title="Weekly Review",
+ start_time=datetime(2024, 1, 5, 14, 0, 0, tzinfo=timezone.utc), # Friday
+ end_time=datetime(2024, 1, 5, 15, 0, 0, tzinfo=timezone.utc),
+ all_day=False,
+ location=None,
+ calendar_name="Work",
+ calendar_account_id=calendar_account.id,
+ recurrence_rule="FREQ=WEEKLY;BYDAY=FR",
+ )
+ db_session.add(event)
+ db_session.commit()
+ return event
+
+
+# =============================================================================
+# Tests for parse_date_range
+# =============================================================================
+
+
+def test_parse_date_range_with_both_dates():
+ """Test parsing with both start and end date provided."""
+ start, end = parse_date_range("2024-01-15", "2024-01-20")
+
+ assert start.year == 2024
+ assert start.month == 1
+ assert start.day == 15
+ assert end.day == 20
+
+
+def test_parse_date_range_with_iso_format():
+ """Test parsing with full ISO format."""
+ start, end = parse_date_range(
+ "2024-01-15T10:00:00Z",
+ "2024-01-20T18:00:00Z"
+ )
+
+ assert start.hour == 10
+ assert end.hour == 18
+
+
+def test_parse_date_range_with_timezone():
+ """Test parsing with timezone offset."""
+ start, end = parse_date_range(
+ "2024-01-15T10:00:00+00:00",
+ "2024-01-20T18:00:00+00:00"
+ )
+
+ assert start.tzinfo is not None
+ assert end.tzinfo is not None
+
+
+def test_parse_date_range_defaults_to_now():
+ """Test that start defaults to now when not provided."""
+ before = datetime.now(timezone.utc)
+ start, end = parse_date_range(None, None, days=7)
+ after = datetime.now(timezone.utc)
+
+ assert before <= start <= after
+ assert end > start
+
+
+def test_parse_date_range_uses_days():
+ """Test that days parameter is used for end date."""
+ start, end = parse_date_range("2024-01-15", None, days=10)
+
+ assert start.day == 15
+ expected_end = start + timedelta(days=10)
+ assert end.day == expected_end.day
+
+
+def test_parse_date_range_invalid_start_date():
+ """Test error on invalid start date."""
+ with pytest.raises(ValueError, match="Invalid start_date"):
+ parse_date_range("not-a-date", None)
+
+
+def test_parse_date_range_invalid_end_date():
+ """Test error on invalid end date."""
+ with pytest.raises(ValueError, match="Invalid end_date"):
+ parse_date_range("2024-01-15", "not-a-date")
+
+
+# =============================================================================
+# Tests for expand_recurring_event
+# =============================================================================
+
+
+def test_expand_recurring_event_daily(recurring_event):
+ """Test expanding a daily recurring event."""
+ start = datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 19, 23, 59, 59, tzinfo=timezone.utc)
+
+ occurrences = expand_recurring_event(recurring_event, start, end)
+
+ # Mon-Fri should give us 5 occurrences
+ assert len(occurrences) == 5
+
+ # Check first occurrence
+ first_start, first_end = occurrences[0]
+ assert first_start.day == 15
+ assert first_start.hour == 9
+ assert first_end.hour == 9
+ assert first_end.minute == 15
+
+
+def test_expand_recurring_event_weekly(weekly_recurring_event):
+ """Test expanding a weekly recurring event."""
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ occurrences = expand_recurring_event(weekly_recurring_event, start, end)
+
+    # January 2024 has 4 Fridays: the 5th, 12th, 19th, and 26th
+ assert len(occurrences) >= 4
+
+ # All should be Fridays
+ for occ_start, _ in occurrences:
+ assert occ_start.weekday() == 4 # Friday
+
+
+def test_expand_recurring_event_preserves_duration(recurring_event):
+ """Test that expansion preserves event duration."""
+ start = datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 16, 23, 59, 59, tzinfo=timezone.utc)
+
+ occurrences = expand_recurring_event(recurring_event, start, end)
+
+ for occ_start, occ_end in occurrences:
+ duration = occ_end - occ_start
+ assert duration == timedelta(minutes=15)
+
+
+def test_expand_recurring_event_non_recurring_returns_empty(simple_event):
+ """Test that non-recurring events return empty list."""
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ occurrences = expand_recurring_event(simple_event, start, end)
+
+ assert occurrences == []
+
+
+def test_expand_recurring_event_no_start_time():
+ """Test handling event without start time."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"x" * 32,
+ event_title="No Start",
+ start_time=None,
+ recurrence_rule="FREQ=DAILY",
+ )
+
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ occurrences = expand_recurring_event(event, start, end)
+
+ assert occurrences == []
+
+
+def test_expand_recurring_event_invalid_rule():
+ """Test handling invalid recurrence rule."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"y" * 32,
+ event_title="Bad Rule",
+ start_time=datetime(2024, 1, 1, 9, 0, 0, tzinfo=timezone.utc),
+ recurrence_rule="INVALID_RULE",
+ )
+
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ # Should return empty list, not raise
+ occurrences = expand_recurring_event(event, start, end)
+ assert occurrences == []
+
+
+# =============================================================================
+# Tests for event_to_dict
+# =============================================================================
+
+
+def test_event_to_dict_basic(simple_event):
+ """Test converting event to dict."""
+ result = event_to_dict(simple_event)
+
+ assert result["id"] == simple_event.id
+ assert result["event_title"] == "Team Meeting"
+ assert result["location"] == "Conference Room A"
+ assert result["calendar_name"] == "Work"
+ assert result["all_day"] is False
+ assert result["recurrence_rule"] is None
+ assert "2024-01-15" in result["start_time"]
+ assert "2024-01-15" in result["end_time"]
+
+
+def test_event_to_dict_all_day(all_day_event):
+ """Test converting all-day event."""
+ result = event_to_dict(all_day_event)
+
+ assert result["all_day"] is True
+ assert result["event_title"] == "Holiday"
+
+
+def test_event_to_dict_with_override_times(simple_event):
+ """Test overriding times for recurring occurrences."""
+ override_start = datetime(2024, 2, 15, 10, 0, 0, tzinfo=timezone.utc)
+ override_end = datetime(2024, 2, 15, 11, 0, 0, tzinfo=timezone.utc)
+
+ result = event_to_dict(simple_event, override_start, override_end)
+
+ assert "2024-02-15" in result["start_time"]
+ assert "2024-02-15" in result["end_time"]
+
+
+def test_event_to_dict_no_end_time():
+ """Test event without end time."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"z" * 32,
+ event_title="Open-ended",
+ start_time=datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ end_time=None,
+ all_day=False,
+ )
+
+ result = event_to_dict(event)
+
+ assert result["end_time"] is None
+
+
+# =============================================================================
+# Tests for get_events_in_range
+# =============================================================================
+
+
+def test_get_events_in_range_simple(db_session, simple_event):
+ """Test fetching non-recurring events in range."""
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ assert len(events) == 1
+ assert events[0]["event_title"] == "Team Meeting"
+
+
+def test_get_events_in_range_excludes_out_of_range(db_session, simple_event):
+ """Test that events outside range are excluded."""
+ start = datetime(2024, 2, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 2, 28, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ assert len(events) == 0
+
+
+def test_get_events_in_range_expands_recurring(db_session, recurring_event):
+ """Test that recurring events are expanded."""
+ start = datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 19, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ # 5 weekdays
+ assert len(events) == 5
+ for event in events:
+ assert event["event_title"] == "Daily Standup"
+
+
+def test_get_events_in_range_mixed_events(
+ db_session, simple_event, all_day_event, recurring_event
+):
+ """Test fetching mix of recurring and non-recurring."""
+ start = datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 21, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ titles = [e["event_title"] for e in events]
+ assert "Team Meeting" in titles
+ assert "Holiday" in titles
+ assert "Daily Standup" in titles
+
+
+def test_get_events_in_range_sorted_by_start_time(
+ db_session, simple_event, recurring_event
+):
+ """Test that events are sorted by start time."""
+ start = datetime(2024, 1, 15, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 16, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ # Verify sorted order
+ times = [e["start_time"] for e in events]
+ assert times == sorted(times)
+
+
+def test_get_events_in_range_respects_limit(db_session, recurring_event):
+ """Test that limit parameter is respected."""
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end, limit=3)
+
+ assert len(events) == 3
+
+
+def test_get_events_in_range_empty_database(db_session, calendar_account):
+ """Test with no events in database."""
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 31, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+ assert events == []
+
+
+def test_get_events_in_range_recurring_no_end_time(db_session, calendar_account):
+ """Test recurring event without end time."""
+ event = CalendarEvent(
+ modality="calendar",
+ sha256=b"4" * 32,
+ event_title="All Day Recurring",
+ start_time=datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
+ end_time=None,
+ all_day=True,
+ calendar_account_id=calendar_account.id,
+ recurrence_rule="FREQ=WEEKLY;BYDAY=MO",
+ )
+ db_session.add(event)
+ db_session.commit()
+
+ start = datetime(2024, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
+ end = datetime(2024, 1, 15, 23, 59, 59, tzinfo=timezone.utc)
+
+ events = get_events_in_range(db_session, start, end)
+
+    # Mondays in range: Jan 1, Jan 8, and Jan 15, 2024; allow >= 2 in case the
+    # expansion excludes a boundary occurrence.
+    assert len(events) >= 2
+    for occurrence in events:
+        assert occurrence["end_time"] is None
diff --git a/tests/memory/workers/tasks/test_calendar_tasks.py b/tests/memory/workers/tasks/test_calendar_tasks.py
new file mode 100644
index 0000000..fa8e883
--- /dev/null
+++ b/tests/memory/workers/tasks/test_calendar_tasks.py
@@ -0,0 +1,680 @@
+"""Tests for calendar syncing tasks."""
+
+import pytest
+from datetime import datetime, timedelta, timezone
+from unittest.mock import Mock, patch
+
+from memory.common.db.models import CalendarEvent
+from memory.common.db.models.sources import CalendarAccount, GoogleAccount
+from memory.workers.tasks import calendar
+from memory.workers.tasks.calendar import (
+ _create_event_hash,
+ _parse_google_event,
+ _create_calendar_event,
+ _serialize_event_data,
+)
+from memory.common.db import connection as db_connection
+
+
+@pytest.fixture(autouse=True)
+def reset_db_cache():
+ """Reset the cached database engine between tests."""
+ db_connection._engine = None
+ db_connection._session_factory = None
+ db_connection._scoped_session = None
+ yield
+ db_connection._engine = None
+ db_connection._session_factory = None
+ db_connection._scoped_session = None
+
+
+@pytest.fixture
+def mock_event_data() -> dict:
+ """Mock event data for testing."""
+ return {
+ "title": "Team Meeting",
+ "start_time": datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 1, 15, 11, 0, 0, tzinfo=timezone.utc),
+ "all_day": False,
+ "description": "Weekly sync meeting with the team",
+ "location": "Conference Room A",
+ "external_id": "event-123",
+ "calendar_name": "Work",
+ "recurrence_rule": None,
+ "attendees": ["alice@example.com", "bob@example.com"],
+ "meeting_link": "https://meet.example.com/abc123",
+ }
+
+
+@pytest.fixture
+def mock_all_day_event() -> dict:
+ """Mock all-day event data."""
+ return {
+ "title": "Company Holiday",
+ "start_time": datetime(2024, 12, 25, 0, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 12, 26, 0, 0, 0, tzinfo=timezone.utc),
+ "all_day": True,
+ "description": "Christmas Day",
+ "location": None,
+ "external_id": "holiday-123",
+ "calendar_name": "Holidays",
+ "recurrence_rule": None,
+ "attendees": [],
+ }
+
+
+@pytest.fixture
+def mock_recurring_event() -> dict:
+ """Mock recurring event data."""
+ return {
+ "title": "Daily Standup",
+ "start_time": datetime(2024, 1, 15, 9, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 1, 15, 9, 15, 0, tzinfo=timezone.utc),
+ "all_day": False,
+ "description": "Quick daily sync",
+ "location": None,
+ "external_id": "standup-123",
+ "calendar_name": "Work",
+ "recurrence_rule": "FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR",
+ "attendees": ["team@example.com"],
+ }
+
+
+@pytest.fixture
+def caldav_account(db_session) -> CalendarAccount:
+ """Create a CalDAV calendar account for testing."""
+ account = CalendarAccount(
+ name="Test CalDAV",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="testuser",
+ caldav_password="testpass",
+ calendar_ids=[],
+ tags=["calendar", "test"],
+ check_interval=15,
+ sync_past_days=30,
+ sync_future_days=90,
+ active=True,
+ )
+ db_session.add(account)
+ db_session.commit()
+ return account
+
+
+@pytest.fixture
+def google_account(db_session) -> GoogleAccount:
+ """Create a Google account for testing."""
+ account = GoogleAccount(
+ name="Test Google",
+ email="test@gmail.com",
+ access_token="test_access_token",
+ refresh_token="test_refresh_token",
+ token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1),
+ scopes=["calendar"],
+ active=True,
+ )
+ db_session.add(account)
+ db_session.commit()
+ return account
+
+
+@pytest.fixture
+def google_calendar_account(db_session, google_account) -> CalendarAccount:
+ """Create a Google Calendar account for testing."""
+ account = CalendarAccount(
+ name="Test Google Calendar",
+ calendar_type="google",
+ google_account_id=google_account.id,
+ calendar_ids=[],
+ tags=["calendar", "google"],
+ check_interval=15,
+ sync_past_days=30,
+ sync_future_days=90,
+ active=True,
+ )
+ db_session.add(account)
+ db_session.commit()
+ return account
+
+
+@pytest.fixture
+def inactive_account(db_session) -> CalendarAccount:
+ """Create an inactive calendar account."""
+ account = CalendarAccount(
+ name="Inactive CalDAV",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="testuser",
+ caldav_password="testpass",
+ active=False,
+ )
+ db_session.add(account)
+ db_session.commit()
+ return account
+
+
+# =============================================================================
+# Tests for helper functions
+# =============================================================================
+
+
+def test_create_event_hash_basic(mock_event_data):
+ """Test event hash creation."""
+ hash1 = _create_event_hash(mock_event_data)
+ hash2 = _create_event_hash(mock_event_data)
+ assert hash1 == hash2
+ assert len(hash1) == 32 # SHA256 = 32 bytes
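+
+
+# A small hedged sketch (assumption: _create_event_hash depends only on the
+# event's field values, not on object identity), checking that an equal copy
+# of the event data hashes to the same digest.
+def test_create_event_hash_equal_copy(mock_event_data):
+    """Test that an equal copy of the event data produces the same hash."""
+    copied = dict(mock_event_data)
+    assert _create_event_hash(copied) == _create_event_hash(mock_event_data)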
+
+
+def test_create_event_hash_different_events():
+ """Test that different events have different hashes."""
+ event1 = {"title": "Event 1", "start_time": "2024-01-15T10:00:00Z", "description": ""}
+ event2 = {"title": "Event 2", "start_time": "2024-01-15T10:00:00Z", "description": ""}
+
+ hash1 = _create_event_hash(event1)
+ hash2 = _create_event_hash(event2)
+ assert hash1 != hash2
+
+
+def test_serialize_event_data(mock_event_data):
+ """Test event data serialization for Celery."""
+ serialized = _serialize_event_data(mock_event_data)
+
+ # Datetimes should be converted to ISO strings
+ assert isinstance(serialized["start_time"], str)
+ assert isinstance(serialized["end_time"], str)
+ assert serialized["title"] == "Team Meeting"
+
+
+def test_serialize_event_data_none_end_time():
+ """Test serialization with None end_time."""
+ event = {
+ "title": "Open Event",
+ "start_time": datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ "end_time": None,
+ }
+ serialized = _serialize_event_data(event)
+ assert serialized["end_time"] is None
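+
+
+# A hedged round-trip sketch (assumption: _serialize_event_data emits ISO-8601
+# strings compatible with datetime.fromisoformat), checking that a serialized
+# start_time parses back to the original aware datetime.
+def test_serialize_event_data_roundtrip(mock_event_data):
+    """Test that a serialized start_time parses back to the original datetime."""
+    serialized = _serialize_event_data(mock_event_data)
+    parsed = datetime.fromisoformat(serialized["start_time"])
+    assert parsed == mock_event_data["start_time"]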
+
+
+def test_parse_google_event_regular():
+ """Test parsing a regular Google Calendar event."""
+ google_event = {
+ "id": "google-event-123",
+ "summary": "Team Sync",
+ "description": "Weekly team meeting",
+ "location": "Zoom",
+ "start": {"dateTime": "2024-01-15T14:00:00Z"},
+ "end": {"dateTime": "2024-01-15T15:00:00Z"},
+ "attendees": [
+ {"email": "alice@example.com"},
+ {"email": "bob@example.com"},
+ ],
+ "hangoutLink": "https://meet.google.com/abc-123",
+ }
+
+ result = _parse_google_event(google_event, "Work Calendar")
+
+ assert result["title"] == "Team Sync"
+ assert result["external_id"] == "google-event-123"
+ assert result["calendar_name"] == "Work Calendar"
+ assert result["all_day"] is False
+ assert result["location"] == "Zoom"
+ assert result["meeting_link"] == "https://meet.google.com/abc-123"
+ assert "alice@example.com" in result["attendees"]
+ assert "bob@example.com" in result["attendees"]
+
+
+def test_parse_google_event_all_day():
+ """Test parsing an all-day Google Calendar event."""
+ google_event = {
+ "id": "holiday-event",
+ "summary": "Company Holiday",
+ "start": {"date": "2024-12-25"},
+ "end": {"date": "2024-12-26"},
+ }
+
+ result = _parse_google_event(google_event, "Holidays")
+
+ assert result["title"] == "Company Holiday"
+ assert result["all_day"] is True
+ assert result["start_time"].date().isoformat() == "2024-12-25"
+
+
+def test_parse_google_event_with_conference_data():
+ """Test parsing Google event with conference data instead of hangoutLink."""
+ google_event = {
+ "id": "meet-event",
+ "summary": "Video Call",
+ "start": {"dateTime": "2024-01-15T14:00:00Z"},
+ "end": {"dateTime": "2024-01-15T15:00:00Z"},
+ "conferenceData": {
+ "entryPoints": [
+ {"entryPointType": "phone", "uri": "tel:+1234567890"},
+ {"entryPointType": "video", "uri": "https://zoom.us/j/123456"},
+ ]
+ },
+ }
+
+ result = _parse_google_event(google_event, "Work")
+
+ assert result["meeting_link"] == "https://zoom.us/j/123456"
+
+
+def test_parse_google_event_no_description():
+ """Test parsing event without description."""
+ google_event = {
+ "id": "simple-event",
+ "summary": "Quick Meeting",
+ "start": {"dateTime": "2024-01-15T14:00:00Z"},
+ "end": {"dateTime": "2024-01-15T15:00:00Z"},
+ }
+
+ result = _parse_google_event(google_event, "Work")
+
+ assert result["description"] == ""
+ assert result["attendees"] == []
+ assert result["meeting_link"] is None
+
+
+def test_parse_google_event_with_recurrence():
+ """Test parsing event with recurrence rule."""
+ google_event = {
+ "id": "recurring-event",
+ "summary": "Daily Standup",
+ "start": {"dateTime": "2024-01-15T09:00:00Z"},
+ "end": {"dateTime": "2024-01-15T09:15:00Z"},
+ "recurrence": ["RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"],
+ }
+
+ result = _parse_google_event(google_event, "Work")
+
+ assert result["recurrence_rule"] == "RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"
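+
+
+# A hedged default-value check (assumption: _parse_google_event leaves
+# recurrence_rule unset or None when the Google payload has no "recurrence"
+# key), mirroring the other default-field tests above.
+def test_parse_google_event_no_recurrence():
+    """Test that events without a recurrence list have no recurrence rule."""
+    google_event = {
+        "id": "one-off-event",
+        "summary": "One-off Meeting",
+        "start": {"dateTime": "2024-01-15T14:00:00Z"},
+        "end": {"dateTime": "2024-01-15T15:00:00Z"},
+    }
+
+    result = _parse_google_event(google_event, "Work")
+
+    assert result.get("recurrence_rule") is None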
+
+
+# =============================================================================
+# Tests for _create_calendar_event
+# =============================================================================
+
+
+def test_create_calendar_event(caldav_account, mock_event_data):
+ """Test creating a CalendarEvent from event data."""
+ event = _create_calendar_event(caldav_account, mock_event_data)
+
+ assert event.event_title == "Team Meeting"
+ assert event.start_time == mock_event_data["start_time"]
+ assert event.end_time == mock_event_data["end_time"]
+ assert event.all_day is False
+ assert event.location == "Conference Room A"
+ assert event.external_id == "event-123"
+ assert event.calendar_account_id == caldav_account.id
+ assert event.modality == "calendar"
+ assert "calendar" in event.tags
+ assert "test" in event.tags # From account tags
+
+
+def test_create_calendar_event_with_metadata(caldav_account, mock_event_data):
+ """Test that attendees and meeting link are stored in metadata."""
+ event = _create_calendar_event(caldav_account, mock_event_data)
+
+ assert event.event_metadata is not None
+ assert event.event_metadata["attendees"] == ["alice@example.com", "bob@example.com"]
+ assert event.event_metadata["meeting_link"] == "https://meet.example.com/abc123"
+
+
+def test_create_calendar_event_no_attendees(caldav_account, mock_all_day_event):
+ """Test creating event without attendees."""
+ event = _create_calendar_event(caldav_account, mock_all_day_event)
+
+    # Attendees should be absent or empty in the metadata
+    metadata = event.event_metadata or {}
+    assert metadata.get("attendees") in (None, [])
+
+
+# =============================================================================
+# Tests for sync_calendar_event
+# =============================================================================
+
+
+def test_sync_calendar_event_new(mock_event_data, caldav_account, db_session, qdrant):
+ """Test syncing a new calendar event."""
+ serialized = _serialize_event_data(mock_event_data)
+
+ result = calendar.sync_calendar_event(caldav_account.id, serialized)
+
+ assert result["status"] == "processed"
+
+ # Verify event was created
+ event = (
+ db_session.query(CalendarEvent)
+ .filter_by(external_id="event-123")
+ .first()
+ )
+ assert event is not None
+ assert event.event_title == "Team Meeting"
+ assert event.calendar_account_id == caldav_account.id
+
+
+def test_sync_calendar_event_account_not_found(mock_event_data, db_session):
+ """Test syncing with non-existent account."""
+ serialized = _serialize_event_data(mock_event_data)
+
+ result = calendar.sync_calendar_event(99999, serialized)
+
+ assert result["status"] == "error"
+ assert "Account not found" in result["error"]
+
+
+def test_sync_calendar_event_update_existing(
+ mock_event_data, caldav_account, db_session, qdrant
+):
+ """Test updating an existing calendar event."""
+ # First sync
+ serialized = _serialize_event_data(mock_event_data)
+ calendar.sync_calendar_event(caldav_account.id, serialized)
+
+ # Update the event
+ mock_event_data["title"] = "Updated Team Meeting"
+ mock_event_data["location"] = "Conference Room B"
+ serialized = _serialize_event_data(mock_event_data)
+
+ result = calendar.sync_calendar_event(caldav_account.id, serialized)
+
+ assert result["status"] == "updated"
+
+ # Verify event was updated
+ db_session.expire_all()
+ event = (
+ db_session.query(CalendarEvent)
+ .filter_by(external_id="event-123")
+ .first()
+ )
+ assert event.event_title == "Updated Team Meeting"
+ assert event.location == "Conference Room B"
+
+
+def test_sync_calendar_event_without_external_id(caldav_account, db_session, qdrant):
+    """Test that an event without an external_id is still processed."""
+ event_data = {
+ "title": "Ad-hoc Meeting",
+ "start_time": datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 1, 15, 11, 0, 0, tzinfo=timezone.utc),
+ "all_day": False,
+ "description": "",
+ "location": None,
+ "external_id": None, # No external ID
+ "calendar_name": "Work",
+ }
+ serialized = _serialize_event_data(event_data)
+
+ result = calendar.sync_calendar_event(caldav_account.id, serialized)
+ assert result["status"] == "processed"
+
+
+# =============================================================================
+# Tests for sync_calendar_account
+# =============================================================================
+
+
+def test_sync_calendar_account_not_found(db_session):
+ """Test sync with non-existent account."""
+ result = calendar.sync_calendar_account(99999)
+
+ assert result["status"] == "error"
+ assert "Account not found or inactive" in result["error"]
+
+
+def test_sync_calendar_account_inactive(inactive_account, db_session):
+ """Test sync with inactive account."""
+ result = calendar.sync_calendar_account(inactive_account.id)
+
+ assert result["status"] == "error"
+ assert "Account not found or inactive" in result["error"]
+
+
+@pytest.mark.parametrize(
+ "check_interval_minutes,seconds_since_check,should_skip",
+ [
+ (15, 60, True), # 15min interval, checked 1min ago -> skip
+ (15, 800, True), # 15min interval, checked 13min ago -> skip
+ (15, 1000, False), # 15min interval, checked 16min ago -> don't skip
+ (30, 1000, True), # 30min interval, checked 16min ago -> skip
+ (30, 2000, False), # 30min interval, checked 33min ago -> don't skip
+ ],
+)
+def test_sync_calendar_account_check_interval(
+ check_interval_minutes,
+ seconds_since_check,
+ should_skip,
+ db_session,
+):
+ """Test sync respects check interval."""
+ from sqlalchemy import text
+
+ account = CalendarAccount(
+ name="Interval Test",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="user",
+ caldav_password="pass",
+ check_interval=check_interval_minutes,
+ active=True,
+ )
+ db_session.add(account)
+ db_session.flush()
+
+ # Set last_sync_at
+ last_sync_time = datetime.now(timezone.utc) - timedelta(seconds=seconds_since_check)
+ db_session.execute(
+ text(
+ "UPDATE calendar_accounts SET last_sync_at = :timestamp WHERE id = :account_id"
+ ),
+ {"timestamp": last_sync_time, "account_id": account.id},
+ )
+ db_session.commit()
+
+ result = calendar.sync_calendar_account(account.id)
+
+ if should_skip:
+ assert result["status"] == "skipped_recent_check"
+ else:
+        # The sync is attempted (it may fail against the fake CalDAV URL),
+        # but it must not be skipped for a recent check.
+        assert result["status"] != "skipped_recent_check"
+
+
+def test_sync_calendar_account_force_full_bypasses_interval(db_session):
+ """Test force_full bypasses check interval."""
+ from sqlalchemy import text
+
+ account = CalendarAccount(
+ name="Force Test",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="user",
+ caldav_password="pass",
+ check_interval=60,
+ active=True,
+ )
+ db_session.add(account)
+ db_session.flush()
+
+ # Set recent last_sync_at
+ last_sync_time = datetime.now(timezone.utc) - timedelta(seconds=30)
+ db_session.execute(
+ text(
+ "UPDATE calendar_accounts SET last_sync_at = :timestamp WHERE id = :account_id"
+ ),
+ {"timestamp": last_sync_time, "account_id": account.id},
+ )
+ db_session.commit()
+
+ # Even with recent sync, force_full should proceed
+ # (It will fail due to fake caldav URL, but won't be skipped)
+ result = calendar.sync_calendar_account(account.id, force_full=True)
+
+ assert result["status"] != "skipped_recent_check"
+
+
+def test_sync_calendar_account_incomplete_caldav_credentials(db_session):
+ """Test sync fails gracefully with incomplete CalDAV credentials."""
+ account = CalendarAccount(
+ name="Incomplete CalDAV",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username=None, # Missing username
+ caldav_password=None, # Missing password
+ active=True,
+ )
+ db_session.add(account)
+ db_session.commit()
+
+ result = calendar.sync_calendar_account(account.id)
+
+ assert result["status"] == "error"
+ assert "incomplete" in result["error"].lower()
+
+
+@patch("memory.workers.tasks.calendar._fetch_caldav_events")
+@patch("memory.workers.tasks.calendar.sync_calendar_event")
+def test_sync_calendar_account_caldav_success(
+ mock_sync_event, mock_fetch, caldav_account, db_session
+):
+ """Test successful CalDAV sync."""
+ mock_fetch.return_value = [
+ {
+ "title": "Test Event",
+ "start_time": datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 1, 15, 11, 0, 0, tzinfo=timezone.utc),
+ "all_day": False,
+ "description": "",
+ "location": None,
+ "external_id": "caldav-1",
+ "calendar_name": "Default",
+ "recurrence_rule": None,
+ "attendees": [],
+ },
+ ]
+ mock_sync_event.delay.return_value = Mock(id="task-123")
+
+ result = calendar.sync_calendar_account(caldav_account.id)
+
+ assert result["status"] == "completed"
+ assert result["events_synced"] == 1
+ assert result["calendar_type"] == "caldav"
+ mock_sync_event.delay.assert_called_once()
+
+
+@patch("memory.workers.tasks.calendar._fetch_google_calendar_events")
+@patch("memory.workers.tasks.calendar.sync_calendar_event")
+def test_sync_calendar_account_google_success(
+ mock_sync_event, mock_fetch, google_calendar_account, db_session
+):
+ """Test successful Google Calendar sync."""
+ mock_fetch.return_value = [
+ {
+ "title": "Google Event",
+ "start_time": datetime(2024, 1, 15, 10, 0, 0, tzinfo=timezone.utc),
+ "end_time": datetime(2024, 1, 15, 11, 0, 0, tzinfo=timezone.utc),
+ "all_day": False,
+ "description": "",
+ "location": None,
+ "external_id": "google-1",
+ "calendar_name": "Primary",
+ "recurrence_rule": None,
+ "attendees": [],
+ },
+ ]
+ mock_sync_event.delay.return_value = Mock(id="task-456")
+
+ result = calendar.sync_calendar_account(google_calendar_account.id)
+
+ assert result["status"] == "completed"
+ assert result["events_synced"] == 1
+ assert result["calendar_type"] == "google"
+
+
+def test_sync_calendar_account_updates_timestamp(caldav_account, db_session):
+ """Test that sync updates last_sync_at timestamp."""
+ with patch("memory.workers.tasks.calendar._fetch_caldav_events") as mock_fetch:
+ mock_fetch.return_value = []
+
+ assert caldav_account.last_sync_at is None
+
+ calendar.sync_calendar_account(caldav_account.id)
+
+ db_session.refresh(caldav_account)
+ assert caldav_account.last_sync_at is not None
+
+
+# =============================================================================
+# Tests for sync_all_calendars
+# =============================================================================
+
+
+@patch("memory.workers.tasks.calendar.sync_calendar_account")
+def test_sync_all_calendars(mock_sync_account, db_session):
+ """Test syncing all active calendar accounts."""
+ account1 = CalendarAccount(
+ name="Account 1",
+ calendar_type="caldav",
+ caldav_url="https://caldav1.example.com",
+ caldav_username="user1",
+ caldav_password="pass1",
+ active=True,
+ )
+ account2 = CalendarAccount(
+ name="Account 2",
+ calendar_type="caldav",
+ caldav_url="https://caldav2.example.com",
+ caldav_username="user2",
+ caldav_password="pass2",
+ active=True,
+ )
+ inactive = CalendarAccount(
+ name="Inactive",
+ calendar_type="caldav",
+ caldav_url="https://caldav3.example.com",
+ caldav_username="user3",
+ caldav_password="pass3",
+ active=False,
+ )
+ db_session.add_all([account1, account2, inactive])
+ db_session.commit()
+
+ mock_sync_account.delay.side_effect = [Mock(id="task-1"), Mock(id="task-2")]
+
+ result = calendar.sync_all_calendars()
+
+ # Should only sync active accounts
+ assert len(result) == 2
+ assert result[0]["task_id"] == "task-1"
+ assert result[1]["task_id"] == "task-2"
+
+
+def test_sync_all_calendars_no_active(db_session):
+ """Test sync_all when no active accounts exist."""
+ inactive = CalendarAccount(
+ name="Inactive",
+ calendar_type="caldav",
+ caldav_url="https://caldav.example.com",
+ caldav_username="user",
+ caldav_password="pass",
+ active=False,
+ )
+ db_session.add(inactive)
+ db_session.commit()
+
+ result = calendar.sync_all_calendars()
+
+ assert result == []
+
+
+@patch("memory.workers.tasks.calendar.sync_calendar_account")
+def test_sync_all_calendars_force_full(mock_sync_account, caldav_account, db_session):
+ """Test force_full is passed through to individual syncs."""
+ mock_sync_account.delay.return_value = Mock(id="task-123")
+
+ calendar.sync_all_calendars(force_full=True)
+
+ mock_sync_account.delay.assert_called_once_with(
+ caldav_account.id, force_full=True
+ )