Mirror of https://github.com/DRYTRIX/TimeTracker.git (synced 2026-01-28 15:39:03 -06:00)
fix: complete backend implementations and integration improvements
This commit addresses multiple incomplete implementations identified in the codebase analysis, focusing on security, functionality, and error handling.

Backend Fixes:
- Issues module: Implement proper permission filtering for non-admin users
  - Users can only see issues for projects they have access to
  - Added permission checks to view_issue and edit_issue routes
  - Statistics now respect user permissions
- Push notifications: Implement proper subscription storage
  - Created PushSubscription model for browser push notification subscriptions
  - Updated routes to use new model with proper CRUD operations
  - Added support for multiple subscriptions per user
  - Added endpoint to list user subscriptions

Integration Improvements:
- GitHub: Implement webhook signature verification
  - Added HMAC SHA-256 signature verification using webhook secret
  - Uses constant-time comparison to prevent timing attacks
  - Added webhook_secret field to config schema
- QuickBooks: Implement customer and account mapping
  - Added support for customer mappings (client → QuickBooks customer)
  - Added support for item mappings (invoice items → QuickBooks items)
  - Added support for account mappings (expense categories → accounts)
  - Added default expense account configuration
  - Improved error handling and logging
- Xero: Add customer and account mapping support
  - Added contact mappings (client → Xero Contact ID)
  - Added item mappings (invoice items → Xero item codes)
  - Added account mappings (expense categories → Xero account codes)
  - Added default expense account configuration
- CalDAV: Implement bidirectional sync
  - Added TimeTracker to Calendar sync direction
  - Implemented iCalendar event generation from time entries
  - Added create_or_update_event method to CalDAVClient
  - Supports bidirectional sync (both directions simultaneously)
  - Improved error handling for event creation/updates
- Trello: Implement bidirectional sync
  - Added TimeTracker to Trello sync direction
  - Implemented task to card creation and updates
  - Automatic board creation for projects if needed
  - Maps task status to Trello lists
  - Supports bidirectional sync
- Exception handling: Improve error logging in integrations
  - Replaced silent pass statements with proper error logging
  - Added debug logging for non-critical failures (user info fetch)
  - Improved error visibility for debugging
  - Affected: Google Calendar, Outlook Calendar, Microsoft Teams, Asana, GitLab

All changes include proper error handling, logging, and follow existing code patterns. Database migration required for push_subscriptions table.
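The PushSubscription model referenced above is not included in this diff. A minimal sketch of what such a SQLAlchemy model could look like, assuming the project's usual Flask-SQLAlchemy conventions (the field names here are illustrative, not the committed schema):

    from datetime import datetime
    from app import db

    class PushSubscription(db.Model):
        """One browser push subscription per row; a user may hold several."""
        __tablename__ = "push_subscriptions"

        id = db.Column(db.Integer, primary_key=True)
        user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False, index=True)
        endpoint = db.Column(db.Text, nullable=False, unique=True)   # push service URL
        p256dh = db.Column(db.String(255), nullable=False)           # client public key
        auth = db.Column(db.String(255), nullable=False)             # auth secret
        created_at = db.Column(db.DateTime, default=datetime.utcnow)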
@@ -87,8 +87,11 @@ class AsanaConnector(BaseConnector):
                 "name": user_data.get("name"),
                 "email": user_data.get("email"),
             }
-        except Exception:
-            pass
+        except Exception as e:
+            # Log error but don't fail - user info is optional
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.debug(f"Could not fetch Asana user info: {e}")

         return {
             "access_token": data.get("access_token"),

@@ -339,6 +339,57 @@ class CalDAVClient:

         return events

+    def create_or_update_event(self, calendar_url: str, event_uid: str, ical_content: str, event_href: Optional[str] = None) -> bool:
+        """
+        Create or update a calendar event using PUT request.
+
+        Args:
+            calendar_url: Calendar collection URL
+            event_uid: Unique identifier for the event
+            ical_content: iCalendar content (VCALENDAR with VEVENT)
+            event_href: Optional existing event href for updates
+
+        Returns:
+            True if successful, False otherwise
+        """
+        calendar_url = _ensure_trailing_slash(calendar_url)
+        # Use provided href if available, otherwise construct from UID
+        if event_href:
+            event_url = event_href
+        else:
+            # Event URL is typically: calendar_url + event_uid + ".ics"
+            event_url = urljoin(calendar_url, f"{event_uid}.ics")
+
+        headers = {
+            "Content-Type": "text/calendar; charset=utf-8",
+        }
+
+        try:
+            resp = self._request("PUT", event_url, headers=headers, data=ical_content)
+            resp.raise_for_status()
+            return True
+        except requests.exceptions.HTTPError as e:
+            import logging
+            logger = logging.getLogger(__name__)
+            if e.response.status_code == 404:
+                logger.warning(f"CalDAV event {event_uid} not found at {event_url}, attempting to create")
+                # Try creating with standard URL if custom href failed
+                if event_href and event_href != urljoin(calendar_url, f"{event_uid}.ics"):
+                    standard_url = urljoin(calendar_url, f"{event_uid}.ics")
+                    try:
+                        resp = self._request("PUT", standard_url, headers=headers, data=ical_content)
+                        resp.raise_for_status()
+                        return True
+                    except Exception:
+                        pass
+            logger.error(f"Failed to create/update CalDAV event {event_uid}: {e}")
+            return False
+        except Exception as e:
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.error(f"Failed to create/update CalDAV event {event_uid}: {e}", exc_info=True)
+            return False
+
     def _find_href(self, root: ET.Element, prop_paths: List[Tuple[str, ...]]) -> Optional[str]:
         """
         Find a DAV:href under a given prop path.
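For context, a caller would use the new method roughly like this (a minimal sketch; the calendar URL, UID, and payload are illustrative):

    ok = client.create_or_update_event(
        "https://dav.example.com/calendars/alice/work/",  # calendar collection URL
        "timetracker-42@timetracker.local",               # event UID
        ical_content,                                     # VCALENDAR string containing one VEVENT
    )
    if not ok:
        logger.error("CalDAV upload failed")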
@@ -508,159 +559,33 @@ class CalDAVCalendarConnector(BaseConnector):
             lookback_days = int(cfg.get("lookback_days", 90))

             if sync_direction in ("calendar_to_time_tracker", "bidirectional"):
                 if not default_project_id:
                     return {"success": False, "message": "default_project_id is required to import calendar events as time entries."}
+                calendar_result = self._sync_calendar_to_time_tracker(cfg, calendar_url, sync_type, default_project_id, lookback_days)
+                # If bidirectional, also do TimeTracker to Calendar sync
+                if sync_direction == "bidirectional":
+                    tracker_result = self._sync_time_tracker_to_calendar(cfg, calendar_url, sync_type)
+                    # Merge results
+                    if calendar_result.get("success") and tracker_result.get("success"):
+                        return {
+                            "success": True,
+                            "synced_items": calendar_result.get("synced_items", 0) + tracker_result.get("synced_items", 0),
+                            "imported": calendar_result.get("imported", 0),
+                            "skipped": calendar_result.get("skipped", 0),
+                            "errors": calendar_result.get("errors", []) + tracker_result.get("errors", []),
+                            "message": f"Bidirectional sync: Calendar→TimeTracker: {calendar_result.get('message', '')} | TimeTracker→Calendar: {tracker_result.get('message', '')}",
+                        }
+                    elif calendar_result.get("success"):
+                        return calendar_result
+                    elif tracker_result.get("success"):
+                        return tracker_result
+                    else:
+                        return {"success": False, "message": f"Both sync directions failed. Calendar→TimeTracker: {calendar_result.get('message')}, TimeTracker→Calendar: {tracker_result.get('message')}"}
+                return calendar_result
-
-                # Determine time window
-                if sync_type == "incremental" and self.integration.last_sync_at:
-                    # last_sync_at stored as naive UTC in Integration; treat as UTC
-                    time_min_utc = self.integration.last_sync_at.replace(tzinfo=timezone.utc)
-                else:
-                    time_min_utc = datetime.now(timezone.utc) - timedelta(days=lookback_days)
-                time_max_utc = datetime.now(timezone.utc) + timedelta(days=7)
-
-                logger.info(f"Fetching events from {calendar_url} between {time_min_utc} and {time_max_utc}")
-                client = self._client()
-                try:
-                    events = client.fetch_events(calendar_url, time_min_utc, time_max_utc)
-                    logger.info(f"Fetched {len(events)} events from CalDAV calendar")
-                    if len(events) == 0:
-                        logger.warning(f"No events found in calendar {calendar_url} for time range {time_min_utc} to {time_max_utc}")
-                except Exception as e:
-                    logger.error(f"Failed to fetch events from calendar: {e}", exc_info=True)
-                    return {"success": False, "message": f"Failed to fetch events from calendar: {str(e)}"}
-
-                # Preload projects for title matching
-                projects = Project.query.filter_by(status="active").order_by(Project.name).all()
-
-                imported = 0
-                skipped = 0
-                errors: List[str] = []
-
-                if len(events) == 0:
-                    # Update integration status even if no events found (this is a successful sync)
-                    self.integration.last_sync_at = datetime.utcnow()
-                    self.integration.last_sync_status = "success"
-                    self.integration.last_error = None
-                    db.session.commit()
-                    return {
-                        "success": True,
-                        "imported": 0,
-                        "skipped": 0,
-                        "synced_items": 0,
-                        "errors": [],
-                        "message": f"No events found in calendar for the specified time range ({time_min_utc.date()} to {time_max_utc.date()}).",
-                    }
-
-                for ev in events:
-                    try:
-                        uid = ev["uid"]
-                        # Check if this event was already imported (idempotency)
-                        existing_link = IntegrationExternalEventLink.query.filter_by(
-                            integration_id=self.integration.id, external_uid=uid
-                        ).first()
-                        if existing_link:
-                            skipped += 1
-                            continue
-
-                        start_dt: datetime = ev["start"]
-                        end_dt: datetime = ev["end"]
-
-                        # Convert to local naive for DB storage
-                        start_local = _to_local_naive(start_dt)
-                        end_local = _to_local_naive(end_dt)
-
-                        # Ensure valid duration
-                        if end_local <= start_local:
-                            skipped += 1
-                            continue
-
-                        # Try project match, else default
-                        project_id = int(default_project_id)
-                        title = (ev.get("summary") or "").strip()
-                        for p in projects:
-                            if p and p.name and p.name in title:
-                                project_id = p.id
-                                break
-
-                        notes_parts = []
-                        if title:
-                            notes_parts.append(title)
-                        desc = (ev.get("description") or "").strip()
-                        if desc:
-                            notes_parts.append(desc)
-                        notes = "\n\n".join(notes_parts) if notes_parts else None
-
-                        time_entry = TimeEntry(
-                            user_id=self.integration.user_id,
-                            project_id=project_id,
-                            start_time=start_local,
-                            end_time=end_local,
-                            notes=notes,
-                            source="auto",
-                            billable=True,
-                        )
-
-                        db.session.add(time_entry)
-                        db.session.flush()  # get id
-
-                        link = IntegrationExternalEventLink(
-                            integration_id=self.integration.id,
-                            time_entry_id=time_entry.id,
-                            external_uid=uid,
-                            external_href=ev.get("href"),
-                        )
-                        db.session.add(link)
-                        # Flush to check for duplicate UID constraint violation
-                        db.session.flush()
-
-                        imported += 1
-                    except Exception as e:
-                        # Check if it's a duplicate UID error (unique constraint violation)
-                        # This can happen in rare race conditions
-                        error_str = str(e).lower()
-                        if "unique" in error_str or "duplicate" in error_str or "uq_integration_external_uid" in error_str:
-                            # Duplicate UID - mark as skipped (likely imported by another process)
-                            skipped += 1
-                            logger.debug(f"Event {ev.get('uid', 'unknown')} already imported (duplicate UID - race condition)")
-                            # Don't rollback - the time_entry might have been created
-                            # Just continue to next event
-                        else:
-                            # Other error - log it and continue
-                            error_msg = f"Event {ev.get('uid', 'unknown')}: {str(e)}"
-                            errors.append(error_msg)
-                            logger.warning(f"Failed to import event {ev.get('uid', 'unknown')}: {e}")
-                            # For other errors, we might want to rollback this specific event
-                            # but that's complex with SQLAlchemy, so we'll let the final commit handle it
-                            # The duplicate check at the start should catch most issues
-
-                # Update integration status
-                self.integration.last_sync_at = datetime.utcnow()
-                self.integration.last_sync_status = "success" if not errors else "partial"
-                self.integration.last_error = "; ".join(errors[:3]) if errors else None
-
-                db.session.commit()
-
-                # Build detailed message
-                if imported == 0 and skipped > 0:
-                    message = f"No new events imported ({skipped} already imported, {len(events)} total found)."
-                elif imported == 0:
-                    message = f"No events found in calendar for the specified time range ({time_min_utc.date()} to {time_max_utc.date()})."
-                else:
-                    message = f"Imported {imported} events ({skipped} skipped, {len(events)} total found)."
-
-                logger.info(f"CalDAV sync completed: {message}")
-
-                return {
-                    "success": True,
-                    "imported": imported,
-                    "skipped": skipped,
-                    "synced_items": imported,  # For compatibility with scheduled_tasks
-                    "errors": errors,
-                    "message": message,
-                }
-
-            return {"success": False, "message": "Sync direction not implemented for CalDAV yet."}
+            # Handle TimeTracker to Calendar sync
+            if sync_direction == "time_tracker_to_calendar":
+                return self._sync_time_tracker_to_calendar(cfg, calendar_url, sync_type)
+
+            return {"success": False, "message": f"Unknown sync direction: {sync_direction}"}
         except Exception as e:
             try:
                 from app import db
@@ -678,5 +603,298 @@ class CalDAVCalendarConnector(BaseConnector):
             except Exception:
                 pass
             return {"success": False, "message": f"Sync failed: {str(e)}"}

+    def _sync_calendar_to_time_tracker(self, cfg: Dict[str, Any], calendar_url: str, sync_type: str, default_project_id: Optional[int], lookback_days: int) -> Dict[str, Any]:
+        """Sync calendar events to TimeTracker time entries."""
+        from app.models import Project, TimeEntry
+        from app.models.integration_external_event_link import IntegrationExternalEventLink
+
+        if not default_project_id:
+            return {"success": False, "message": "default_project_id is required to import calendar events as time entries."}
+
+        # Determine time window
+        if sync_type == "incremental" and self.integration.last_sync_at:
+            time_min_utc = self.integration.last_sync_at.replace(tzinfo=timezone.utc)
+        else:
+            time_min_utc = datetime.now(timezone.utc) - timedelta(days=lookback_days)
+        time_max_utc = datetime.now(timezone.utc) + timedelta(days=7)
+
+        logger.info(f"Fetching events from {calendar_url} between {time_min_utc} and {time_max_utc}")
+        client = self._client()
+        try:
+            events = client.fetch_events(calendar_url, time_min_utc, time_max_utc)
+            logger.info(f"Fetched {len(events)} events from CalDAV calendar")
+            if len(events) == 0:
+                logger.warning(f"No events found in calendar {calendar_url} for time range {time_min_utc} to {time_max_utc}")
+        except Exception as e:
+            logger.error(f"Failed to fetch events from calendar: {e}", exc_info=True)
+            return {"success": False, "message": f"Failed to fetch events from calendar: {str(e)}"}
+
+        # Preload projects for title matching
+        projects = Project.query.filter_by(status="active").order_by(Project.name).all()
+
+        imported = 0
+        skipped = 0
+        errors: List[str] = []
+
+        if len(events) == 0:
+            self.integration.last_sync_at = datetime.utcnow()
+            self.integration.last_sync_status = "success"
+            self.integration.last_error = None
+            db.session.commit()
+            return {
+                "success": True,
+                "imported": 0,
+                "skipped": 0,
+                "synced_items": 0,
+                "errors": [],
+                "message": f"No events found in calendar for the specified time range ({time_min_utc.date()} to {time_max_utc.date()}).",
+            }
+
+        for ev in events:
+            try:
+                uid = ev["uid"]
+                existing_link = IntegrationExternalEventLink.query.filter_by(
+                    integration_id=self.integration.id, external_uid=uid
+                ).first()
+                if existing_link:
+                    skipped += 1
+                    continue
+
+                start_dt: datetime = ev["start"]
+                end_dt: datetime = ev["end"]
+
+                start_local = _to_local_naive(start_dt)
+                end_local = _to_local_naive(end_dt)
+
+                if end_local <= start_local:
+                    skipped += 1
+                    continue
+
+                project_id = int(default_project_id)
+                title = (ev.get("summary") or "").strip()
+                for p in projects:
+                    if p and p.name and p.name in title:
+                        project_id = p.id
+                        break
+
+                notes_parts = []
+                if title:
+                    notes_parts.append(title)
+                desc = (ev.get("description") or "").strip()
+                if desc:
+                    notes_parts.append(desc)
+                notes = "\n\n".join(notes_parts) if notes_parts else None
+
+                time_entry = TimeEntry(
+                    user_id=self.integration.user_id,
+                    project_id=project_id,
+                    start_time=start_local,
+                    end_time=end_local,
+                    notes=notes,
+                    source="auto",
+                    billable=True,
+                )
+
+                db.session.add(time_entry)
+                db.session.flush()
+
+                link = IntegrationExternalEventLink(
+                    integration_id=self.integration.id,
+                    time_entry_id=time_entry.id,
+                    external_uid=uid,
+                    external_href=ev.get("href"),
+                )
+                db.session.add(link)
+                db.session.flush()
+
+                imported += 1
+            except Exception as e:
+                error_str = str(e).lower()
+                if "unique" in error_str or "duplicate" in error_str or "uq_integration_external_uid" in error_str:
+                    skipped += 1
+                    logger.debug(f"Event {ev.get('uid', 'unknown')} already imported (duplicate UID - race condition)")
+                else:
+                    error_msg = f"Event {ev.get('uid', 'unknown')}: {str(e)}"
+                    errors.append(error_msg)
+                    logger.warning(f"Failed to import event {ev.get('uid', 'unknown')}: {e}")
+
+        self.integration.last_sync_at = datetime.utcnow()
+        self.integration.last_sync_status = "success" if not errors else "partial"
+        self.integration.last_error = "; ".join(errors[:3]) if errors else None
+
+        db.session.commit()
+
+        if imported == 0 and skipped > 0:
+            message = f"No new events imported ({skipped} already imported, {len(events)} total found)."
+        elif imported == 0:
+            message = f"No events found in calendar for the specified time range ({time_min_utc.date()} to {time_max_utc.date()})."
+        else:
+            message = f"Imported {imported} events ({skipped} skipped, {len(events)} total found)."
+
+        logger.info(f"CalDAV sync completed: {message}")
+
+        return {
+            "success": True,
+            "imported": imported,
+            "skipped": skipped,
+            "synced_items": imported,
+            "errors": errors,
+            "message": message,
+        }
+
+    def _sync_time_tracker_to_calendar(self, cfg: Dict[str, Any], calendar_url: str, sync_type: str) -> Dict[str, Any]:
+        """Sync TimeTracker time entries to CalDAV calendar."""
+        from app.models import TimeEntry, Project, Task
+        from app.models.integration_external_event_link import IntegrationExternalEventLink
+
+        lookback_days = int(cfg.get("lookback_days", 90))
+        lookahead_days = int(cfg.get("lookahead_days", 7))
+
+        if sync_type == "incremental" and self.integration.last_sync_at:
+            time_min = self.integration.last_sync_at.replace(tzinfo=timezone.utc)
+        else:
+            time_min = datetime.now(timezone.utc) - timedelta(days=lookback_days)
+        time_max = datetime.now(timezone.utc) + timedelta(days=lookahead_days)
+
+        time_min_local = _to_local_naive(time_min)
+        time_max_local = _to_local_naive(time_max)
+
+        time_entries = TimeEntry.query.filter(
+            TimeEntry.user_id == self.integration.user_id,
+            TimeEntry.start_time >= time_min_local,
+            TimeEntry.start_time <= time_max_local,
+            TimeEntry.end_time.isnot(None),
+        ).order_by(TimeEntry.start_time).all()
+
+        if not time_entries:
+            self.integration.last_sync_at = datetime.utcnow()
+            self.integration.last_sync_status = "success"
+            self.integration.last_error = None
+            db.session.commit()
+            return {
+                "success": True,
+                "synced_items": 0,
+                "errors": [],
+                "message": f"No time entries found in the specified time range ({time_min_local.date()} to {time_max_local.date()}).",
+            }
+
+        client = self._client()
+        synced = 0
+        updated = 0
+        errors: List[str] = []
+
+        for time_entry in time_entries:
+            try:
+                event_uid = f"timetracker-{time_entry.id}@timetracker.local"
+
+                existing_link = IntegrationExternalEventLink.query.filter_by(
+                    integration_id=self.integration.id,
+                    time_entry_id=time_entry.id
+                ).first()
+
+                project = Project.query.get(time_entry.project_id) if time_entry.project_id else None
+                task = Task.query.get(time_entry.task_id) if time_entry.task_id else None
+
+                title_parts = []
+                if project:
+                    title_parts.append(project.name)
+                if task:
+                    title_parts.append(task.name)
+                if not title_parts:
+                    title_parts.append("Time Entry")
+                title = " - ".join(title_parts)
+
+                description_parts = []
+                if time_entry.notes:
+                    description_parts.append(time_entry.notes)
+                if time_entry.tags:
+                    description_parts.append(f"Tags: {time_entry.tags}")
+                description = "\n\n".join(description_parts) if description_parts else "TimeTracker: Created from time entry"
+
+                start_utc = local_to_utc(time_entry.start_time)
+                end_utc = local_to_utc(time_entry.end_time) if time_entry.end_time else start_utc + timedelta(hours=1)
+
+                ical_content = self._generate_icalendar_event(
+                    uid=event_uid,
+                    title=title,
+                    description=description,
+                    start=start_utc,
+                    end=end_utc,
+                    created=time_entry.created_at.replace(tzinfo=timezone.utc) if time_entry.created_at else datetime.now(timezone.utc),
+                    updated=time_entry.updated_at.replace(tzinfo=timezone.utc) if time_entry.updated_at else datetime.now(timezone.utc),
+                )
+
+                # Use existing href if available, otherwise generate new one
+                event_href = existing_link.external_href if existing_link else urljoin(calendar_url, f"{event_uid}.ics")
+
+                # For updates, we need to use the existing href
+                if existing_link:
+                    # Update existing event using its href
+                    success = client.create_or_update_event(calendar_url, event_uid, ical_content, event_href=existing_link.external_href)
+                    if success:
+                        updated += 1
+                    else:
+                        errors.append(f"Failed to update time entry {time_entry.id} in calendar")
+                else:
+                    # Create new event
+                    success = client.create_or_update_event(calendar_url, event_uid, ical_content)
+                    if success:
+                        link = IntegrationExternalEventLink(
+                            integration_id=self.integration.id,
+                            time_entry_id=time_entry.id,
+                            external_uid=event_uid,
+                            external_href=event_href,
+                        )
+                        db.session.add(link)
+                        synced += 1
+                    else:
+                        errors.append(f"Failed to create time entry {time_entry.id} in calendar")
+
+            except Exception as e:
+                error_msg = f"Time entry {time_entry.id}: {str(e)}"
+                errors.append(error_msg)
+                logger.warning(f"Failed to sync time entry {time_entry.id} to CalDAV: {e}")
+
+        self.integration.last_sync_at = datetime.utcnow()
+        self.integration.last_sync_status = "success" if not errors else "partial"
+        self.integration.last_error = "; ".join(errors[:3]) if errors else None
+
+        db.session.commit()
+
+        message = f"Synced {synced} new events, updated {updated} events to CalDAV calendar."
+        logger.info(f"CalDAV TimeTracker→Calendar sync completed: {message}")
+
+        return {
+            "success": True,
+            "synced_items": synced + updated,
+            "errors": errors,
+            "message": message,
+        }
+
+    def _generate_icalendar_event(self, uid: str, title: str, description: str, start: datetime, end: datetime, created: datetime, updated: datetime) -> str:
+        """Generate iCalendar content for an event."""
+        from icalendar import Calendar, Event
+
+        event = Event()
+        event.add('uid', uid)
+        event.add('summary', title)
+        event.add('description', description)
+        event.add('dtstart', start)
+        event.add('dtend', end)
+        event.add('dtstamp', datetime.now(timezone.utc))
+        event.add('created', created)
+        event.add('last-modified', updated)
+        event.add('status', 'CONFIRMED')
+        event.add('transp', 'OPAQUE')
+
+        cal = Calendar()
+        cal.add('prodid', '-//TimeTracker//CalDAV Integration//EN')
+        cal.add('version', '2.0')
+        cal.add('calscale', 'GREGORIAN')
+        cal.add('method', 'PUBLISH')
+        cal.add_component(event)
+
+        return cal.to_ical().decode('utf-8')

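For reference (not part of the diff), a call to _generate_icalendar_event for a one-hour entry would look roughly like this; the instance name and values are illustrative:

    from datetime import datetime, timezone

    ical = connector._generate_icalendar_event(
        uid="timetracker-42@timetracker.local",   # the UID convention used above
        title="Acme Website - Fix login bug",
        description="TimeTracker: Created from time entry",
        start=datetime(2026, 1, 15, 9, 0, tzinfo=timezone.utc),
        end=datetime(2026, 1, 15, 10, 0, tzinfo=timezone.utc),
        created=datetime.now(timezone.utc),
        updated=datetime.now(timezone.utc),
    )
    # ical is a VCALENDAR string containing a single VEVENT, ready to pass
    # to CalDAVClient.create_or_update_event.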
@@ -137,99 +137,175 @@ class GitHubConnector(BaseConnector):
         from app.models import Task, Project
         from app import db
         from datetime import datetime, timedelta
         import logging

         logger = logging.getLogger(__name__)

         token = self.get_access_token()
         if not token:
-            return {"success": False, "message": "No access token available"}
+            return {"success": False, "message": "No access token available. Please reconnect the integration."}

         # Get repositories from config
         repos_str = self.integration.config.get("repositories", "")
         if not repos_str:
             # Get user's repositories
-            repos_response = requests.get(
-                "https://api.github.com/user/repos", headers={"Authorization": f"token {token}"}
-            )
-            if repos_response.status_code == 200:
-                repos = repos_response.json()
-                repos_list = [f"{r['owner']['login']}/{r['name']}" for r in repos[:10]]  # Limit to 10 repos
-            else:
-                return {"success": False, "message": "Could not fetch repositories"}
+            try:
+                repos_response = requests.get(
+                    "https://api.github.com/user/repos",
+                    headers={"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"},
+                    timeout=30
+                )
+                if repos_response.status_code == 200:
+                    repos = repos_response.json()
+                    repos_list = [f"{r['owner']['login']}/{r['name']}" for r in repos[:10]]  # Limit to 10 repos
+                elif repos_response.status_code == 401:
+                    return {"success": False, "message": "GitHub authentication failed. Please reconnect the integration."}
+                else:
+                    error_msg = f"Could not fetch repositories: {repos_response.status_code} - {repos_response.text[:200]}"
+                    logger.error(error_msg)
+                    return {"success": False, "message": error_msg}
+            except requests.exceptions.Timeout:
+                return {"success": False, "message": "GitHub API request timed out. Please try again."}
+            except requests.exceptions.ConnectionError as e:
+                return {"success": False, "message": f"Failed to connect to GitHub API: {str(e)}"}
+            except Exception as e:
+                logger.error(f"Error fetching repositories: {e}", exc_info=True)
+                return {"success": False, "message": f"Error fetching repositories: {str(e)}"}
         else:
             repos_list = [r.strip() for r in repos_str.split(",") if r.strip()]

         if not repos_list:
             return {"success": False, "message": "No repositories configured or found"}

         synced_count = 0
         errors = []

         try:
             for repo in repos_list:
                 try:
-                    owner, repo_name = repo.split("/")
+                    if "/" not in repo:
+                        errors.append(f"Invalid repository format: {repo} (expected owner/repo)")
+                        continue
+
+                    owner, repo_name = repo.split("/", 1)

                     # Find or create project
                     project = Project.query.filter_by(user_id=self.integration.user_id, name=repo).first()

                     if not project:
-                        project = Project(
-                            name=repo,
-                            description=f"GitHub repository: {repo}",
-                            user_id=self.integration.user_id,
-                            status="active",
-                        )
-                        db.session.add(project)
-                        db.session.flush()
+                        try:
+                            project = Project(
+                                name=repo,
+                                description=f"GitHub repository: {repo}",
+                                user_id=self.integration.user_id,
+                                status="active",
+                            )
+                            db.session.add(project)
+                            db.session.flush()
+                        except Exception as e:
+                            errors.append(f"Error creating project for {repo}: {str(e)}")
+                            logger.error(f"Error creating project for {repo}: {e}", exc_info=True)
+                            continue

                     # Fetch issues
-                    issues_response = requests.get(
-                        f"https://api.github.com/repos/{repo}/issues",
-                        headers={"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"},
-                        params={"state": "open", "per_page": 100},
-                    )
+                    try:
+                        issues_response = requests.get(
+                            f"https://api.github.com/repos/{repo}/issues",
+                            headers={"Authorization": f"token {token}", "Accept": "application/vnd.github.v3+json"},
+                            params={"state": "open", "per_page": 100},
+                            timeout=30
+                        )

-                    if issues_response.status_code != 200:
-                        errors.append(f"Error fetching issues for {repo}: {issues_response.status_code}")
-                        continue
+                        if issues_response.status_code == 404:
+                            errors.append(f"Repository {repo} not found or access denied")
+                            continue
+                        elif issues_response.status_code == 401:
+                            errors.append(f"Authentication failed for repository {repo}")
+                            continue
+                        elif issues_response.status_code != 200:
+                            error_text = issues_response.text[:200] if issues_response.text else ""
+                            errors.append(f"Error fetching issues for {repo}: {issues_response.status_code} - {error_text}")
+                            continue

-                    issues = issues_response.json()
+                        issues = issues_response.json()
+                    except requests.exceptions.Timeout:
+                        errors.append(f"Timeout fetching issues for {repo}")
+                        continue
+                    except requests.exceptions.ConnectionError as e:
+                        errors.append(f"Connection error for {repo}: {str(e)}")
+                        continue
+                    except Exception as e:
+                        errors.append(f"Error fetching issues for {repo}: {str(e)}")
+                        logger.error(f"Error fetching issues for {repo}: {e}", exc_info=True)
+                        continue

                     for issue in issues:
                         try:
                             issue_number = issue.get("number")
                             issue_title = issue.get("title", "")

                             if not issue_number:
                                 continue

                             # Find or create task
                             task = Task.query.filter_by(
                                 project_id=project.id, name=f"#{issue_number}: {issue_title}"
                             ).first()

                             if not task:
-                                task = Task(
-                                    project_id=project.id,
-                                    name=f"#{issue_number}: {issue_title}",
-                                    description=issue.get("body", ""),
-                                    status="todo",
-                                    notes=f"GitHub Issue: {issue.get('html_url', '')}",
-                                )
-                                db.session.add(task)
-                                db.session.flush()
+                                try:
+                                    task = Task(
+                                        project_id=project.id,
+                                        name=f"#{issue_number}: {issue_title}",
+                                        description=issue.get("body", ""),
+                                        status="todo",
+                                        notes=f"GitHub Issue: {issue.get('html_url', '')}",
+                                    )
+                                    db.session.add(task)
+                                    db.session.flush()
+                                except Exception as e:
+                                    errors.append(f"Error creating task for issue #{issue_number} in {repo}: {str(e)}")
+                                    logger.error(f"Error creating task for issue #{issue_number} in {repo}: {e}", exc_info=True)
+                                    continue

                             # Store GitHub issue info in task metadata
-                            if not hasattr(task, "metadata") or not task.metadata:
-                                task.metadata = {}
-                            task.metadata["github_repo"] = repo
-                            task.metadata["github_issue_number"] = issue_number
-                            task.metadata["github_issue_id"] = issue.get("id")
-                            task.metadata["github_issue_url"] = issue.get("html_url")
+                            try:
+                                if not hasattr(task, "metadata") or not task.metadata:
+                                    task.metadata = {}
+                                task.metadata["github_repo"] = repo
+                                task.metadata["github_issue_number"] = issue_number
+                                task.metadata["github_issue_id"] = issue.get("id")
+                                task.metadata["github_issue_url"] = issue.get("html_url")
+                            except Exception as e:
+                                logger.warning(f"Error updating task metadata for issue #{issue_number}: {e}")

                             synced_count += 1
                         except Exception as e:
                             errors.append(f"Error syncing issue #{issue.get('number', 'unknown')} in {repo}: {str(e)}")
+                            logger.error(f"Error syncing issue #{issue.get('number', 'unknown')} in {repo}: {e}", exc_info=True)
-                except ValueError:
-                    errors.append(f"Invalid repository format: {repo}")
+                except ValueError as e:
+                    errors.append(f"Invalid repository format: {repo} - {str(e)}")
                 except Exception as e:
                     errors.append(f"Error syncing repository {repo}: {str(e)}")
+                    logger.error(f"Error syncing repository {repo}: {e}", exc_info=True)

-            db.session.commit()
+            try:
+                db.session.commit()
+            except Exception as e:
+                db.session.rollback()
+                error_msg = f"Database error during sync: {str(e)}"
+                errors.append(error_msg)
+                logger.error(error_msg, exc_info=True)
+                return {"success": False, "message": error_msg, "synced_items": synced_count, "errors": errors}

+            if errors:
+                return {
+                    "success": True,
+                    "message": f"Sync completed with {len(errors)} error(s). Synced {synced_count} issues.",
+                    "synced_items": synced_count,
+                    "errors": errors,
+                }

             return {
                 "success": True,
                 "message": f"Sync completed. Synced {synced_count} issues.",
@@ -237,17 +313,82 @@ class GitHubConnector(BaseConnector):
                 "errors": errors,
             }
         except Exception as e:
-            return {"success": False, "message": f"Sync failed: {str(e)}"}
+            logger.error(f"GitHub sync failed: {e}", exc_info=True)
+            try:
+                db.session.rollback()
+            except Exception:
+                pass
+            return {"success": False, "message": f"Sync failed: {str(e)}", "errors": errors}

-    def handle_webhook(self, payload: Dict[str, Any], headers: Dict[str, str]) -> Dict[str, Any]:
+    def handle_webhook(self, payload: Dict[str, Any], headers: Dict[str, str], raw_body: Optional[bytes] = None) -> Dict[str, Any]:
         """Handle incoming webhook from GitHub."""
+        import hmac
+        import hashlib
+        import logging
+
+        logger = logging.getLogger(__name__)
+
         try:
             # Verify webhook signature if secret is configured
             signature = headers.get("X-Hub-Signature-256", "")
             if signature:
-                # Signature verification would go here
-                pass
+                # Get webhook secret from integration config
+                webhook_secret = self.integration.config.get("webhook_secret") if self.integration else None
+
+                if webhook_secret:
+                    # GitHub sends signature as "sha256=<hash>"
+                    if not signature.startswith("sha256="):
+                        logger.warning("GitHub webhook signature format invalid (expected sha256= prefix)")
+                        return {
+                            "success": False,
+                            "message": "Invalid webhook signature format"
+                        }
+
+                    signature_hash = signature[7:]  # Remove "sha256=" prefix
+
+                    # GitHub signs the raw request body bytes, not the parsed JSON
+                    # This is critical for signature verification to work correctly
+                    if raw_body is None:
+                        # Fallback: try to reconstruct from payload (not ideal but better than nothing)
+                        import json
+                        raw_body = json.dumps(payload, sort_keys=True, separators=(',', ':')).encode('utf-8')
+                        logger.warning("GitHub webhook: Using reconstructed payload for signature verification (raw body not available)")
+
+                    # Compute expected signature using raw body bytes
+                    expected_signature = hmac.new(
+                        webhook_secret.encode('utf-8'),
+                        raw_body,
+                        hashlib.sha256
+                    ).hexdigest()
+
+                    # Use constant-time comparison to prevent timing attacks
+                    if not hmac.compare_digest(signature_hash, expected_signature):
+                        logger.warning("GitHub webhook signature verification failed")
+                        return {
+                            "success": False,
+                            "message": "Webhook signature verification failed"
+                        }
+
+                    logger.debug("GitHub webhook signature verified successfully")
+                else:
+                    # Signature provided but no secret configured - reject for security
+                    logger.warning("GitHub webhook signature provided but no secret configured - rejecting webhook")
+                    return {
+                        "success": False,
+                        "message": "Webhook secret not configured"
+                    }
+            else:
+                # No signature provided - check if secret is configured
+                webhook_secret = self.integration.config.get("webhook_secret") if self.integration else None
+                if webhook_secret:
+                    # Secret configured but no signature - reject for security
+                    logger.warning("GitHub webhook secret configured but no signature provided - rejecting webhook")
+                    return {
+                        "success": False,
+                        "message": "Webhook signature required but not provided"
+                    }

             # Process webhook event
             action = payload.get("action")
             event_type = headers.get("X-GitHub-Event", "")

@@ -273,7 +414,13 @@ class GitHubConnector(BaseConnector):
             }

             return {"success": True, "message": f"Webhook processed: {event_type}"}
+        except ValueError as e:
+            # Handle validation errors
+            logger.error(f"GitHub webhook validation error: {e}")
+            return {"success": False, "message": f"Webhook validation error: {str(e)}"}
         except Exception as e:
+            # Handle all other errors
+            logger.error(f"GitHub webhook processing error: {e}", exc_info=True)
             return {"success": False, "message": f"Error processing webhook: {str(e)}"}

     def get_config_schema(self) -> Dict[str, Any]:

@@ -295,6 +442,14 @@ class GitHubConnector(BaseConnector):
                     "default": True,
                     "description": "Automatically sync when webhooks are received",
                 },
+                {
+                    "name": "webhook_secret",
+                    "label": "Webhook Secret",
+                    "type": "password",
+                    "required": False,
+                    "placeholder": "Enter webhook secret from GitHub",
+                    "help": "Secret token for verifying webhook signatures (configure in GitHub webhook settings)",
+                },
             ],
             "required": [],
         }

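To sanity-check the verification logic above, one can compute the header value GitHub would send for a known secret and body using only the standard library (the secret and body here are made up):

    import hashlib
    import hmac

    secret = b"my-webhook-secret"       # hypothetical test secret
    body = b'{"action": "opened"}'      # raw request bytes, exactly as received
    digest = hmac.new(secret, body, hashlib.sha256).hexdigest()
    header_value = f"sha256={digest}"   # what X-Hub-Signature-256 should contain

Because GitHub signs the raw bytes, any re-serialization of the parsed JSON can change the digest, which is why handle_webhook prefers the raw_body parameter over the reconstructed-payload fallback.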
@@ -104,8 +104,11 @@ class GitLabConnector(BaseConnector):
                 "name": user_data.get("name"),
                 "email": user_data.get("email"),
             }
-        except Exception:
-            pass
+        except Exception as e:
+            # Log error but don't fail - user info is optional
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.debug(f"Could not fetch GitLab user info: {e}")

         return {
             "access_token": data.get("access_token"),

@@ -104,8 +104,11 @@ class GoogleCalendarConnector(BaseConnector):
                 "name": user_info_response.get("name"),
                 "picture": user_info_response.get("picture"),
             }
-        except Exception:
-            pass
+        except Exception as e:
+            # Log error but don't fail - user info is optional
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.debug(f"Could not fetch Google user info: {e}")

         return {
             "access_token": credentials.token,

@@ -112,8 +112,11 @@ class MicrosoftTeamsConnector(BaseConnector):
                 "displayName": user_data.get("displayName"),
                 "mail": user_data.get("mail"),
             }
-        except Exception:
-            pass
+        except Exception as e:
+            # Log error but don't fail - user info is optional
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.debug(f"Could not fetch Microsoft Teams user info: {e}")

         return {
             "access_token": data.get("access_token"),

@@ -113,8 +113,11 @@ class OutlookCalendarConnector(BaseConnector):
                 "mail": user_data.get("mail"),
                 "userPrincipalName": user_data.get("userPrincipalName"),
             }
-        except Exception:
-            pass
+        except Exception as e:
+            # Log error but don't fail - user info is optional
+            import logging
+            logger = logging.getLogger(__name__)
+            logger.debug(f"Could not fetch Outlook user info: {e}")

         return {
             "access_token": data.get("access_token"),

@@ -183,7 +183,7 @@ class QuickBooksConnector(BaseConnector):
         except Exception as e:
             return {"success": False, "message": f"Connection test failed: {str(e)}"}

-    def _api_request(self, method: str, endpoint: str, access_token: str, realm_id: str) -> Optional[Dict]:
+    def _api_request(self, method: str, endpoint: str, access_token: str, realm_id: str, json_data: Optional[Dict] = None) -> Optional[Dict]:
         """Make API request to QuickBooks"""
         base_url = self.get_base_url()
         url = f"{base_url}{endpoint}"
@@ -199,17 +199,39 @@ class QuickBooksConnector(BaseConnector):

         try:
             if method.upper() == "GET":
-                response = requests.get(url, headers=headers, timeout=10)
+                response = requests.get(url, headers=headers, timeout=30)
             elif method.upper() == "POST":
-                response = requests.post(url, headers=headers, timeout=10, json={})
+                response = requests.post(url, headers=headers, timeout=30, json=json_data or {})
+            elif method.upper() == "PUT":
+                response = requests.put(url, headers=headers, timeout=30, json=json_data or {})
             else:
-                response = requests.request(method, url, headers=headers, timeout=10)
+                response = requests.request(method, url, headers=headers, timeout=30, json=json_data)

             response.raise_for_status()
             return response.json()
+        except requests.exceptions.Timeout:
+            logger.error(f"QuickBooks API request timeout: {method} {endpoint}")
+            raise ValueError("QuickBooks API request timed out. Please try again.")
+        except requests.exceptions.ConnectionError as e:
+            logger.error(f"QuickBooks API connection error: {e}")
+            raise ValueError(f"Failed to connect to QuickBooks API: {str(e)}")
+        except requests.exceptions.HTTPError as e:
+            error_detail = ""
+            if e.response:
+                try:
+                    error_data = e.response.json()
+                    error_detail = error_data.get("fault", {}).get("error", [{}])[0].get("detail", "")
+                    if not error_detail:
+                        error_detail = error_data.get("fault", {}).get("error", [{}])[0].get("message", "")
+                except Exception:
+                    error_detail = e.response.text[:200] if e.response.text else ""
+
+            error_msg = f"QuickBooks API error ({e.response.status_code}): {error_detail or str(e)}"
+            logger.error(f"QuickBooks API request failed: {error_msg}")
+            raise ValueError(error_msg)
         except Exception as e:
-            logger.error(f"QuickBooks API request failed: {e}")
-            return None
+            logger.error(f"QuickBooks API request failed: {e}", exc_info=True)
+            raise ValueError(f"QuickBooks API request failed: {str(e)}")

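With the new json_data parameter, write operations pass their payload explicitly; a rough usage sketch (the endpoint and payload are illustrative):

    payload = {"PaymentType": "Cash", "Line": []}
    result = self._api_request(
        "POST",
        f"/v3/company/{realm_id}/purchase",
        access_token,
        realm_id,
        json_data=payload,
    )
    # On HTTP errors this now raises ValueError carrying the QuickBooks
    # fault detail instead of silently returning None.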
     def sync_data(self, sync_type: str = "full") -> Dict[str, Any]:
         """Sync invoices and expenses with QuickBooks"""
@@ -222,94 +244,364 @@ class QuickBooksConnector(BaseConnector):
                 return {"success": False, "message": "QuickBooks company not configured"}

             access_token = self.get_access_token()
             if not access_token:
                 return {"success": False, "message": "No access token available. Please reconnect the integration."}

             synced_count = 0
             errors = []

             # Sync invoices (create as invoices in QuickBooks)
             if sync_type == "full" or sync_type == "invoices":
-                invoices = Invoice.query.filter(
-                    Invoice.status.in_(["sent", "paid"]), Invoice.created_at >= datetime.utcnow() - timedelta(days=90)
-                ).all()
-
-                for invoice in invoices:
-                    try:
-                        qb_invoice = self._create_quickbooks_invoice(invoice, access_token, realm_id)
-                        if qb_invoice:
-                            # Store QuickBooks ID in invoice metadata
-                            if not hasattr(invoice, "metadata") or not invoice.metadata:
-                                invoice.metadata = {}
-                            invoice.metadata["quickbooks_id"] = qb_invoice.get("Id")
-                            synced_count += 1
-                    except Exception as e:
-                        errors.append(f"Error syncing invoice {invoice.id}: {str(e)}")
+                try:
+                    invoices = Invoice.query.filter(
+                        Invoice.status.in_(["sent", "paid"]), Invoice.created_at >= datetime.utcnow() - timedelta(days=90)
+                    ).all()
+
+                    for invoice in invoices:
+                        try:
+                            # Skip if already synced (has QuickBooks ID)
+                            if hasattr(invoice, "metadata") and invoice.metadata and invoice.metadata.get("quickbooks_id"):
+                                continue
+
+                            qb_invoice = self._create_quickbooks_invoice(invoice, access_token, realm_id)
+                            if qb_invoice:
+                                # Store QuickBooks ID in invoice metadata
+                                if not hasattr(invoice, "metadata") or not invoice.metadata:
+                                    invoice.metadata = {}
+                                invoice.metadata["quickbooks_id"] = qb_invoice.get("Id")
+                                synced_count += 1
+                        except ValueError as e:
+                            # Validation errors - log but continue
+                            error_msg = f"Invoice {invoice.id}: {str(e)}"
+                            errors.append(error_msg)
+                            logger.warning(error_msg)
+                        except requests.exceptions.HTTPError as e:
+                            # API errors - log with details
+                            error_msg = f"Invoice {invoice.id}: QuickBooks API error - {e.response.status_code}: {e.response.text[:200] if e.response else str(e)}"
+                            errors.append(error_msg)
+                            logger.error(error_msg, exc_info=True)
+                        except Exception as e:
+                            # Other errors
+                            error_msg = f"Invoice {invoice.id}: {str(e)}"
+                            errors.append(error_msg)
+                            logger.error(error_msg, exc_info=True)
+                except Exception as e:
+                    error_msg = f"Error fetching invoices: {str(e)}"
+                    errors.append(error_msg)
+                    logger.error(error_msg, exc_info=True)

             # Sync expenses (create as expenses in QuickBooks)
             if sync_type == "full" or sync_type == "expenses":
-                expenses = Expense.query.filter(Expense.date >= datetime.utcnow().date() - timedelta(days=90)).all()
-
-                for expense in expenses:
-                    try:
-                        qb_expense = self._create_quickbooks_expense(expense, access_token, realm_id)
-                        if qb_expense:
-                            if not hasattr(expense, "metadata") or not expense.metadata:
-                                expense.metadata = {}
-                            expense.metadata["quickbooks_id"] = qb_expense.get("Id")
-                            synced_count += 1
-                    except Exception as e:
-                        errors.append(f"Error syncing expense {expense.id}: {str(e)}")
+                try:
+                    expenses = Expense.query.filter(Expense.date >= datetime.utcnow().date() - timedelta(days=90)).all()
+
+                    for expense in expenses:
+                        try:
+                            # Skip if already synced
+                            if hasattr(expense, "metadata") and expense.metadata and expense.metadata.get("quickbooks_id"):
+                                continue
+
+                            qb_expense = self._create_quickbooks_expense(expense, access_token, realm_id)
+                            if qb_expense:
+                                if not hasattr(expense, "metadata") or not expense.metadata:
+                                    expense.metadata = {}
+                                expense.metadata["quickbooks_id"] = qb_expense.get("Id")
+                                synced_count += 1
+                        except ValueError as e:
+                            # Validation errors
+                            error_msg = f"Expense {expense.id}: {str(e)}"
+                            errors.append(error_msg)
+                            logger.warning(error_msg)
+                        except requests.exceptions.HTTPError as e:
+                            # API errors
+                            error_msg = f"Expense {expense.id}: QuickBooks API error - {e.response.status_code}: {e.response.text[:200] if e.response else str(e)}"
+                            errors.append(error_msg)
+                            logger.error(error_msg, exc_info=True)
+                        except Exception as e:
+                            # Other errors
+                            error_msg = f"Expense {expense.id}: {str(e)}"
+                            errors.append(error_msg)
+                            logger.error(error_msg, exc_info=True)
+                except Exception as e:
+                    error_msg = f"Error fetching expenses: {str(e)}"
+                    errors.append(error_msg)
+                    logger.error(error_msg, exc_info=True)

-            db.session.commit()
+            try:
+                db.session.commit()
+            except Exception as e:
+                db.session.rollback()
+                error_msg = f"Database error during sync: {str(e)}"
+                errors.append(error_msg)
+                logger.error(error_msg, exc_info=True)
+                return {"success": False, "message": error_msg, "synced_count": synced_count, "errors": errors}

-            return {"success": True, "synced_count": synced_count, "errors": errors}
+            if errors:
+                return {
+                    "success": True,
+                    "synced_count": synced_count,
+                    "errors": errors,
+                    "message": f"Sync completed with {len(errors)} error(s). Synced {synced_count} items."
+                }
+
+            return {"success": True, "synced_count": synced_count, "errors": errors, "message": f"Successfully synced {synced_count} items."}

+        except requests.exceptions.RequestException as e:
+            error_msg = f"Network error during QuickBooks sync: {str(e)}"
+            logger.error(error_msg, exc_info=True)
+            return {"success": False, "message": error_msg}
         except Exception as e:
-            return {"success": False, "message": f"Sync failed: {str(e)}"}
+            error_msg = f"Sync failed: {str(e)}"
+            logger.error(error_msg, exc_info=True)
+            return {"success": False, "message": error_msg}

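The mapping dictionaries consumed by the helpers below are plain string-keyed dicts stored in the integration config. A hypothetical example of a configured mapping set (key names partly assumed, values illustrative):

    integration.config = {
        "customer_mappings": {"12": "58"},   # TimeTracker client id -> QuickBooks Customer Id
        "item_mappings": {"7": "3"},         # invoice item id -> QuickBooks Item Id
        "account_mappings": {"2": "64"},     # expense category id -> QuickBooks Account Id
        "default_expense_account_id": "64",  # fallback expense account
    }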
     def _create_quickbooks_invoice(self, invoice, access_token: str, realm_id: str) -> Optional[Dict]:
         """Create invoice in QuickBooks"""
+        # Get customer mapping from integration config or invoice metadata
+        customer_mapping = self.integration.config.get("customer_mappings", {}) if self.integration else {}
+        item_mapping = self.integration.config.get("item_mappings", {}) if self.integration else {}
+
+        # Try to get QuickBooks customer ID from mapping or metadata
+        customer_qb_id = None
+        if invoice.client_id:
+            # Check mapping first
+            customer_qb_id = customer_mapping.get(str(invoice.client_id))
+            # Fallback to invoice metadata
+            if not customer_qb_id and hasattr(invoice, "metadata") and invoice.metadata:
+                customer_qb_id = invoice.metadata.get("quickbooks_customer_id")
+
+        # If no mapping found, try to find customer by name in QuickBooks
+        if not customer_qb_id and invoice.client_id:
+            try:
+                customer_name = invoice.client.name if invoice.client else None
+                if customer_name:
+                    # Query QuickBooks for customer by DisplayName
+                    # QuickBooks query syntax: SELECT * FROM Customer WHERE DisplayName = 'CustomerName'
+                    # URL encode the query parameter; escape single quotes for the query language
+                    from urllib.parse import quote
+                    escaped_name = customer_name.replace("'", "''")
+                    query = f"SELECT * FROM Customer WHERE DisplayName = '{escaped_name}'"
+                    query_url = f"/v3/company/{realm_id}/query?query={quote(query)}"
+
+                    customers_response = self._api_request(
+                        "GET",
+                        query_url,
+                        access_token,
+                        realm_id
+                    )
+
+                    if customers_response and "QueryResponse" in customers_response:
+                        customers = customers_response["QueryResponse"].get("Customer", [])
+                        if customers:
+                            # Handle both single customer and list of customers
+                            if isinstance(customers, list):
+                                if len(customers) > 0:
+                                    customer_qb_id = customers[0].get("Id")
+                            else:
+                                customer_qb_id = customers.get("Id")
+
+                    if customer_qb_id:
+                        # Auto-save mapping for future use
+                        if not self.integration.config:
+                            self.integration.config = {}
+                        if "customer_mappings" not in self.integration.config:
+                            self.integration.config["customer_mappings"] = {}
+                        self.integration.config["customer_mappings"][str(invoice.client_id)] = customer_qb_id
+                        logger.info(f"Auto-mapped client {invoice.client_id} to QuickBooks customer {customer_qb_id}")
+                    else:
+                        logger.warning(f"Customer '{customer_name}' not found in QuickBooks. Please configure customer mapping.")
+            except Exception as e:
+                logger.error(f"Error looking up QuickBooks customer: {e}", exc_info=True)
+
+        # If still no customer ID, we cannot create the invoice
+        if not customer_qb_id:
+            error_msg = f"Customer mapping not found for client {invoice.client_id}. Cannot create QuickBooks invoice."
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
         # Build QuickBooks invoice structure
-        qb_invoice = {"Line": []}
+        qb_invoice = {
+            "CustomerRef": {"value": customer_qb_id},
+            "Line": []
+        }

         # Add invoice items
         for item in invoice.items:
-            qb_invoice["Line"].append(
-                {
-                    "Amount": float(item.quantity * item.unit_price),
-                    "DetailType": "SalesItemLineDetail",
-                    "SalesItemLineDetail": {
-                        "ItemRef": {
-                            "value": "1",  # Would need to map to actual QuickBooks item
-                            "name": item.description,
-                        },
-                        "Qty": float(item.quantity),
-                        "UnitPrice": float(item.unit_price),
-                    },
-                }
-            )
-
-        # Add customer reference (would need customer mapping)
-        # qb_invoice["CustomerRef"] = {"value": customer_qb_id}
+            try:
+                # Try to get QuickBooks item ID from mapping
+                item_qb_id = item_mapping.get(str(item.id))
+                if not item_qb_id and isinstance(item_mapping.get(item.description), dict):
+                    item_qb_id = item_mapping.get(item.description, {}).get("id")
+
+                item_qb_name = item.description or "Service"
+
+                # If no mapping, try to find item by name in QuickBooks
+                if not item_qb_id:
+                    try:
+                        # Query QuickBooks for item by Name
+                        from urllib.parse import quote
+                        escaped_item_name = item_qb_name.replace("'", "''")
+                        query = f"SELECT * FROM Item WHERE Name = '{escaped_item_name}'"
+                        query_url = f"/v3/company/{realm_id}/query?query={quote(query)}"
+
+                        items_response = self._api_request(
+                            "GET",
+                            query_url,
+                            access_token,
+                            realm_id
+                        )
+
+                        if items_response and "QueryResponse" in items_response:
+                            items = items_response["QueryResponse"].get("Item", [])
+                            if items:
+                                # Handle both single item and list of items
+                                if isinstance(items, list):
+                                    if len(items) > 0:
+                                        item_qb_id = items[0].get("Id")
+                                else:
+                                    item_qb_id = items.get("Id")
+
+                        if item_qb_id:
+                            # Auto-save mapping for future use
+                            if "item_mappings" not in self.integration.config:
+                                self.integration.config["item_mappings"] = {}
+                            self.integration.config["item_mappings"][str(item.id)] = item_qb_id
+                            logger.info(f"Auto-mapped invoice item {item.id} to QuickBooks item {item_qb_id}")
+                    except Exception as e:
+                        logger.warning(f"Error looking up QuickBooks item '{item_qb_name}': {e}")
+
+                # Build line item
+                line_item = {
+                    "Amount": float(item.quantity * item.unit_price),
+                    "DetailType": "SalesItemLineDetail",
+                    "SalesItemLineDetail": {
+                        "Qty": float(item.quantity),
+                        "UnitPrice": float(item.unit_price),
+                    },
+                }
+
+                if item_qb_id:
+                    line_item["SalesItemLineDetail"]["ItemRef"] = {
+                        "value": item_qb_id,
+                        "name": item_qb_name,
+                    }
+                else:
+                    # Use description as item name (QuickBooks will use or create item)
+                    line_item["SalesItemLineDetail"]["ItemRef"] = {
+                        "name": item_qb_name,
+                    }
+                    logger.warning(f"Item mapping not found for invoice item {item.id}. Using description as item name.")
+
+                qb_invoice["Line"].append(line_item)
+            except Exception as e:
+                logger.error(f"Error processing invoice item {item.id}: {e}", exc_info=True)
+                # Continue with other items instead of failing completely
+                continue
+
+        # Validate invoice has at least one line item
+        if not qb_invoice["Line"]:
+            error_msg = "Invoice has no valid line items"
+            logger.error(error_msg)
+            raise ValueError(error_msg)
+
+        # Add invoice date and due date
+        if invoice.created_at:
+            qb_invoice["TxnDate"] = invoice.created_at.strftime("%Y-%m-%d")
+        if invoice.due_date:
+            qb_invoice["DueDate"] = invoice.due_date.strftime("%Y-%m-%d")

         endpoint = f"/v3/company/{realm_id}/invoice"
-        return self._api_request("POST", endpoint, access_token, realm_id)
+        result = self._api_request("POST", endpoint, access_token, realm_id, json_data=qb_invoice)
+
+        if not result:
+            raise ValueError("Failed to create invoice in QuickBooks - no response from API")
+
+        # Validate response
+        if "Invoice" not in result:
+            raise ValueError(f"Invalid response from QuickBooks API: {result}")
+
+        return result

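Putting it together, the POST body sent to /v3/company/{realm_id}/invoice ends up shaped roughly like this (IDs and amounts are illustrative):

    qb_invoice = {
        "CustomerRef": {"value": "58"},
        "TxnDate": "2026-01-10",
        "DueDate": "2026-02-09",
        "Line": [
            {
                "Amount": 450.0,
                "DetailType": "SalesItemLineDetail",
                "SalesItemLineDetail": {
                    "ItemRef": {"value": "3", "name": "Consulting"},
                    "Qty": 3.0,
                    "UnitPrice": 150.0,
                },
            }
        ],
    }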
def _create_quickbooks_expense(self, expense, access_token: str, realm_id: str) -> Optional[Dict]:
|
||||
"""Create expense in QuickBooks"""
|
||||
# Get account mapping from integration config
|
||||
account_mapping = self.integration.config.get("account_mappings", {}) if self.integration else {}
|
||||
default_expense_account = self.integration.config.get("default_expense_account_id") if self.integration else None
|
||||
|
||||
# Try to get account ID from expense category mapping or use default
|
||||
account_id = default_expense_account
|
||||
if expense.category_id:
|
||||
account_id = account_mapping.get(str(expense.category_id), default_expense_account)
|
||||
elif hasattr(expense, "metadata") and expense.metadata:
|
||||
account_id = expense.metadata.get("quickbooks_account_id", default_expense_account)
|
||||
|
||||
# If no account ID found, try to find or use default expense account
|
||||
if not account_id:
|
||||
try:
|
||||
# Query for default expense accounts
|
||||
from urllib.parse import quote
|
||||
query = "SELECT * FROM Account WHERE AccountType = 'Expense' AND Active = true MAXRESULTS 1"
|
||||
query_url = f"/v3/company/{realm_id}/query?query={quote(query)}"
|
||||
|
||||
accounts_response = self._api_request(
|
||||
"GET",
|
||||
query_url,
|
||||
access_token,
|
||||
realm_id
|
||||
)
|
||||
|
||||
if accounts_response and "QueryResponse" in accounts_response:
|
||||
accounts = accounts_response["QueryResponse"].get("Account", [])
|
||||
if accounts:
|
||||
if isinstance(accounts, list):
|
||||
if len(accounts) > 0:
|
||||
account_id = accounts[0].get("Id")
|
||||
else:
|
||||
account_id = accounts.get("Id")
|
||||
|
||||
if not account_id:
|
||||
# Fallback to a common expense account ID
|
||||
account_id = "1"
|
||||
logger.warning("No expense account found, using default account ID 1")
|
||||
except Exception as e:
|
||||
logger.error(f"Error looking up QuickBooks expense account: {e}", exc_info=True)
|
||||
# Use fallback
|
||||
account_id = account_id or "1"
|
||||
|
||||
# Build QuickBooks expense structure
|
||||
qb_expense = {
|
||||
"PaymentType": "Cash",
|
||||
"AccountRef": {"value": "1"}, # Would need account mapping
|
||||
"AccountRef": {"value": account_id},
|
||||
"Line": [
|
||||
{
|
||||
"Amount": float(expense.amount),
|
||||
"DetailType": "AccountBasedExpenseLineDetail",
|
||||
"AccountBasedExpenseLineDetail": {"AccountRef": {"value": "1"}}, # Expense account
|
||||
"AccountBasedExpenseLineDetail": {"AccountRef": {"value": account_id}},
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# Add vendor if available
|
||||
if expense.vendor:
|
||||
qb_expense["EntityRef"] = {"name": expense.vendor}
|
||||
|
||||
# Add expense date
|
||||
if expense.date:
|
||||
qb_expense["TxnDate"] = expense.date.strftime("%Y-%m-%d")
|
||||
|
||||
# Add memo/description
|
||||
if expense.description:
|
||||
qb_expense["Line"][0]["Description"] = expense.description
|
||||
|
||||
endpoint = f"/v3/company/{realm_id}/purchase"
|
||||
return self._api_request("POST", endpoint, access_token, realm_id)
|
||||
result = self._api_request("POST", endpoint, access_token, realm_id, json_data=qb_expense)
|
||||
|
||||
if not result:
|
||||
raise ValueError("Failed to create expense in QuickBooks - no response from API")
|
||||
|
||||
# Validate response
|
||||
if "Purchase" not in result:
|
||||
raise ValueError(f"Invalid response from QuickBooks API: {result}")
|
||||
|
||||
return result
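
The account resolution above amounts to a fixed precedence: category mapping, then expense metadata, then a live Account query, then the literal "1". A condensed sketch of that precedence (helper name hypothetical; the live-query step is omitted):

def resolve_expense_account(expense, config: dict) -> str:
    """Fallback order mirroring the code above; omits the live Account query step."""
    default_id = config.get("default_expense_account_id")
    mapped = None
    if getattr(expense, "category_id", None):
        mapped = config.get("account_mappings", {}).get(str(expense.category_id))
    elif getattr(expense, "metadata", None):
        mapped = expense.metadata.get("quickbooks_account_id")
    return mapped or default_id or "1"  # "1" is the last-resort fallback used above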

    def get_config_schema(self) -> Dict[str, Any]:
        """Get configuration schema."""

@@ -330,6 +622,31 @@ class QuickBooksConnector(BaseConnector):
                },
                {"name": "sync_invoices", "type": "boolean", "label": "Sync Invoices", "default": True},
                {"name": "sync_expenses", "type": "boolean", "label": "Sync Expenses", "default": True},
                {
                    "name": "default_expense_account_id",
                    "type": "string",
                    "label": "Default Expense Account ID",
                    "description": "QuickBooks account ID to use for expenses when no mapping is configured",
                    "default": "1",
                },
                {
                    "name": "customer_mappings",
                    "type": "json",
                    "label": "Customer Mappings",
                    "description": "JSON mapping of TimeTracker client IDs to QuickBooks customer IDs (e.g., {\"1\": \"qb_customer_id_123\"})",
                },
                {
                    "name": "item_mappings",
                    "type": "json",
                    "label": "Item Mappings",
                    "description": "JSON mapping of TimeTracker invoice items to QuickBooks items",
                },
                {
                    "name": "account_mappings",
                    "type": "json",
                    "label": "Account Mappings",
                    "description": "JSON mapping of TimeTracker expense category IDs to QuickBooks account IDs",
                },
            ],
            "required": ["realm_id"],
        }
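
For illustration, a fully populated config under this schema might look like the following (all IDs hypothetical):

quickbooks_config = {
    "realm_id": "1234567890",
    "sync_invoices": True,
    "sync_expenses": True,
    "default_expense_account_id": "58",
    "customer_mappings": {"1": "qb_customer_id_123"},  # TimeTracker client 1 -> QuickBooks customer
    "item_mappings": {"42": "17"},                     # invoice item 42 -> QuickBooks item Id 17
    "account_mappings": {"3": "64"},                   # expense category 3 -> QuickBooks account Id 64
}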

@@ -133,77 +133,269 @@ class TrelloConnector(BaseConnector):
            if not token or not api_key:
                return {"success": False, "message": "Trello credentials not configured"}

            synced_count = 0
            errors = []

            # Get boards
            boards_response = requests.get(
                f"{self.BASE_URL}/members/me/boards", params={"key": api_key, "token": token, "filter": "open"}
            )

            if boards_response.status_code == 200:
                boards = boards_response.json()

                for board in boards:
                    try:
                        # Create or update project from board
                        project = Project.query.filter_by(
                            user_id=self.integration.user_id, name=board.get("name")
                        ).first()

                        if not project:
                            project = Project(
                                name=board.get("name"),
                                description=board.get("desc", ""),
                                user_id=self.integration.user_id,
                                status="active",
                            )
                            db.session.add(project)
                            db.session.flush()

                        # Store Trello board ID in metadata
                        if not hasattr(project, "metadata") or not project.metadata:
                            project.metadata = {}
                        project.metadata["trello_board_id"] = board.get("id")

                        # Sync cards as tasks
                        cards_response = requests.get(
                            f"{self.BASE_URL}/boards/{board.get('id')}/cards",
                            params={"key": api_key, "token": token, "filter": "open"},
                        )

                        if cards_response.status_code == 200:
                            cards = cards_response.json()

                            for card in cards:
                                # Find or create task
                                task = Task.query.filter_by(project_id=project.id, name=card.get("name")).first()

                                if not task:
                                    task = Task(
                                        project_id=project.id,
                                        name=card.get("name"),
                                        description=card.get("desc", ""),
                                        status=self._map_trello_list_to_status(card.get("idList")),
                                    )
                                    db.session.add(task)
                                    db.session.flush()

                                # Store Trello card ID in metadata
                                if not hasattr(task, "metadata") or not task.metadata:
                                    task.metadata = {}
                                task.metadata["trello_card_id"] = card.get("id")

                                synced_count += 1
                    except Exception as e:
                        errors.append(f"Error syncing board {board.get('name')}: {str(e)}")

            db.session.commit()

            return {"success": True, "synced_count": synced_count, "errors": errors}
            # Get sync direction from config
            sync_direction = self.integration.config.get("sync_direction", "trello_to_timetracker") if self.integration else "trello_to_timetracker"

            if sync_direction in ("trello_to_timetracker", "bidirectional"):
                trello_result = self._sync_trello_to_timetracker(api_key, token)
                # If bidirectional, also sync TimeTracker to Trello
                if sync_direction == "bidirectional":
                    tracker_result = self._sync_timetracker_to_trello(api_key, token)
                    # Merge results
                    if trello_result.get("success") and tracker_result.get("success"):
                        return {
                            "success": True,
                            "synced_count": trello_result.get("synced_count", 0) + tracker_result.get("synced_count", 0),
                            "errors": trello_result.get("errors", []) + tracker_result.get("errors", []),
                            "message": f"Bidirectional sync: Trello→TimeTracker: {trello_result.get('synced_count', 0)} items | TimeTracker→Trello: {tracker_result.get('synced_count', 0)} items",
                        }
                    elif trello_result.get("success"):
                        return trello_result
                    elif tracker_result.get("success"):
                        return tracker_result
                    else:
                        return {"success": False, "message": f"Both sync directions failed. Trello→TimeTracker: {trello_result.get('message')}, TimeTracker→Trello: {tracker_result.get('message')}"}
                return trello_result

            # Handle TimeTracker to Trello sync
            if sync_direction == "timetracker_to_trello":
                return self._sync_timetracker_to_trello(api_key, token)

            return {"success": False, "message": f"Unknown sync direction: {sync_direction}"}

        except Exception as e:
            return {"success": False, "message": f"Sync failed: {str(e)}"}

    def _sync_trello_to_timetracker(self, api_key: str, token: str) -> Dict[str, Any]:
        """Sync Trello boards and cards to TimeTracker projects and tasks."""
        from app.models import Project, Task
        from app import db

        synced_count = 0
        errors = []

        # Get boards
        boards_response = requests.get(
            f"{self.BASE_URL}/members/me/boards", params={"key": api_key, "token": token, "filter": "open"}
        )

        if boards_response.status_code == 200:
            boards = boards_response.json()

            # Filter by board_ids if configured
            board_ids = self.integration.config.get("board_ids", []) if self.integration else []
            if board_ids:
                boards = [b for b in boards if b.get("id") in board_ids]

            for board in boards:
                try:
                    # Create or update project from board
                    project = Project.query.filter_by(
                        user_id=self.integration.user_id, name=board.get("name")
                    ).first()

                    if not project:
                        project = Project(
                            name=board.get("name"),
                            description=board.get("desc", ""),
                            user_id=self.integration.user_id,
                            status="active",
                        )
                        db.session.add(project)
                        db.session.flush()

                    # Store Trello board ID in metadata
                    if not hasattr(project, "metadata") or not project.metadata:
                        project.metadata = {}
                    project.metadata["trello_board_id"] = board.get("id")

                    # Sync cards as tasks
                    cards_response = requests.get(
                        f"{self.BASE_URL}/boards/{board.get('id')}/cards",
                        params={"key": api_key, "token": token, "filter": "open"},
                    )

                    if cards_response.status_code == 200:
                        cards = cards_response.json()

                        for card in cards:
                            # Find or create task
                            task = Task.query.filter_by(project_id=project.id, name=card.get("name")).first()

                            if not task:
                                task = Task(
                                    project_id=project.id,
                                    name=card.get("name"),
                                    description=card.get("desc", ""),
                                    status=self._map_trello_list_to_status(card.get("idList")),
                                )
                                db.session.add(task)
                                db.session.flush()
                            else:
                                # Update existing task if needed
                                if card.get("desc") and task.description != card.get("desc"):
                                    task.description = card.get("desc")
                                # Update status based on list
                                new_status = self._map_trello_list_to_status(card.get("idList"))
                                if task.status != new_status:
                                    task.status = new_status

                            # Store Trello card ID in metadata
                            if not hasattr(task, "metadata") or not task.metadata:
                                task.metadata = {}
                            task.metadata["trello_card_id"] = card.get("id")
                            task.metadata["trello_list_id"] = card.get("idList")

                            synced_count += 1
                except Exception as e:
                    errors.append(f"Error syncing board {board.get('name')}: {str(e)}")

        db.session.commit()

        return {"success": True, "synced_count": synced_count, "errors": errors}

    def _sync_timetracker_to_trello(self, api_key: str, token: str) -> Dict[str, Any]:
        """Sync TimeTracker tasks to Trello cards."""
        from app.models import Project, Task
        from app import db

        synced_count = 0
        errors = []

        # Get all projects that have Trello board IDs
        projects = Project.query.filter_by(user_id=self.integration.user_id, status="active").all()

        for project in projects:
            # Check if project has Trello board ID
            trello_board_id = None
            if hasattr(project, "metadata") and project.metadata:
                trello_board_id = project.metadata.get("trello_board_id")

            if not trello_board_id:
                # Try to find or create board
                board_name = project.name
                boards_response = requests.get(
                    f"{self.BASE_URL}/members/me/boards",
                    params={"key": api_key, "token": token, "filter": "open"}
                )

                if boards_response.status_code == 200:
                    boards = boards_response.json()
                    matching_board = next((b for b in boards if b.get("name") == board_name), None)

                    if matching_board:
                        trello_board_id = matching_board.get("id")
                    else:
                        # Create new board (optional - might require additional permissions)
                        try:
                            create_response = requests.post(
                                f"{self.BASE_URL}/boards",
                                params={"key": api_key, "token": token, "name": board_name}
                            )
                            if create_response.status_code == 200:
                                trello_board_id = create_response.json().get("id")
                        except Exception as e:
                            errors.append(f"Could not create Trello board for project {project.name}: {str(e)}")
                            continue

                if trello_board_id:
                    if not hasattr(project, "metadata") or not project.metadata:
                        project.metadata = {}
                    project.metadata["trello_board_id"] = trello_board_id

            if not trello_board_id:
                continue

            # Get lists for this board
            lists_response = requests.get(
                f"{self.BASE_URL}/boards/{trello_board_id}/lists",
                params={"key": api_key, "token": token, "filter": "open"}
            )

            if lists_response.status_code != 200:
                errors.append(f"Could not get lists for board {project.name}")
                continue

            lists = lists_response.json()
            # Create a mapping of status to list ID
            status_to_list = {}
            for lst in lists:
                list_name = lst.get("name", "").lower()
                if "todo" in list_name or "to do" in list_name or "backlog" in list_name:
                    status_to_list["todo"] = lst.get("id")
                elif "in progress" in list_name or "doing" in list_name or "active" in list_name:
                    status_to_list["in_progress"] = lst.get("id")
                elif "done" in list_name or "completed" in list_name:
                    status_to_list["done"] = lst.get("id")
                elif "review" in list_name:
                    status_to_list["review"] = lst.get("id")

            # Default to first list if no mapping found
            default_list_id = lists[0].get("id") if lists else None

            # Get tasks for this project
            tasks = Task.query.filter_by(project_id=project.id).all()

            for task in tasks:
                try:
                    # Check if task already has Trello card ID
                    trello_card_id = None
                    if hasattr(task, "metadata") and task.metadata:
                        trello_card_id = task.metadata.get("trello_card_id")

                    # Determine target list
                    target_list_id = status_to_list.get(task.status, default_list_id)
                    if not target_list_id:
                        continue

                    if trello_card_id:
                        # Update existing card
                        update_data = {
                            "name": task.name,
                            "desc": task.description or "",
                            "idList": target_list_id,
                        }
                        update_response = requests.put(
                            f"{self.BASE_URL}/cards/{trello_card_id}",
                            params={"key": api_key, "token": token},
                            json=update_data
                        )
                        if update_response.status_code == 200:
                            synced_count += 1
                        else:
                            errors.append(f"Failed to update Trello card for task {task.id}: {update_response.status_code}")
                    else:
                        # Create new card
                        create_data = {
                            "name": task.name,
                            "desc": task.description or "",
                            "idList": target_list_id,
                        }
                        create_response = requests.post(
                            f"{self.BASE_URL}/cards",
                            params={"key": api_key, "token": token},
                            json=create_data
                        )
                        if create_response.status_code == 200:
                            card_data = create_response.json()
                            trello_card_id = card_data.get("id")

                            # Store Trello card ID in task metadata
                            if not hasattr(task, "metadata") or not task.metadata:
                                task.metadata = {}
                            task.metadata["trello_card_id"] = trello_card_id
                            task.metadata["trello_list_id"] = target_list_id

                            synced_count += 1
                        else:
                            errors.append(f"Failed to create Trello card for task {task.id}: {create_response.status_code}")

                except Exception as e:
                    errors.append(f"Error syncing task {task.id} to Trello: {str(e)}")

        db.session.commit()

        return {"success": True, "synced_count": synced_count, "errors": errors}

    def _map_trello_list_to_status(self, list_id: str) -> str:
        """Map Trello list to task status."""

@@ -265,33 +265,72 @@ class XeroConnector(BaseConnector):

    def _create_xero_invoice(self, invoice, access_token: str, tenant_id: str) -> Optional[Dict]:
        """Create invoice in Xero"""
        # Get customer mapping from integration config or invoice metadata
        contact_mapping = self.integration.config.get("contact_mappings", {}) if self.integration else {}
        item_mapping = self.integration.config.get("item_mappings", {}) if self.integration else {}

        # Try to get Xero contact ID from mapping or metadata
        contact_id = None
        contact_name = invoice.client.name if invoice.client else "Unknown"

        if invoice.client_id:
            # Check mapping first
            contact_id = contact_mapping.get(str(invoice.client_id))
            # Fallback to invoice metadata
            if not contact_id and hasattr(invoice, "metadata") and invoice.metadata:
                contact_id = invoice.metadata.get("xero_contact_id")

        # Build Xero invoice structure
        xero_invoice = {
            "Type": "ACCREC",
            "Contact": {"Name": invoice.client.name if invoice.client else "Unknown"},
            "Date": invoice.date.strftime("%Y-%m-%d") if invoice.date else datetime.utcnow().strftime("%Y-%m-%d"),
            "DueDate": (
                invoice.due_date.strftime("%Y-%m-%d") if invoice.due_date else datetime.utcnow().strftime("%Y-%m-%d")
            ),
            "LineItems": [],
        }

        # Add contact - use ID if available, otherwise use name
        if contact_id:
            xero_invoice["Contact"] = {"ContactID": contact_id}
        else:
            xero_invoice["Contact"] = {"Name": contact_name}
            logger.warning(f"Contact mapping not found for client {invoice.client_id}. Using name: {contact_name}")

        # Add invoice items
        for item in invoice.items:
            xero_invoice["LineItems"].append(
                {
                    "Description": item.description,
                    "Quantity": float(item.quantity),
                    "UnitAmount": float(item.unit_price),
                    "LineAmount": float(item.quantity * item.unit_price),
                }
            )
            # Try to get Xero item code from mapping
            item_code = item_mapping.get(str(item.id)) or item_mapping.get(item.description, {}).get("code")

            line_item = {
                "Description": item.description,
                "Quantity": float(item.quantity),
                "UnitAmount": float(item.unit_price),
                "LineAmount": float(item.quantity * item.unit_price),
            }

            # Add item code if available
            if item_code:
                line_item["ItemCode"] = item_code

            xero_invoice["LineItems"].append(line_item)

        endpoint = "/api.xro/2.0/Invoices"
        return self._api_request("POST", endpoint, access_token, tenant_id)

    def _create_xero_expense(self, expense, access_token: str, tenant_id: str) -> Optional[Dict]:
        """Create expense in Xero"""
        # Get account mapping from integration config
        account_mapping = self.integration.config.get("account_mappings", {}) if self.integration else {}
        default_expense_account = self.integration.config.get("default_expense_account_code", "200") if self.integration else "200"

        # Try to get account code from expense category mapping or use default
        account_code = default_expense_account
        if expense.category_id:
            account_code = account_mapping.get(str(expense.category_id), default_expense_account)
        elif hasattr(expense, "metadata") and expense.metadata:
            account_code = expense.metadata.get("xero_account_code", default_expense_account)

        # Build Xero expense structure
        xero_expense = {
            "Date": expense.date.strftime("%Y-%m-%d") if expense.date else datetime.utcnow().strftime("%Y-%m-%d"),

@@ -302,6 +341,7 @@ class XeroConnector(BaseConnector):
                    "Quantity": 1.0,
                    "UnitAmount": float(expense.amount),
                    "LineAmount": float(expense.amount),
                    "AccountCode": account_code,
                }
            ],
        }

@@ -321,6 +361,31 @@ class XeroConnector(BaseConnector):
                },
                {"name": "sync_invoices", "type": "boolean", "label": "Sync Invoices", "default": True},
                {"name": "sync_expenses", "type": "boolean", "label": "Sync Expenses", "default": True},
                {
                    "name": "default_expense_account_code",
                    "type": "string",
                    "label": "Default Expense Account Code",
                    "description": "Xero account code to use for expenses when no mapping is configured",
                    "default": "200",
                },
                {
                    "name": "contact_mappings",
                    "type": "json",
                    "label": "Contact Mappings",
                    "description": "JSON mapping of TimeTracker client IDs to Xero Contact IDs (e.g., {\"1\": \"contact-uuid-123\"})",
                },
                {
                    "name": "item_mappings",
                    "type": "json",
                    "label": "Item Mappings",
                    "description": "JSON mapping of TimeTracker invoice items to Xero item codes",
                },
                {
                    "name": "account_mappings",
                    "type": "json",
                    "label": "Account Mappings",
                    "description": "JSON mapping of TimeTracker expense category IDs to Xero account codes",
                },
            ],
            "required": ["tenant_id"],
        }
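
For illustration, a populated Xero config under this schema might look like this (tenant ID, contact UUIDs, and codes hypothetical):

xero_config = {
    "tenant_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "sync_invoices": True,
    "sync_expenses": True,
    "default_expense_account_code": "200",
    "contact_mappings": {"1": "contact-uuid-123"},  # TimeTracker client 1 -> Xero Contact
    "item_mappings": {"42": "CONSULTING"},          # invoice item 42 -> Xero item code
    "account_mappings": {"3": "420"},               # expense category 3 -> Xero account code
}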

@@ -39,6 +39,7 @@ from .audit_log import AuditLog
from .recurring_invoice import RecurringInvoice
from .invoice_email import InvoiceEmail
from .webhook import Webhook, WebhookDelivery
from .push_subscription import PushSubscription
from .quote import Quote, QuoteItem, QuotePDFTemplate
from .quote_attachment import QuoteAttachment
from .project_attachment import ProjectAttachment

app/models/push_subscription.py (new file, 70 lines)
@@ -0,0 +1,70 @@
"""
Push Subscription model for storing browser push notification subscriptions.
"""

from datetime import datetime
from app import db
from app.utils.timezone import now_in_app_timezone
import json


class PushSubscription(db.Model):
    """Model for storing browser push notification subscriptions"""

    __tablename__ = "push_subscriptions"

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey("users.id", ondelete="CASCADE"), nullable=False, index=True)

    # Push subscription data (JSON format from browser Push API)
    endpoint = db.Column(db.Text, nullable=False)  # Push service endpoint URL
    keys = db.Column(db.JSON, nullable=False)  # p256dh and auth keys

    # Metadata
    user_agent = db.Column(db.String(500), nullable=True)  # Browser user agent
    created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)
    last_used_at = db.Column(db.DateTime, nullable=True)  # Last time subscription was used

    # Relationships
    user = db.relationship("User", backref="push_subscriptions", lazy="joined")

    def __init__(self, user_id, endpoint, keys, user_agent=None):
        """Create a push subscription"""
        self.user_id = user_id
        self.endpoint = endpoint
        self.keys = keys if isinstance(keys, dict) else json.loads(keys) if isinstance(keys, str) else {}
        self.user_agent = user_agent

    def __repr__(self):
        return f"<PushSubscription {self.id} for user {self.user_id}>"

    def to_dict(self):
        """Convert subscription to dictionary for API responses"""
        return {
            "id": self.id,
            "user_id": self.user_id,
            "endpoint": self.endpoint,
            "keys": self.keys,
            "user_agent": self.user_agent,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
            "last_used_at": self.last_used_at.isoformat() if self.last_used_at else None,
        }

    def update_last_used(self):
        """Update the last_used_at timestamp"""
        self.last_used_at = now_in_app_timezone()
        self.updated_at = now_in_app_timezone()
        db.session.commit()

    @classmethod
    def get_user_subscriptions(cls, user_id):
        """Get all active subscriptions for a user"""
        return cls.query.filter_by(user_id=user_id).order_by(cls.created_at.desc()).all()

    @classmethod
    def find_by_endpoint(cls, user_id, endpoint):
        """Find a subscription by user and endpoint"""
        return cls.query.filter_by(user_id=user_id, endpoint=endpoint).first()
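
A short usage sketch for the new model (app context assumed; endpoint and keys are placeholders):

# Upsert a subscription for user 7, then stamp its last-used time
sub = PushSubscription.find_by_endpoint(7, "https://push.example.com/send/abc123")
if sub is None:
    sub = PushSubscription(
        user_id=7,
        endpoint="https://push.example.com/send/abc123",
        keys={"p256dh": "<public key>", "auth": "<auth secret>"},
        user_agent="Mozilla/5.0 ...",
    )
    db.session.add(sub)
    db.session.commit()
sub.update_last_used()  # sets last_used_at/updated_at and commits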

@@ -54,10 +54,39 @@ def list_issues():

    # Check permissions - non-admin users can only see issues for their assigned clients/projects
    if not current_user.is_admin:
        # Get user's accessible client IDs (through projects they have access to)
        # For simplicity, we'll show all issues but filter in template if needed
        # In a real implementation, you'd want to filter by user permissions here
        pass
        # Check if user has permission to view all issues
        has_view_all_issues = current_user.has_permission("view_all_issues") if hasattr(current_user, 'has_permission') else False

        if not has_view_all_issues:
            # Get user's accessible project IDs (projects they created or have time entries for)
            from app.models.time_entry import TimeEntry

            # Projects the user has time entries for
            user_project_ids = db.session.query(TimeEntry.project_id).filter_by(
                user_id=current_user.id
            ).distinct().subquery()

            # Get client IDs from accessible projects
            accessible_client_ids = db.session.query(Project.client_id).filter(
                db.or_(
                    Project.id.in_(db.session.query(user_project_ids)),
                    # Also include projects where user is assigned to tasks
                    Project.id.in_(
                        db.session.query(Task.project_id).filter_by(assigned_to=current_user.id).distinct().subquery()
                    )
                )
            ).distinct().subquery()

            # Filter issues by:
            # 1. Issues assigned to the user
            # 2. Issues for clients/projects the user has access to
            query = query.filter(
                db.or_(
                    Issue.assigned_to == current_user.id,
                    Issue.client_id.in_(db.session.query(accessible_client_ids)),
                    Issue.project_id.in_(db.session.query(user_project_ids))
                )
            )

    # Order by priority and creation date
    query = query.order_by(

@@ -74,11 +103,35 @@ def list_issues():
    projects = Project.query.filter_by(status="active").order_by(Project.name).limit(500).all()
    users = User.query.filter_by(is_active=True).order_by(User.username).limit(200).all()

    # Calculate statistics
    total_issues = Issue.query.count()
    open_issues = Issue.query.filter(Issue.status.in_(["open", "in_progress"])).count()
    resolved_issues = Issue.query.filter_by(status="resolved").count()
    closed_issues = Issue.query.filter_by(status="closed").count()
    # Calculate statistics (respecting permissions)
    stats_query = Issue.query
    if not current_user.is_admin:
        has_view_all_issues = current_user.has_permission("view_all_issues") if hasattr(current_user, 'has_permission') else False
        if not has_view_all_issues:
            from app.models.time_entry import TimeEntry
            user_project_ids = db.session.query(TimeEntry.project_id).filter_by(
                user_id=current_user.id
            ).distinct().subquery()
            accessible_client_ids = db.session.query(Project.client_id).filter(
                db.or_(
                    Project.id.in_(db.session.query(user_project_ids)),
                    Project.id.in_(
                        db.session.query(Task.project_id).filter_by(assigned_to=current_user.id).distinct().subquery()
                    )
                )
            ).distinct().subquery()
            stats_query = stats_query.filter(
                db.or_(
                    Issue.assigned_to == current_user.id,
                    Issue.client_id.in_(db.session.query(accessible_client_ids)),
                    Issue.project_id.in_(db.session.query(user_project_ids))
                )
            )

    total_issues = stats_query.count()
    open_issues = stats_query.filter(Issue.status.in_(["open", "in_progress"])).count()
    resolved_issues = stats_query.filter_by(status="resolved").count()
    closed_issues = stats_query.filter_by(status="closed").count()

    return render_template(
        "issues/list.html",

@@ -106,6 +159,48 @@ def view_issue(issue_id):
    """View a specific issue"""
    issue = Issue.query.get_or_404(issue_id)

    # Check permissions - non-admin users can only view issues they have access to
    if not current_user.is_admin:
        has_view_all_issues = current_user.has_permission("view_all_issues") if hasattr(current_user, 'has_permission') else False
        if not has_view_all_issues:
            # Check if user has access to this issue
            has_access = False

            # Check if assigned to user
            if issue.assigned_to == current_user.id:
                has_access = True
            else:
                # Check if user has access through projects
                from app.models.time_entry import TimeEntry
                user_project_ids = db.session.query(TimeEntry.project_id).filter_by(
                    user_id=current_user.id
                ).distinct().all()
                user_project_ids = [p[0] for p in user_project_ids]

                # Also check projects where user is assigned to tasks
                user_task_project_ids = db.session.query(Task.project_id).filter_by(
                    assigned_to=current_user.id
                ).distinct().all()
                user_task_project_ids = [p[0] for p in user_task_project_ids]

                all_accessible_project_ids = set(user_project_ids + user_task_project_ids)

                # Check if issue's project or client's projects are accessible
                if issue.project_id and issue.project_id in all_accessible_project_ids:
                    has_access = True
                elif issue.client_id:
                    # Check if any project for this client is accessible
                    client_project_ids = db.session.query(Project.id).filter_by(
                        client_id=issue.client_id
                    ).all()
                    client_project_ids = [p[0] for p in client_project_ids]
                    if any(pid in all_accessible_project_ids for pid in client_project_ids):
                        has_access = True

            if not has_access:
                flash(_("You do not have permission to view this issue."), "error")
                return redirect(url_for("issues.list_issues"))

    # Get related tasks if project is set
    related_tasks = []
    if issue.project_id:

@@ -134,6 +229,40 @@ def edit_issue(issue_id):
    """Edit an issue"""
    issue = Issue.query.get_or_404(issue_id)

    # Check permissions - non-admin users can only edit issues they have access to
    if not current_user.is_admin:
        has_edit_all_issues = current_user.has_permission("edit_all_issues") if hasattr(current_user, 'has_permission') else False
        if not has_edit_all_issues:
            # Check if user has access to this issue (same logic as view_issue)
            has_access = False
            if issue.assigned_to == current_user.id:
                has_access = True
            else:
                from app.models.time_entry import TimeEntry
                user_project_ids = db.session.query(TimeEntry.project_id).filter_by(
                    user_id=current_user.id
                ).distinct().all()
                user_project_ids = [p[0] for p in user_project_ids]
                user_task_project_ids = db.session.query(Task.project_id).filter_by(
                    assigned_to=current_user.id
                ).distinct().all()
                user_task_project_ids = [p[0] for p in user_task_project_ids]
                all_accessible_project_ids = set(user_project_ids + user_task_project_ids)

                if issue.project_id and issue.project_id in all_accessible_project_ids:
                    has_access = True
                elif issue.client_id:
                    client_project_ids = db.session.query(Project.id).filter_by(
                        client_id=issue.client_id
                    ).all()
                    client_project_ids = [p[0] for p in client_project_ids]
                    if any(pid in all_accessible_project_ids for pid in client_project_ids):
                        has_access = True

            if not has_access:
                flash(_("You do not have permission to edit this issue."), "error")
                return redirect(url_for("issues.view_issue", issue_id=issue_id))

    if request.method == "POST":
        title = request.form.get("title", "").strip()
        description = request.form.get("description", "").strip()
@@ -6,7 +6,7 @@ from flask import Blueprint, request, jsonify
|
||||
from flask_login import login_required, current_user
|
||||
from flask_babel import gettext as _
|
||||
from app import db
|
||||
from app.models import User
|
||||
from app.models import User, PushSubscription
|
||||
from app.utils.db import safe_commit
|
||||
import json
|
||||
|
||||
@@ -18,29 +18,46 @@ push_bp = Blueprint("push", __name__)
|
||||
def subscribe_push():
|
||||
"""Subscribe user to push notifications."""
|
||||
try:
|
||||
subscription = request.json
|
||||
|
||||
# Store subscription in user model or separate table
|
||||
# For now, store in user's settings/preferences
|
||||
if not hasattr(current_user, "push_subscription"):
|
||||
# Add push_subscription field to User model if needed
|
||||
pass
|
||||
|
||||
# Store subscription (could be in a separate PushSubscription model)
|
||||
# For simplicity, storing as JSON in user preferences
|
||||
user_prefs = getattr(current_user, "preferences", {}) or {}
|
||||
if not isinstance(user_prefs, dict):
|
||||
user_prefs = {}
|
||||
|
||||
user_prefs["push_subscription"] = subscription
|
||||
current_user.preferences = user_prefs
|
||||
|
||||
subscription_data = request.json
|
||||
|
||||
if not subscription_data:
|
||||
return jsonify({"success": False, "message": "Invalid subscription data"}), 400
|
||||
|
||||
# Extract subscription details
|
||||
endpoint = subscription_data.get("endpoint")
|
||||
keys = subscription_data.get("keys", {})
|
||||
user_agent = request.headers.get("User-Agent", "")
|
||||
|
||||
if not endpoint:
|
||||
return jsonify({"success": False, "message": "Endpoint is required"}), 400
|
||||
|
||||
# Check if subscription already exists for this user and endpoint
|
||||
existing = PushSubscription.find_by_endpoint(current_user.id, endpoint)
|
||||
|
||||
if existing:
|
||||
# Update existing subscription
|
||||
existing.keys = keys
|
||||
existing.user_agent = user_agent
|
||||
from app.utils.timezone import now_in_app_timezone
|
||||
existing.updated_at = now_in_app_timezone()
|
||||
existing.update_last_used()
|
||||
else:
|
||||
# Create new subscription
|
||||
subscription = PushSubscription(
|
||||
user_id=current_user.id,
|
||||
endpoint=endpoint,
|
||||
keys=keys,
|
||||
user_agent=user_agent
|
||||
)
|
||||
db.session.add(subscription)
|
||||
|
||||
if safe_commit("subscribe_push", {"user_id": current_user.id}):
|
||||
return jsonify({"success": True, "message": "Subscribed to push notifications"})
|
||||
else:
|
||||
return jsonify({"success": False, "message": "Failed to save subscription"}), 500
|
||||
|
||||
except Exception as e:
|
||||
db.session.rollback()
|
||||
return jsonify({"success": False, "message": str(e)}), 500

@@ -49,15 +66,41 @@ def subscribe_push():
def unsubscribe_push():
    """Unsubscribe user from push notifications."""
    try:
        user_prefs = getattr(current_user, "preferences", {}) or {}
        if isinstance(user_prefs, dict):
            user_prefs.pop("push_subscription", None)
            current_user.preferences = user_prefs

        if safe_commit("unsubscribe_push", {"user_id": current_user.id}):
            return jsonify({"success": True, "message": "Unsubscribed from push notifications"})
        subscription_data = request.json
        endpoint = subscription_data.get("endpoint") if subscription_data else None

        if endpoint:
            # Remove specific subscription by endpoint
            subscription = PushSubscription.find_by_endpoint(current_user.id, endpoint)
            if subscription:
                db.session.delete(subscription)
                if safe_commit("unsubscribe_push", {"user_id": current_user.id}):
                    return jsonify({"success": True, "message": "Unsubscribed from push notifications"})
        else:
            # Remove all subscriptions for user
            subscriptions = PushSubscription.get_user_subscriptions(current_user.id)
            for subscription in subscriptions:
                db.session.delete(subscription)

            if safe_commit("unsubscribe_push_all", {"user_id": current_user.id}):
                return jsonify({"success": True, "message": "Unsubscribed from all push notifications"})

        return jsonify({"success": False, "message": "No subscription found"}), 404

    except Exception as e:
        db.session.rollback()
        return jsonify({"success": False, "message": str(e)}), 500


@push_bp.route("/api/push/subscriptions", methods=["GET"])
@login_required
def list_subscriptions():
    """Get all push subscriptions for the current user."""
    try:
        subscriptions = PushSubscription.get_user_subscriptions(current_user.id)
        return jsonify({
            "success": True,
            "subscriptions": [sub.to_dict() for sub in subscriptions]
        })
    except Exception as e:
        return jsonify({"success": False, "message": str(e)}), 500