4 changes: 2 additions & 2 deletions surfsense_backend/app/celery_app.py
@@ -76,8 +76,8 @@ def parse_schedule_interval(interval: str) -> dict:
     enable_utc=True,
     # Task execution settings
     task_track_started=True,
-    task_time_limit=3600,  # 1 hour hard limit
-    task_soft_time_limit=3000,  # 50 minutes soft limit
+    task_time_limit=28800,  # 8 hour hard limit
+    task_soft_time_limit=28200,  # 7 hours 50 minutes soft limit
     # Result backend settings
     result_expires=86400,  # Results expire after 24 hours
     result_extended=True,
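The raised limits give long-running connector syncs up to 8 hours, and the 10-minute gap between the soft and hard limits is what lets a task wind down instead of being killed mid-write. A minimal sketch of how a Celery task can use that gap; the task and helper names here are hypothetical, not from this PR:

from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded

app = Celery("surfsense")
app.conf.update(
    task_time_limit=28800,  # hard limit: the worker kills the task at 8 hours
    task_soft_time_limit=28200,  # soft limit: raised as an exception 10 minutes earlier
)

def run_indexing(connector_id: int) -> None: ...  # placeholder for the real work

def flush_progress(connector_id: int) -> None: ...  # placeholder for partial-save logic

@app.task
def index_connector(connector_id: int) -> None:
    try:
        run_indexing(connector_id)
    except SoftTimeLimitExceeded:
        # The window between the soft and hard limits is the task's chance
        # to persist partial progress before the worker terminates it.
        flush_progress(connector_id)
        raise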
19 changes: 16 additions & 3 deletions surfsense_backend/app/connectors/notion_history.py
@@ -58,10 +58,23 @@ async def get_all_pages(self, start_date=None, end_date=None):
             "timestamp": "last_edited_time",
         }
 
-        # First, get a list of all pages the integration has access to
-        search_results = await self.notion.search(**search_params)
+        # Paginate through all pages the integration has access to
+        pages = []
+        has_more = True
+        cursor = None
+
+        while has_more:
+            if cursor:
+                search_params["start_cursor"] = cursor
+
+            search_results = await self.notion.search(**search_params)
+
+            pages.extend(search_results["results"])
+            has_more = search_results.get("has_more", False)
+
+            if has_more:
+                cursor = search_results.get("next_cursor")
 
-        pages = search_results["results"]
         all_page_data = []
 
         for page in pages:
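The rewritten loop keeps calling search until Notion reports has_more is false; the old code made a single call, so workspaces with more than one page of results (100 items per page by default) were silently truncated. A standalone sketch of the same cursor pattern, assuming the official notion-client async SDK (the token argument is illustrative):

from notion_client import AsyncClient

async def fetch_all_pages(token: str) -> list[dict]:
    notion = AsyncClient(auth=token)
    pages: list[dict] = []
    cursor: str | None = None
    while True:
        params: dict = {"filter": {"property": "object", "value": "page"}}
        if cursor:
            params["start_cursor"] = cursor
        resp = await notion.search(**params)
        pages.extend(resp["results"])
        if not resp.get("has_more"):
            break
        cursor = resp.get("next_cursor")
    return pages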
@@ -390,6 +390,13 @@ async def index_airtable_records(
                     f"Successfully indexed new Airtable record {summary_content}"
                 )
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Airtable records processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing the Airtable record {record.get('id', 'Unknown')}: {e!s}",
@@ -408,7 +415,10 @@ async def index_airtable_records(
             session, connector, update_last_indexed
         )
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(
+        f"Final commit: Total {documents_indexed} Airtable records processed"
+    )
     await session.commit()
     logger.info(
         "Successfully committed all Airtable document changes to database"
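This Airtable hunk is the template for every connector indexer that follows: a commit after every 10th document inside the loop, plus a final commit for the remainder. That bounds how much work is lost if the task crashes or hits the (now 8-hour) time limit, at the cost of all-or-nothing transaction semantics. A condensed sketch of the pattern with SQLAlchemy's async session; the Document model and record shape here are minimal stand-ins, not the real ones:

import logging
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

logger = logging.getLogger(__name__)

class Base(DeclarativeBase):
    pass

class Document(Base):  # minimal stand-in for the real Document model
    __tablename__ = "documents"
    id: Mapped[int] = mapped_column(primary_key=True)
    content: Mapped[str]

async def index_records(session: AsyncSession, records: list[dict]) -> int:
    """Add one Document per record, committing every 10 to bound lost work."""
    documents_indexed = 0
    for record in records:
        try:
            session.add(Document(content=record["content"]))
            documents_indexed += 1
            # Commit every 10th document so a crash/timeout loses at most one batch
            if documents_indexed % 10 == 0:
                await session.commit()
        except Exception:
            logger.exception("Error processing record %s", record.get("id"))
    # Final commit for any remainder not covered by the batch commits
    await session.commit()
    return documents_indexed

Note the trade-off: a mid-run failure now leaves earlier batches persisted rather than rolling everything back, which fits these append-style indexing jobs.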
@@ -354,6 +354,13 @@ async def index_clickup_tasks(
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new task {task_name}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} ClickUp tasks processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing task {task.get('name', 'Unknown')}: {e!s}",
@@ -366,6 +373,8 @@ async def index_clickup_tasks(
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} ClickUp tasks processed")
     await session.commit()
 
     await task_logger.log_task_success(
@@ -368,6 +368,13 @@ async def index_confluence_pages(
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new page {page_title}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Confluence pages processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing page {page.get('title', 'Unknown')}: {e!s}",
@@ -384,7 +391,10 @@ async def index_confluence_pages(
     if update_last_indexed:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(
+        f"Final commit: Total {documents_indexed} Confluence pages processed"
+    )
     await session.commit()
     logger.info(
         "Successfully committed all Confluence document changes to database"
11 changes: 11 additions & 0 deletions surfsense_backend/app/tasks/connector_indexers/discord_indexer.py
@@ -462,6 +462,13 @@ async def index_discord_messages(
                     f"Successfully indexed new channel {guild_name}#{channel_name} with {len(formatted_messages)} messages"
                 )
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Discord channels processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing guild {guild_name}: {e!s}", exc_info=True
@@ -476,6 +483,10 @@ async def index_discord_messages(
     if documents_indexed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(
+        f"Final commit: Total {documents_indexed} Discord channels processed"
+    )
     await session.commit()
 
     # Prepare result message
@@ -381,13 +381,21 @@ async def index_github_repos(
                 session.add(document)
                 documents_processed += 1
 
+                # Batch commit every 10 documents
+                if documents_processed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_processed} GitHub files processed so far"
+                    )
+                    await session.commit()
+
         except Exception as repo_err:
             logger.error(
                 f"Failed to process repository {repo_full_name}: {repo_err}"
             )
             errors.append(f"Failed processing {repo_full_name}: {repo_err}")
 
-    # Commit all changes at the end
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_processed} GitHub files processed")
     await session.commit()
     logger.info(
         f"Finished GitHub indexing for connector {connector_id}. Processed {documents_processed} files."
@@ -407,6 +407,13 @@ async def index_google_calendar_events(
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new event {event_summary}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Google Calendar events processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing event {event.get('summary', 'Unknown')}: {e!s}",
@@ -422,6 +429,10 @@ async def index_google_calendar_events(
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(
+        f"Final commit: Total {documents_indexed} Google Calendar events processed"
+    )
     await session.commit()
 
     await task_logger.log_task_success(
@@ -324,6 +324,13 @@ async def index_google_gmail_messages(
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new email {summary_content}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Gmail messages processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing the email {message_id}: {e!s}",
@@ -338,7 +345,8 @@ async def index_google_gmail_messages(
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} Gmail messages processed")
     await session.commit()
     logger.info(
         "Successfully committed all Google gmail document changes to database"
10 changes: 9 additions & 1 deletion surfsense_backend/app/tasks/connector_indexers/jira_indexer.py
@@ -352,6 +352,13 @@ async def index_jira_issues(
                     f"Successfully indexed new issue {issue_identifier} - {issue_title}"
                 )
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Jira issues processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing issue {issue.get('identifier', 'Unknown')}: {e!s}",
@@ -368,7 +375,8 @@ async def index_jira_issues(
     if update_last_indexed:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} Jira issues processed")
     await session.commit()
     logger.info("Successfully committed all JIRA document changes to database")
 
@@ -371,6 +371,13 @@ async def index_linear_issues(
                     f"Successfully indexed new issue {issue_identifier} - {issue_title}"
                 )
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Linear issues processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing issue {issue.get('identifier', 'Unknown')}: {e!s}",
@@ -387,7 +394,8 @@ async def index_linear_issues(
     if update_last_indexed:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} Linear issues processed")
     await session.commit()
     logger.info("Successfully committed all Linear document changes to database")
 
@@ -438,6 +438,13 @@ async def index_luma_events(
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new event {event_name}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Luma events processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing event {event.get('name', 'Unknown')}: {e!s}",
@@ -453,6 +460,8 @@ async def index_luma_events(
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} Luma events processed")
     await session.commit()
 
     await task_logger.log_task_success(
18 changes: 17 additions & 1 deletion surfsense_backend/app/tasks/connector_indexers/notion_indexer.py
@@ -356,6 +356,14 @@ def process_blocks(blocks, level=0):
 
                 documents_indexed += 1
                 logger.info(f"Successfully updated Notion page: {page_title}")
+
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} documents processed so far"
+                    )
+                    await session.commit()
+
                 continue
 
             # Document doesn't exist - create new one
@@ -406,6 +414,13 @@ def process_blocks(blocks, level=0):
                 documents_indexed += 1
                 logger.info(f"Successfully indexed new Notion page: {page_title}")
 
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} documents processed so far"
+                    )
+                    await session.commit()
+
             except Exception as e:
                 logger.error(
                     f"Error processing Notion page {page.get('title', 'Unknown')}: {e!s}",
@@ -423,7 +438,8 @@ def process_blocks(blocks, level=0):
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} documents processed")
     await session.commit()
 
     # Prepare result message
11 changes: 10 additions & 1 deletion surfsense_backend/app/tasks/connector_indexers/slack_indexer.py
@@ -353,6 +353,14 @@ async def index_slack_messages(
 
                 session.add(document)
                 documents_indexed += 1
+
+                # Batch commit every 10 documents
+                if documents_indexed % 10 == 0:
+                    logger.info(
+                        f"Committing batch: {documents_indexed} Slack channels processed so far"
+                    )
+                    await session.commit()
+
                 logger.info(
                     f"Successfully indexed new channel {channel_name} with {len(formatted_messages)} messages"
                 )
@@ -376,7 +384,8 @@ async def index_slack_messages(
     if total_processed > 0:
         await update_connector_last_indexed(session, connector, update_last_indexed)
 
-    # Commit all changes
+    # Final commit for any remaining documents not yet committed in batches
+    logger.info(f"Final commit: Total {documents_indexed} Slack channels processed")
     await session.commit()
 
     # Prepare result message
2 changes: 1 addition & 1 deletion surfsense_web/app/(home)/login/GoogleLoginButton.tsx
@@ -36,7 +36,7 @@ export function GoogleLoginButton() {
 			<h1 className="my-8 text-xl font-bold text-neutral-800 dark:text-neutral-100 md:text-4xl">
 				{t("welcome_back")}
 			</h1>
-			{/*
+			{/*
 			<motion.div
 				initial={{ opacity: 0, y: -5 }}
 				animate={{ opacity: 1, y: 0 }}
6 changes: 5 additions & 1 deletion surfsense_web/app/dashboard/page.tsx
@@ -337,7 +337,11 @@ const DashboardPage = () => {
 						</div>
 					</div>
 				</div>
-				<Link className="flex flex-1 flex-col p-4 cursor-pointer" href={`/dashboard/${space.id}/documents`} key={space.id}>
+				<Link
+					className="flex flex-1 flex-col p-4 cursor-pointer"
+					href={`/dashboard/${space.id}/documents`}
+					key={space.id}
+				>
 					<div className="flex flex-1 flex-col justify-between p-1">
 						<div>
 							<h3 className="font-medium text-lg">{space.name}</h3>