diff --git a/apps/docs/src/pages/en/courses/section.md b/apps/docs/src/pages/en/courses/section.md index 1164bc646..d6d03ed1e 100644 --- a/apps/docs/src/pages/en/courses/section.md +++ b/apps/docs/src/pages/en/courses/section.md @@ -55,22 +55,26 @@ If drip configuration is enabled for a section, a student won't be able to acces ### Drip by Date 1. If you want a section to be available to users on a specific date, this is the option you should opt for. +2. Exact-date sections unlock only when their chosen date and time arrives. +3. Unlocking an exact-date section does not change the timing of other relative drip sections. ![Drip by Date](/assets/products/drip-by-date.png) -2. Select the date on which this section will be dripped. -3. Click `Continue` to save it. +4. Select the date on which this section will be dripped. +5. Click `Continue` to save it. ### Drip After a Certain Number of Days From Last Dripped Content 1. If you want a section to be available to users after a certain number of days have elapsed since the last dripped content, this is the option you should opt for. +2. Relative-date sections are released in section order. A later relative section waits for the earlier relative section before its own delay begins. +3. The first relative section counts from the student's enrollment date. After that, each newly released relative section becomes the anchor for the next relative section. > For the first dripped section, the date of enrollment will be considered the last dripped content date. ![Drip After a Certain Number of Days Have Elapsed](/assets/products/drip-by-specific-days.png) -2. Select the number of days. -3. Click `Continue` to save it. +4. Select the number of days. +5. Click `Continue` to save it. 
### Notify Users When a Section Has Dripped diff --git a/apps/queue/.env b/apps/queue/.env index 31f8da553..7df280850 100644 --- a/apps/queue/.env +++ b/apps/queue/.env @@ -1,11 +1,36 @@ +# Port for the queue service to listen on PORT=4000 + +# Email configuration for sending notifications (e.g., course updates, reminders) EMAIL_USER=email_user EMAIL_PASS=email_pass EMAIL_HOST=email_host EMAIL_FROM=no-reply@example.com + +# MongoDB connection string for storing queue data and job information DB_CONNECTION_STRING=mongodb://db.string + +# Redis configuration for caching and job queue management REDIS_HOST=localhost REDIS_PORT=6379 + +# Maximum number of bounces allowed for email sequences before sending is stopped SEQUENCE_BOUNCE_LIMIT=3 + +# Domain for constructing URLs in notifications and links DOMAIN=courselit.app -PIXEL_SIGNING_SECRET=super_secret_string \ No newline at end of file + +# Secret key for signing tracking pixels (used in email notifications) +PIXEL_SIGNING_SECRET=super_secret_string + +# Optional: enables PostHog exception tracking when set. +POSTHOG_API_KEY= + +# Optional: PostHog host URL (default shown here). +POSTHOG_HOST=https://us.i.posthog.com + +# Optional: per-source exception cap per minute (default: 100). +POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE=100 + +# Optional: deployment environment label sent in telemetry (default: unknown). +DEPLOY_ENV=local diff --git a/apps/queue/README.md b/apps/queue/README.md index 4e3f8eaab..7f1a85855 100644 --- a/apps/queue/README.md +++ b/apps/queue/README.md @@ -21,6 +21,10 @@ The following environment variables are used by the queue service: - `EMAIL_PORT` - SMTP server port (default: `587`) - `PORT` - HTTP server port (default: `80`) - `NODE_ENV` - Environment mode. 
When set to `production`, emails are actually sent; otherwise they are only logged +- `POSTHOG_API_KEY` - Enables PostHog error tracking when set +- `POSTHOG_HOST` - PostHog host URL (default: `https://us.i.posthog.com`) +- `POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE` - Per-source exception cap (default: `100`) +- `DEPLOY_ENV` - Deployment environment label used in telemetry (default: `unknown`) - `SEQUENCE_BOUNCE_LIMIT` - Maximum number of bounces allowed for email sequences (default: `3`) - `PROTOCOL` - Protocol used for generating site URLs (default: `https`) - `DOMAIN` - Base domain name for generating site URLs diff --git a/apps/queue/docs/wip/DRIP_HARDENING_GAPS_AND_ROADMAP.md b/apps/queue/docs/wip/DRIP_HARDENING_GAPS_AND_ROADMAP.md new file mode 100644 index 000000000..c85c5f92a --- /dev/null +++ b/apps/queue/docs/wip/DRIP_HARDENING_GAPS_AND_ROADMAP.md @@ -0,0 +1,261 @@ +# Drip Hardening Gaps And Roadmap + +## Context + +This document captures the current missing pieces in drip functionality (queue + web integration), focused on scalability, maintainability, and industry-standard operational behavior. + +Date: 2026-03-21 +Scope: + +- `apps/queue/src/domain/process-drip.ts` +- `apps/queue/src/domain/queries.ts` +- `apps/web/graphql/courses/logic.ts` + +This list reflects remaining open hardening gaps on the current branch. +Last updated after: + +- Domain-scoped membership query contract in queue drip flow. +- Regression test coverage for domain-scoped membership lookup and multi-group drip email behavior. + +## 1) Duplicate Emails In Horizontal Scale (Critical) + +Current behavior: + +- Multiple queue instances can execute `processDrip()` concurrently. +- They can compute same unlocked groups and queue duplicate emails. +- User progress update is idempotent (`$addToSet`), email queueing is not. + +Impact: + +- Duplicate notifications to learners. +- Hard-to-debug race conditions. 
+ +Recommendation: + +- Add distributed lock for drip worker cycle (Redis lock / BullMQ job lock / leader election). +- Add idempotency key for drip emails (for example: `drip:<domainId>:<userId>:<courseId>:<groupId>`). + +Acceptance criteria: + +- Running N workers concurrently does not create duplicate drip emails for same user/group release event. + +## 2) Non-Atomic Unlock + Notify Flow (High) + +Current behavior: + +- Unlock state is written first, email queueing happens afterwards. +- If email enqueue fails, unlock succeeds but notification may be partially/fully lost. + +Impact: + +- Inconsistent learner communication. +- No deterministic retry for missed notifications. + +Recommendation: + +- Introduce outbox pattern: + - Persist release events atomically with unlock update. + - Separate reliable sender consumes outbox with retries and idempotency. + +Acceptance criteria: + +- If queue/email subsystem is down temporarily, unlock notifications are eventually delivered without duplicates. + +## 3) Full Polling + In-Memory Expansion (High) + +Current behavior: + +- Every minute: + - loads all courses with drip, + - loads memberships per course, + - loads users per course, + - loops in application memory. + +Impact: + +- High DB load and memory pressure as tenants/courses/users scale. +- Runtime grows with total data size, not just due work. + +Recommendation: + +- Move to due-work driven execution: + - precompute next drip due per user/course purchase, or + - enqueue per-user drip check jobs at enrollment/release events. +- Use cursor/batch processing where full scan remains necessary. + +Acceptance criteria: + +- Runtime and DB pressure scale roughly with due drip events, not total historical volume. + +## 4) Missing/Weak Indexing For Drip Query Paths (Medium) + +Current behavior: + +- Frequent query shapes in drip path do not have explicit compound indexes aligned to usage. 
+ +Primary candidates: + +- `Membership`: `(domain, entityType, entityId, status, userId)` +- `User`: `(domain, userId)` (for bulk lookup by userIds in a domain) +- `Course`: if keeping scan approach, index around drip-enabled groups and domain as feasible. + +Impact: + +- Degraded throughput and elevated DB CPU at scale. + +Recommendation: + +- Add and validate compound indexes for hot predicates. +- Run explain plans before/after. + +Acceptance criteria: + +- Explain plans avoid broad collection scans for hot drip queries. + +## 5) Rank Reorder Semantics For Relative Drip (Medium) + +Current behavior: + +- Relative drip is rank-ordered each run. +- If groups are reordered after enrollments, in-flight learner release path changes. + +Impact: + +- Learner-facing release schedule may shift unexpectedly. +- Support burden and potential trust issues. + +Recommendation (choose one explicit product policy): + +- Policy A (simple): lock relative rank editing once enrollments exist. +- Policy B (robust): persist per-user drip cursor/snapshot so future rank changes affect only new learners (or only after explicit migration). + +Acceptance criteria: + +- Reordering behavior is deterministic and documented. + +## 6) Multi-Email Burst For Same-Run Unlocks (Medium) + +Current behavior: + +- If multiple groups unlock in one run and each has drip email configured, user gets multiple emails. + +Impact: + +- Notification fatigue. + +Recommendation: + +- Add configurable notification policy: + - `per_group` (current), + - `digest_per_run` (recommended default for larger schools). +- If digest enabled, provide editable digest template with localization strategy. + +Acceptance criteria: + +- Multi-unlock runs follow configured policy and are test-covered. + +## 7) Data Validation Hardening For Drip Inputs (Medium) + +Current behavior: + +- Some server-side constraints are implicit/incomplete (for example, exact-date vs relative-date required fields). 
+ +Impact: + +- Invalid drip configs can be stored and silently skipped. + +Recommendation: + +- Enforce stricter validation in `updateGroup`: + - `relative-date` requires numeric `delayInMillis >= 0`, + - `exact-date` requires valid numeric `dateInUTC`, + - optional sanity checks for email schema consistency. + +Acceptance criteria: + +- Invalid drip payloads are rejected with explicit errors. + +## 8) Observability Gaps For Release Lifecycle (Medium) + +Current behavior: + +- Error capture exists, but release lifecycle visibility is limited. + +Recommendation: + +- Add structured counters/events: + - `drip_courses_scanned` + - `drip_users_evaluated` + - `drip_groups_unlocked` + - `drip_emails_queued` + - `drip_emails_failed` + - `drip_loop_duration_ms` +- Add per-domain dimensions where safe. + +Acceptance criteria: + +- Can answer: "How many unlocks and drip emails happened per domain/day and failure rate?" + +## 9) Test Coverage Still Missing For Some Hard Cases (Medium) + +Current behavior: + +- Unit-level coverage has improved significantly, but concurrency/idempotency/outbox/reorder-policy and digest policy are not covered yet. + +Recommendation: + +- Add tests for: + - concurrent worker execution idempotency, + - outbox retry semantics, + - rank reorder policy behavior, + - digest mode behavior. + +Acceptance criteria: + +- Regression suite catches duplicate-email races and policy regressions. + +## Proposed Implementation Plan + +## Phase 1 (P0 - Safety) + +- Add strict drip input validation in `updateGroup`. +- Add key indexes for hot query paths. +- Add release metrics counters. + +## Phase 2 (P1 - Correctness Under Scale) + +- Introduce distributed locking for drip worker cycle. +- Add email idempotency key to avoid duplicate sends. + +## Phase 3 (P1/P2 - Reliability) + +- Implement outbox for unlock-notification events. +- Add sender worker retry + dead-letter handling. 
+ +## Phase 4 (P2 - Product Policy) + +- Decide and implement rank-reorder semantics for relative drip. +- Add digest mode and editable template/localization design. + +## Suggested Ownership Split + +- Queue worker correctness/scalability: `apps/queue` +- GraphQL validation + admin constraints: `apps/web/graphql` +- Schema/index migrations: `packages/common-logic`, `packages/orm-models`, migration scripts +- Metrics/dashboarding: observability owner + +## Decision Log Needed + +Before implementation, confirm: + +- Reorder policy for in-flight learners (lock vs cursor snapshot). +- Email policy for multi-unlock runs (per-group vs digest). +- Preferred reliability model (direct enqueue with idempotency vs outbox). + +## References + +- `apps/queue/src/domain/process-drip.ts` +- `apps/queue/src/domain/queries.ts` +- `apps/web/graphql/courses/logic.ts` +- `apps/queue/src/domain/__tests__/process-drip.test.ts` +- `apps/web/graphql/courses/__tests__/update-group-drip.test.ts` diff --git a/apps/queue/jest.config.ts b/apps/queue/jest.config.ts index f63b0036b..3565e7b7e 100644 --- a/apps/queue/jest.config.ts +++ b/apps/queue/jest.config.ts @@ -1,5 +1,4 @@ const config = { - preset: "@shelf/jest-mongodb", setupFilesAfterEnv: ["/setupTests.ts"], watchPathIgnorePatterns: ["globalConfig"], moduleNameMapper: { diff --git a/apps/queue/package.json b/apps/queue/package.json index b20521a2c..bb996ba57 100644 --- a/apps/queue/package.json +++ b/apps/queue/package.json @@ -18,6 +18,11 @@ "@courselit/email-editor": "workspace:^", "@courselit/orm-models": "workspace:^", "@courselit/utils": "workspace:^", + "@opentelemetry/api-logs": "^0.204.0", + "@opentelemetry/exporter-logs-otlp-http": "^0.204.0", + "@opentelemetry/resources": "^2.1.0", + "@opentelemetry/sdk-logs": "^0.204.0", + "@opentelemetry/sdk-node": "^0.204.0", "@types/jsdom": "^21.1.7", "bullmq": "^4.14.0", "express": "^4.18.2", @@ -26,6 +31,7 @@ "mongodb": "^6.15.0", "mongoose": "^8.13.1", "nodemailer": "^6.9.2", + 
"posthog-node": "^5.9.1", "pino": "^8.14.1", "pino-mongodb": "^4.3.0", "zod": "^3.22.4" diff --git a/apps/queue/setupTests.ts b/apps/queue/setupTests.ts index df50d087e..6abcfe834 100644 --- a/apps/queue/setupTests.ts +++ b/apps/queue/setupTests.ts @@ -3,6 +3,13 @@ import { MongoMemoryServer } from "mongodb-memory-server"; let mongod: MongoMemoryServer | null = null; +jest.setTimeout(30000); + +function getMongoPort(basePort: number) { + const workerId = Number(process.env.JEST_WORKER_ID || "0"); + return basePort + workerId; +} + // Suppress console.error during tests to reduce noise const originalError = console.error; beforeAll(() => { @@ -17,14 +24,13 @@ afterAll(() => { // @shelf/jest-mongodb provides global.__MONGO_URI__ through globalSetup // If not available, set up MongoDB Memory Server manually beforeAll(async () => { - let mongoUri = (global as any).__MONGO_URI__ || process.env.MONGO_URL; - - // If @shelf/jest-mongodb didn't set up MongoDB, do it manually - if (!mongoUri) { - mongod = await MongoMemoryServer.create(); - mongoUri = mongod.getUri(); - (global as any).__MONGO_URI__ = mongoUri; - } + mongod = await MongoMemoryServer.create({ + instance: { + ip: "127.0.0.1", + port: getMongoPort(38017), + }, + }); + const mongoUri = mongod.getUri(); if (mongoose.connection.readyState === 0) { await mongoose.connect(mongoUri); @@ -33,13 +39,16 @@ beforeAll(async () => { afterAll(async () => { if (mongoose.connection.readyState !== 0) { - await mongoose.connection.close(); + await mongoose.disconnect(); } // Clean up manually created MongoDB instance if it exists if (mongod) { - await mongod.stop(); + await mongod.stop({ doCleanup: true, force: true }); + mongod = null; } + + delete (global as any).__MONGO_URI__; }); // Clean up database after each test diff --git a/apps/queue/src/db.ts b/apps/queue/src/db.ts index 6795311cb..3f675b900 100644 --- a/apps/queue/src/db.ts +++ b/apps/queue/src/db.ts @@ -1,4 +1,5 @@ import mongoose from "mongoose"; +import { 
captureError, getDomainId } from "./observability/posthog"; export const connectToDatabase = async () => { const dbUri = process.env.DB_CONNECTION_STRING; @@ -9,6 +10,11 @@ export const connectToDatabase = async () => { mongoose.connection.on("error", (err) => { console.error("Mongoose connection error:", err); + captureError({ + error: err, + source: "db.error_handler", + domainId: getDomainId(), + }); }); mongoose.connection.on("disconnected", () => { @@ -24,6 +30,11 @@ const connect = async (dbUri: string) => { await mongoose.connect(dbUri, {}); } catch (error) { console.error("Database connection failed:", error); + captureError({ + error, + source: "db.connect", + domainId: getDomainId(), + }); process.exit(1); } }; diff --git a/apps/queue/src/domain/__tests__/process-drip.test.ts b/apps/queue/src/domain/__tests__/process-drip.test.ts new file mode 100644 index 000000000..366187107 --- /dev/null +++ b/apps/queue/src/domain/__tests__/process-drip.test.ts @@ -0,0 +1,1087 @@ +/** + * @jest-environment node + */ + +import { + getNewAccessibleGroupIdsForPurchase, + processDrip, +} from "../process-drip"; +import { Constants } from "@courselit/common-models"; +import CourseModel from "../model/course"; +import UserModel from "../model/user"; +import * as queries from "../queries"; +import mailQueue from "../queue"; +import * as posthog from "../../observability/posthog"; + +const DAY_IN_MS = 86_400_000; +const STOP_LOOP_ERROR_MESSAGE = "stop-loop-after-first-iteration"; + +jest.mock("../queue", () => ({ + __esModule: true, + default: { + add: jest.fn(), + }, +})); + +function makeCourse(groups: any[]) { + return { + groups, + } as any; +} + +function makeDripGroup({ + id, + rank, + drip, +}: { + id: string; + rank: number; + drip: any; +}) { + return { + _id: id, + rank, + drip, + } as any; +} + +function makePurchase(overrides: Partial = {}) { + return { + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + ...overrides, + } as any; +} + 
+describe("getNewAccessibleGroupIdsForPurchase", () => { + it("returns empty list when no drip groups are unlockable", () => { + const nowUTC = new Date("2026-01-02T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "future-exact", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC + DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "disabled", + rank: 2000, + drip: { + status: false, + type: "relative-date", + delayInMillis: 0, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase(), + nowUTC, + }); + + expect(newGroupIds).toEqual([]); + }); + + it("uses _id for exact-date group unlocks", () => { + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "exact-group-id", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), + ]); + const purchase = makePurchase(); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: purchase, + nowUTC, + }); + + expect(newGroupIds).toEqual(["exact-group-id"]); + }); + + it("does not unlock exact-date drip groups when dateInUTC is invalid", () => { + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "invalid-exact", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: "invalid-date", + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase(), + nowUTC, + }); + + expect(newGroupIds).toEqual([]); + }); + + it("filters out groups already present in accessibleGroups", () => { + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "already-there", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - 
DAY_IN_MS, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase({ + accessibleGroups: ["already-there"], + }), + nowUTC, + }); + + expect(newGroupIds).toEqual([]); + }); + + it("uses lastDripAt as the relative drip anchor when available", () => { + const nowUTC = new Date("2026-01-08T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "relative-1", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 2 * DAY_IN_MS, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase({ + createdAt: new Date("2026-01-01T00:00:00.000Z"), + lastDripAt: new Date("2026-01-06T00:00:00.000Z"), + }), + nowUTC, + }); + + expect(newGroupIds).toEqual(["relative-1"]); + }); + + it("returns only exact-date groups when purchase anchor dates are missing", () => { + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "exact-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "relative-1", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 0, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: { + accessibleGroups: [], + } as any, + nowUTC, + }); + + expect(newGroupIds).toEqual(["exact-1"]); + }); + + it("releases relative groups sequentially instead of unlocking all groups at once", () => { + const nowUTC = new Date("2026-01-04T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 2 * DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "group-2", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 2 * DAY_IN_MS, + }, + 
}), + ]); + const purchase = makePurchase(); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: purchase, + nowUTC, + }); + + expect(newGroupIds).toEqual(["group-1"]); + }); + + it("evaluates relative drip groups by rank even when input order is unsorted", () => { + const nowUTC = new Date("2026-01-03T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "group-rank-2000", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 1 * DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "group-rank-1000", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 1 * DAY_IN_MS, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase(), + nowUTC, + }); + + expect(newGroupIds).toEqual(["group-rank-1000", "group-rank-2000"]); + }); + + it("does not unlock later relative groups before an earlier group in the sequence", () => { + const nowUTC = new Date("2026-01-03T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "group-early", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 10 * DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "group-late", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 1 * DAY_IN_MS, + }, + }), + ]); + const purchase = makePurchase(); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: purchase, + nowUTC, + }); + + expect(newGroupIds).toEqual([]); + }); + + it("supports relative drips with 0-day delay", () => { + const nowUTC = new Date("2026-01-01T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "instant-relative", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 0, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + 
userProgressInCourse: makePurchase(), + nowUTC, + }); + + expect(newGroupIds).toEqual(["instant-relative"]); + }); + + it("stops evaluating relative groups after encountering a negative delay", () => { + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "bad-group", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: -1, + }, + }), + makeDripGroup({ + id: "next-group", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 0, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase(), + nowUTC, + }); + + expect(newGroupIds).toEqual([]); + }); + + it("combines exact-date and relative unlocks without duplicates", () => { + const nowUTC = new Date("2026-01-03T00:00:00.000Z").getTime(); + const course = makeCourse([ + makeDripGroup({ + id: "shared-group", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "relative-group", + rank: 2000, + drip: { + status: true, + type: "relative-date", + delayInMillis: DAY_IN_MS, + }, + }), + ]); + + const newGroupIds = getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse: makePurchase({ + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }), + nowUTC, + }); + + expect(newGroupIds).toEqual(["shared-group", "relative-group"]); + }); +}); + +describe("processDrip", () => { + function mockStopLoopAfterFirstIteration() { + const stopError = new Error(STOP_LOOP_ERROR_MESSAGE); + const setTimeoutSpy = jest + .spyOn(global, "setTimeout") + .mockImplementation(() => { + throw stopError; + }); + return { stopError, setTimeoutSpy }; + } + + beforeEach(() => { + jest.clearAllMocks(); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it("updates accessible groups and queues drip email when a new group unlocks", async () => { + const { stopError } 
= mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + email: { + subject: "Section unlocked", + content: { + content: [ + { + blockType: "text", + settings: { + content: "Hi {{ subscriber.name }}", + }, + }, + ], + }, + }, + }, + }), + ], + } as any; + + const creator = { + userId: "creator-1", + name: "Creator Name", + email: "creator@example.com", + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(creator), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + const updateOneSpy = jest + .spyOn(UserModel, "updateOne") + .mockResolvedValue({} as any); + + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + jest.spyOn(queries, "getDomain").mockResolvedValue({ + settings: { mailingAddress: "Main street" }, + name: "school", + } as any); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect(queries.getMemberships).toHaveBeenCalledWith( + "course-1", + Constants.MembershipEntityType.COURSE, + "domain-1", + ); + expect(updateOneSpy).toHaveBeenCalledTimes(1); + expect(updateOneSpy).toHaveBeenCalledWith( + { + userId: 
"user-1", + "purchases.courseId": "course-1", + }, + { + $addToSet: { + "purchases.$.accessibleGroups": { + $each: ["group-1"], + }, + }, + }, + ); + expect((mailQueue as any).add).toHaveBeenCalledTimes(1); + expect((mailQueue as any).add).toHaveBeenCalledWith( + "mail", + expect.objectContaining({ + to: "user@example.com", + subject: "Section unlocked", + domainId: "domain-1", + }), + ); + }); + + it("queues email for later unlocked group when first unlocked group has no drip email", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), + makeDripGroup({ + id: "group-2", + rank: 2000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + email: { + subject: "Second section unlocked", + content: { + content: [ + { + blockType: "text", + settings: { + content: "Section two is now live", + }, + }, + ], + }, + }, + }, + }), + ], + } as any; + + const creator = { + userId: "creator-1", + name: "Creator Name", + email: "creator@example.com", + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(creator), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: 
jest.fn().mockResolvedValue([user]), + } as any); + const updateOneSpy = jest + .spyOn(UserModel, "updateOne") + .mockResolvedValue({} as any); + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + jest.spyOn(queries, "getDomain").mockResolvedValue({ + settings: { mailingAddress: "Main street" }, + name: "school", + } as any); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect(updateOneSpy).toHaveBeenCalledWith( + { + userId: "user-1", + "purchases.courseId": "course-1", + }, + { + $addToSet: { + "purchases.$.accessibleGroups": { + $each: ["group-1", "group-2"], + }, + }, + }, + ); + expect((mailQueue as any).add).toHaveBeenCalledTimes(1); + expect((mailQueue as any).add).toHaveBeenCalledWith( + "mail", + expect.objectContaining({ + subject: "Second section unlocked", + to: "user@example.com", + domainId: "domain-1", + }), + ); + }); + + it("queues one email per unlocked group with drip email configured", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + email: { + subject: "First section unlocked", + content: { + content: [ + { + blockType: "text", + settings: { + content: "Section one is now live", + }, + }, + ], + }, + }, + }, + }), + makeDripGroup({ + id: "group-2", + rank: 2000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + email: { + subject: "Second section unlocked", + content: { + content: [ + { + blockType: "text", + settings: { + content: "Section two is now live", + }, + }, + ], + }, + }, + }, + }), + ], + } as any; + + 
const creator = { + userId: "creator-1", + name: "Creator Name", + email: "creator@example.com", + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(creator), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + jest.spyOn(UserModel, "updateOne").mockResolvedValue({} as any); + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + jest.spyOn(queries, "getDomain").mockResolvedValue({ + settings: { mailingAddress: "Main street" }, + name: "school", + } as any); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect((mailQueue as any).add).toHaveBeenCalledTimes(2); + expect((mailQueue as any).add).toHaveBeenNthCalledWith( + 1, + "mail", + expect.objectContaining({ + subject: "First section unlocked", + }), + ); + expect((mailQueue as any).add).toHaveBeenNthCalledWith( + 2, + "mail", + expect.objectContaining({ + subject: "Second section unlocked", + }), + ); + }); + + it("does not queue email when drip email content is missing", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), 
+ ], + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + jest.spyOn(UserModel, "updateOne").mockResolvedValue({} as any); + + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect((mailQueue as any).add).not.toHaveBeenCalled(); + }); + + it("does not update lastDripAt when only exact-date groups unlock", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "exact-group", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC - DAY_IN_MS, + }, + }), + ], + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + lastDripAt: new Date("2026-01-05T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: 
jest.fn().mockResolvedValue(null), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + const updateOneSpy = jest + .spyOn(UserModel, "updateOne") + .mockResolvedValue({} as any); + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect(updateOneSpy).toHaveBeenCalledWith( + { + userId: "user-1", + "purchases.courseId": "course-1", + }, + { + $addToSet: { + "purchases.$.accessibleGroups": { + $each: ["exact-group"], + }, + }, + }, + ); + }); + + it("updates lastDripAt when a relative-date group unlocks", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "relative-group", + rank: 1000, + drip: { + status: true, + type: "relative-date", + delayInMillis: 0, + }, + }), + ], + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + tags: [], + unsubscribeToken: "token-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + const updateOneSpy = jest + .spyOn(UserModel, "updateOne") + .mockResolvedValue({} as any); + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + 
+ await expect(processDrip()).rejects.toThrow(stopError); + + expect(updateOneSpy).toHaveBeenCalledWith( + { + userId: "user-1", + "purchases.courseId": "course-1", + }, + { + $addToSet: { + "purchases.$.accessibleGroups": { + $each: ["relative-group"], + }, + }, + $set: { + "purchases.$.lastDripAt": expect.any(Date), + }, + }, + ); + }); + + it("does not update users when no new groups are unlockable", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new Date("2026-01-02T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + const course = { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [ + makeDripGroup({ + id: "group-1", + rank: 1000, + drip: { + status: true, + type: "exact-date", + dateInUTC: nowUTC + DAY_IN_MS, + }, + }), + ], + } as any; + + const user = { + userId: "user-1", + email: "user@example.com", + name: "Student", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date("2026-01-01T00:00:00.000Z"), + }, + ], + } as any; + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([course]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest.fn().mockResolvedValue(null), + } as any); + jest.spyOn(UserModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([user]), + } as any); + const updateOneSpy = jest + .spyOn(UserModel, "updateOne") + .mockResolvedValue({} as any); + jest.spyOn(queries, "getMemberships").mockResolvedValue([ + { + userId: "user-1", + } as any, + ]); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect(updateOneSpy).not.toHaveBeenCalled(); + expect((mailQueue as any).add).not.toHaveBeenCalled(); + }); + + it("captures per-course errors and continues the loop", async () => { + const { stopError } = mockStopLoopAfterFirstIteration(); + const nowUTC = new 
Date("2026-01-10T00:00:00.000Z").getTime(); + jest.spyOn(Date.prototype, "getTime").mockReturnValue(nowUTC); + + jest.spyOn(CourseModel, "find").mockReturnValue({ + lean: jest.fn().mockResolvedValue([ + { + courseId: "course-1", + creatorId: "creator-1", + domain: "domain-1", + slug: "course-1", + title: "Course One", + groups: [], + }, + ]), + } as any); + jest.spyOn(UserModel, "findOne").mockReturnValue({ + lean: jest + .fn() + .mockRejectedValue(new Error("creator-query-failed")), + } as any); + const captureErrorSpy = jest + .spyOn(posthog, "captureError") + .mockImplementation(() => {}); + + await expect(processDrip()).rejects.toThrow(stopError); + + expect(captureErrorSpy).toHaveBeenCalledWith( + expect.objectContaining({ + source: "processDrip.course", + domainId: "domain-1", + context: { + course_id: "course-1", + }, + }), + ); + }); +}); diff --git a/apps/queue/src/domain/__tests__/process-ongoing-sequences.test.ts b/apps/queue/src/domain/__tests__/process-ongoing-sequences.test.ts index 695ae6a66..bf6e5575f 100644 --- a/apps/queue/src/domain/__tests__/process-ongoing-sequences.test.ts +++ b/apps/queue/src/domain/__tests__/process-ongoing-sequences.test.ts @@ -14,11 +14,12 @@ import UserModel from "../model/user"; import EmailDelivery from "../model/email-delivery"; import * as queries from "../queries"; import * as mail from "../../mail"; -import { AdminSequence, InternalUser } from "@courselit/common-logic"; +import { InternalUser } from "@courselit/orm-models"; import { getEmailFrom, jwtUtils } from "@courselit/utils"; import { getUnsubLink } from "../../utils/get-unsub-link"; import { getSiteUrl } from "../../utils/get-site-url"; import { sequenceBounceLimit } from "../../constants"; +import { AdminSequence } from "@courselit/orm-models"; // Mock dependencies jest.mock("../../mail"); @@ -288,6 +289,20 @@ describe("processOngoingSequence", () => { sequenceId: { $ne: TEST_SEQUENCE_ID }, // Keep main test sequence }); jest.clearAllMocks(); + + 
mockedGetSiteUrl.mockReturnValue("https://test.com"); + mockedGetUnsubLink.mockReturnValue( + "https://test.com/api/unsubscribe/unsub-token-123", + ); + mockedGetEmailFrom.mockImplementation( + ({ name, email }) => `${name} <${email}>`, + ); + mockedJwtUtils.generateToken = jest.fn().mockReturnValue("test-token"); + mockedSendMail.mockResolvedValue(undefined); + }); + + afterEach(() => { + jest.restoreAllMocks(); }); afterAll(async () => { diff --git a/apps/queue/src/domain/handler.ts b/apps/queue/src/domain/handler.ts index 2cb701dd6..aa76597ec 100644 --- a/apps/queue/src/domain/handler.ts +++ b/apps/queue/src/domain/handler.ts @@ -6,6 +6,7 @@ export async function addMailJob({ subject, body, from, + domainId, headers, }: MailJob) { for (const recipient of to) { @@ -14,6 +15,7 @@ export async function addMailJob({ subject, body, from, + domainId, headers, }); } diff --git a/apps/queue/src/domain/model/course.ts b/apps/queue/src/domain/model/course.ts index 90deaf77d..64cabd8f2 100644 --- a/apps/queue/src/domain/model/course.ts +++ b/apps/queue/src/domain/model/course.ts @@ -1,4 +1,4 @@ import mongoose from "mongoose"; -import { CourseSchema } from "@courselit/common-logic"; +import { CourseSchema } from "@courselit/orm-models"; -export default mongoose.models.Domain || mongoose.model("Course", CourseSchema); +export default mongoose.models.Course || mongoose.model("Course", CourseSchema); diff --git a/apps/queue/src/domain/model/email-delivery.ts b/apps/queue/src/domain/model/email-delivery.ts index d6c3f3afd..d45864c41 100644 --- a/apps/queue/src/domain/model/email-delivery.ts +++ b/apps/queue/src/domain/model/email-delivery.ts @@ -1,5 +1,5 @@ import mongoose from "mongoose"; -import { EmailDeliverySchema } from "@courselit/common-logic"; +import { EmailDeliverySchema } from "@courselit/orm-models"; export default mongoose.models.EmailDelivery || mongoose.model("EmailDelivery", EmailDeliverySchema); diff --git a/apps/queue/src/domain/model/email-template.ts 
b/apps/queue/src/domain/model/email-template.ts index fc830478f..a465f53a1 100644 --- a/apps/queue/src/domain/model/email-template.ts +++ b/apps/queue/src/domain/model/email-template.ts @@ -1,6 +1,6 @@ import mongoose, { Model } from "mongoose"; import { EmailTemplate as PublicEmailTemplate } from "@courselit/common-models"; -import { EmailContentSchema } from "@courselit/common-logic"; +import { EmailContentSchema } from "@courselit/orm-models"; interface EmailTemplate extends PublicEmailTemplate { domain: mongoose.Schema.Types.ObjectId; diff --git a/apps/queue/src/domain/model/email.ts b/apps/queue/src/domain/model/email.ts index a6b266d0e..48fbf7bc1 100644 --- a/apps/queue/src/domain/model/email.ts +++ b/apps/queue/src/domain/model/email.ts @@ -1,4 +1,4 @@ import mongoose from "mongoose"; -import { EmailSchema } from "@courselit/common-logic"; +import { EmailSchema } from "@courselit/orm-models"; export default mongoose.models.Email || mongoose.model("Email", EmailSchema); diff --git a/apps/queue/src/domain/model/mail-job.ts b/apps/queue/src/domain/model/mail-job.ts index 47d57bb5c..377e10b25 100644 --- a/apps/queue/src/domain/model/mail-job.ts +++ b/apps/queue/src/domain/model/mail-job.ts @@ -5,6 +5,7 @@ export const MailJob = z.object({ from: z.string(), subject: z.string(), body: z.string(), + domainId: z.string(), headers: z.record(z.string()).optional(), }); diff --git a/apps/queue/src/domain/model/membership.ts b/apps/queue/src/domain/model/membership.ts index ac3045c2f..10f12c6a2 100644 --- a/apps/queue/src/domain/model/membership.ts +++ b/apps/queue/src/domain/model/membership.ts @@ -1,4 +1,4 @@ -import { MembershipSchema } from "@courselit/common-logic"; +import { MembershipSchema } from "@courselit/orm-models"; import mongoose, { Model } from "mongoose"; const MembershipModel = diff --git a/apps/queue/src/domain/model/progress.ts b/apps/queue/src/domain/model/progress.ts deleted file mode 100644 index 33d049d3d..000000000 --- 
a/apps/queue/src/domain/model/progress.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Progress } from "@courselit/common-models"; -import mongoose from "mongoose"; - -const ProgressSchema = new mongoose.Schema( - { - courseId: { type: String, required: true }, - completedLessons: { type: [String] }, - downloaded: { type: Boolean }, - accessibleGroups: { type: [String] }, - }, - { - timestamps: true, - }, -); - -export default ProgressSchema; diff --git a/apps/queue/src/domain/model/rule.ts b/apps/queue/src/domain/model/rule.ts index 332e04efb..a86fc1724 100644 --- a/apps/queue/src/domain/model/rule.ts +++ b/apps/queue/src/domain/model/rule.ts @@ -1,5 +1,5 @@ import mongoose, { Model } from "mongoose"; -import { RuleSchema } from "@courselit/common-logic"; +import { RuleSchema } from "@courselit/orm-models"; const RuleModel = (mongoose.models.Rule as Model) || mongoose.model("Rule", RuleSchema); diff --git a/apps/queue/src/domain/model/sequence-report.ts b/apps/queue/src/domain/model/sequence-report.ts deleted file mode 100644 index 18b7a4e42..000000000 --- a/apps/queue/src/domain/model/sequence-report.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { SequenceReport } from "@courselit/common-models"; -import mongoose from "mongoose"; - -const SequenceReportSchema = new mongoose.Schema({ - broadcast: { - lockedAt: Date, - sentAt: Date, - }, - sequence: { - subscribers: [String], - unsubscribers: [String], - failed: [String], - }, -}); - -export default SequenceReportSchema; diff --git a/apps/queue/src/domain/model/sequence.ts b/apps/queue/src/domain/model/sequence.ts index a8bfd9018..a3d5150ba 100644 --- a/apps/queue/src/domain/model/sequence.ts +++ b/apps/queue/src/domain/model/sequence.ts @@ -1,5 +1,5 @@ import mongoose, { Model } from "mongoose"; -import { SequenceSchema } from "@courselit/common-logic"; +import { SequenceSchema } from "@courselit/orm-models"; const SequenceModel = (mongoose.models.Sequence as Model) || diff --git a/apps/queue/src/domain/model/user.ts 
b/apps/queue/src/domain/model/user.ts index e1ba2caf4..387f351c9 100644 --- a/apps/queue/src/domain/model/user.ts +++ b/apps/queue/src/domain/model/user.ts @@ -1,5 +1,5 @@ import mongoose, { Model } from "mongoose"; -import { UserSchema } from "@courselit/common-logic"; +import { UserSchema } from "@courselit/orm-models"; const UserModel = (mongoose.models.User as Model) || mongoose.model("User", UserSchema); diff --git a/apps/queue/src/domain/process-drip.ts b/apps/queue/src/domain/process-drip.ts index 8744e7a64..ac800b9d3 100644 --- a/apps/queue/src/domain/process-drip.ts +++ b/apps/queue/src/domain/process-drip.ts @@ -1,161 +1,308 @@ import CourseModel from "./model/course"; import UserModel from "./model/user"; -import mailQueue from "./queue"; import { Liquid } from "liquidjs"; import { getDomain, getMemberships } from "./queries"; import { Constants } from "@courselit/common-models"; -import { InternalCourse, InternalUser } from "@courselit/common-logic"; +import { InternalCourse, InternalUser } from "@courselit/orm-models"; import { getEmailFrom } from "@courselit/utils"; import { FilterQuery, UpdateQuery } from "mongoose"; import { renderEmailToHtml } from "@courselit/email-editor"; import { getSiteUrl } from "../utils/get-site-url"; import { getUnsubLink } from "../utils/get-unsub-link"; +import { captureError, getDomainId } from "../observability/posthog"; +import { addMailJob } from "./handler"; +import { logInfo } from "@/observability/logs"; const liquidEngine = new Liquid(); -export async function processDrip() { - // eslint-disable-next-line no-constant-condition - while (true) { - // eslint-disable-next-line no-console - console.log( - `Starting process of drips at ${new Date().toDateString()}`, +type CourseGroup = InternalCourse["groups"][number]; +type UserPurchase = InternalUser["purchases"][number]; + +function toGroupId(group: CourseGroup): string | undefined { + const value = (group as { _id?: unknown; id?: unknown })._id ?? 
group.id; + if (value === null || value === undefined) { + return undefined; + } + + return String(value); +} + +function getSortedGroups(groups: CourseGroup[] = []): CourseGroup[] { + return [...groups].sort((a, b) => (a.rank ?? 0) - (b.rank ?? 0)); +} + +/** + * Answers: given this course, this user’s progress, and current time, which section + * group IDs should be newly unlocked right now? + */ +export function getNewAccessibleGroupIdsForPurchase({ + course, + userProgressInCourse, + nowUTC, +}: { + course: InternalCourse; + userProgressInCourse: UserPurchase; + nowUTC: number; +}): string[] { + const accessibleGroups = Array.isArray( + userProgressInCourse.accessibleGroups, + ) + ? userProgressInCourse.accessibleGroups + : []; + const sortedGroups = getSortedGroups(course.groups ?? []); + + const exactDateAccessibleGroupIds = sortedGroups + .filter((group) => { + const releaseDateInUTC = group.drip?.dateInUTC; + return ( + group.drip?.status && + group.drip.type === "exact-date" && + typeof releaseDateInUTC === "number" && + nowUTC >= releaseDateInUTC + ); + }) + .map(toGroupId) + .filter((id): id is string => Boolean(id)); + + const progressAnchor = userProgressInCourse.lastDripAt + ? new Date(userProgressInCourse.lastDripAt) + : userProgressInCourse.createdAt + ? 
new Date(userProgressInCourse.createdAt) + : null; + if (!progressAnchor) { + return exactDateAccessibleGroupIds.filter( + (id) => !accessibleGroups.includes(id), ); + } - const courseQuery: FilterQuery = { - "groups.drip": { $exists: true }, - }; - // @ts-ignore - Mongoose type compatibility issue - const courses = (await CourseModel.find( - courseQuery, - ).lean()) as unknown as InternalCourse[]; + const anchorInUTC = progressAnchor.getTime(); + if (Number.isNaN(anchorInUTC)) { + return exactDateAccessibleGroupIds.filter( + (id) => !accessibleGroups.includes(id), + ); + } - const nowUTC = new Date().getTime(); + const relativeAccessibleGroupIds: string[] = []; + let releaseCursorUTC = anchorInUTC; + for (const group of sortedGroups) { + if ( + !group.drip?.status || + group.drip.type !== "relative-date" || + !Number.isFinite(group.drip.delayInMillis) + ) { + continue; + } - for (const course of courses) { - const creatorQuery: FilterQuery = { - userId: course.creatorId, - }; - // @ts-ignore - Mongoose type compatibility issue - const creator = (await UserModel.findOne( - creatorQuery, - ).lean()) as unknown as InternalUser | null; - - const exactDateAccessibleGroupIds = course.groups - .filter( - (group) => - group.drip && - group.drip.status && - group.drip.type === "exact-date" && - nowUTC >= group.drip.dateInUTC, - ) - .map((group) => group.id); - - const memberships = await getMemberships( - course.courseId, - Constants.MembershipEntityType.COURSE, + const groupId = toGroupId(group); + if (!groupId || accessibleGroups.includes(groupId)) { + continue; + } + + const delayInMillis = group.drip.delayInMillis as number; + if (delayInMillis < 0) { + break; + } + + const unlockAtUTC = releaseCursorUTC + delayInMillis; + if (nowUTC >= unlockAtUTC) { + relativeAccessibleGroupIds.push(groupId); + releaseCursorUTC = unlockAtUTC; + continue; + } + + // Relative drips are sequential by section order. 
+ break; + } + + const allAccessibleGroupIds = new Set([ + ...exactDateAccessibleGroupIds, + ...relativeAccessibleGroupIds, + ]); + return [...allAccessibleGroupIds].filter( + (id) => !accessibleGroups.includes(id), + ); +} + +export async function processDrip() { + // eslint-disable-next-line no-constant-condition + while (true) { + try { + // eslint-disable-next-line no-console + console.log( + `Starting process of drips at ${new Date().toDateString()}`, ); - const userQuery: FilterQuery = { - domain: course.domain, - userId: { $in: memberships.map((m) => m.userId) }, + + const courseQuery: FilterQuery = { + "groups.drip": { $exists: true }, }; // @ts-ignore - Mongoose type compatibility issue - const users = (await UserModel.find( - userQuery, - ).lean()) as unknown as InternalUser[]; - - for (const user of users) { - const userProgressInCourse = user.purchases.find( - (p) => p.courseId === course.courseId, - ); - if (!userProgressInCourse) continue; - - const lastDripAtUTC = ( - userProgressInCourse.lastDripAt || - userProgressInCourse.createdAt - ).getTime(); - const relativeAccessibleGroupIds = course.groups - .filter( - (group) => - group.drip && - group.drip.status && - group.drip.type === "relative-date" && - group.drip.delayInMillis >= 0 && - nowUTC >= lastDripAtUTC + group.drip.delayInMillis, - ) - .map((group) => (group as any)._id); - - const allAccessibleGroupIds = [ - ...new Set([ - ...exactDateAccessibleGroupIds, - ...relativeAccessibleGroupIds, - ]), - ]; - const newGroupIds = allAccessibleGroupIds.filter( - (id) => !userProgressInCourse.accessibleGroups.includes(id), - ); - - if (newGroupIds.length > 0) { - const updateQuery: FilterQuery = { - userId: user.userId, - "purchases.courseId": course.courseId, - }; - const updateData: UpdateQuery = { - $addToSet: { - "purchases.$.accessibleGroups": { - $each: newGroupIds, - }, - }, - $set: { - "purchases.$.lastDripAt": new Date(nowUTC), - }, + const courses = (await CourseModel.find( + courseQuery, + 
).lean()) as unknown as InternalCourse[]; + + const nowUTC = new Date().getTime(); + + for (const course of courses) { + try { + const creatorQuery: FilterQuery = { + userId: course.creatorId, }; // @ts-ignore - Mongoose type compatibility issue - await UserModel.updateOne(updateQuery, updateData); + const creator = (await UserModel.findOne( + creatorQuery, + ).lean()) as unknown as InternalUser | null; - const firstGroupWithDripEmailSet = course.groups.find( - (group) => (group as any)._id === newGroupIds[0], + const memberships = await getMemberships( + course.courseId, + Constants.MembershipEntityType.COURSE, + course.domain, ); + const userQuery: FilterQuery = { + domain: course.domain, + userId: { $in: memberships.map((m) => m.userId) }, + }; + // @ts-ignore - Mongoose type compatibility issue + const users = (await UserModel.find( + userQuery, + ).lean()) as unknown as InternalUser[]; - if (firstGroupWithDripEmailSet) { - const domain = await getDomain(course.domain); - const templatePayload = { - subscriber: { - email: user.email, - name: user.name, - tags: user.tags, - }, - product: { - title: course.title, - url: `${getSiteUrl(domain)}/course/${course.slug}/${course.courseId}`, + for (const user of users) { + const userProgressInCourse = user.purchases.find( + (p) => p.courseId === course.courseId, + ); + if (!userProgressInCourse) continue; + + const newGroupIds = getNewAccessibleGroupIdsForPurchase( + { + course, + userProgressInCourse, + nowUTC, }, - address: domain.settings.mailingAddress, - unsubscribe_link: getUnsubLink( - domain, - user.unsubscribeToken, - ), - }; - if (firstGroupWithDripEmailSet.drip?.email?.content) { - const content = await liquidEngine.parseAndRender( - await renderEmailToHtml({ - email: firstGroupWithDripEmailSet.drip.email - .content, - }), - templatePayload, + ); + + if (newGroupIds.length > 0) { + const newlyUnlockedGroups = newGroupIds + .map((groupId) => + course.groups.find( + (group) => + toGroupId(group as CourseGroup) 
=== + groupId, + ), + ) + .filter((group): group is CourseGroup => + Boolean(group), + ); + + const updateQuery: FilterQuery = { + userId: user.userId, + "purchases.courseId": course.courseId, + }; + const updateData: UpdateQuery = { + $addToSet: { + "purchases.$.accessibleGroups": { + $each: newGroupIds, + }, + }, + }; + if ( + newlyUnlockedGroups.some( + (group) => + group.drip?.status && + group.drip.type === "relative-date", + ) + ) { + updateData.$set = { + "purchases.$.lastDripAt": new Date(nowUTC), + }; + } + // @ts-ignore - Mongoose type compatibility issue + await UserModel.updateOne(updateQuery, updateData); + + logInfo( + `${newGroupIds.length} Sections unlocked for ${user.email} in course ${course.title}`, + { + source: "processDrip.unlock", + domainId: getDomainId(course.domain), + course_id: course.courseId, + user_id: user.userId, + unlocked_group_ids: newGroupIds.join(","), + }, ); - await mailQueue.add("mail", { - to: user.email, - subject: - firstGroupWithDripEmailSet.drip.email - .subject, - body: content, - from: getEmailFrom({ - name: creator?.name || creator?.email || "", - email: process.env.EMAIL_FROM || "", - }), - }); + + const newlyUnlockedGroupsWithDripEmail = + newlyUnlockedGroups.filter( + (group) => + Boolean(group.drip?.email?.content) && + Boolean(group.drip?.email?.subject), + ); + + if (newlyUnlockedGroupsWithDripEmail.length > 0) { + const domain = await getDomain(course.domain); + const templatePayload = { + subscriber: { + email: user.email, + name: user.name, + tags: user.tags, + }, + product: { + title: course.title, + url: `${getSiteUrl(domain)}/course/${course.slug}/${course.courseId}`, + }, + address: domain.settings.mailingAddress, + unsubscribe_link: getUnsubLink( + domain, + user.unsubscribeToken, + ), + }; + for (const group of newlyUnlockedGroupsWithDripEmail) { + const dripEmail = group.drip?.email; + if ( + !dripEmail?.content || + !dripEmail.subject + ) { + continue; + } + + const content = + await 
liquidEngine.parseAndRender( + await renderEmailToHtml({ + email: dripEmail.content, + }), + templatePayload, + ); + await addMailJob({ + to: [user.email], + subject: dripEmail.subject, + body: content, + from: getEmailFrom({ + name: + creator?.name || + creator?.email || + "", + email: process.env.EMAIL_FROM || "", + }), + domainId: getDomainId(course.domain), + }); + } + } } } + } catch (err: any) { + captureError({ + error: err, + source: "processDrip.course", + domainId: getDomainId(course.domain), + context: { + course_id: course.courseId, + }, + }); } } + } catch (err: any) { + captureError({ + error: err, + source: "processDrip.loop", + domainId: getDomainId(), + }); } await new Promise((resolve) => setTimeout(resolve, 60 * 1000)); diff --git a/apps/queue/src/domain/process-ongoing-sequences/index.ts b/apps/queue/src/domain/process-ongoing-sequences/index.ts index 09adb4b63..e2b630170 100644 --- a/apps/queue/src/domain/process-ongoing-sequences/index.ts +++ b/apps/queue/src/domain/process-ongoing-sequences/index.ts @@ -4,16 +4,40 @@ import { Worker } from "bullmq"; import redis from "../../redis"; import sequenceQueue from "../sequence-queue"; import { processOngoingSequence } from "./process-ongoing-sequence"; +import { captureError, getDomainId } from "../../observability/posthog"; + +interface SequenceWorkerData { + ongoingSequenceId: string; + domainId?: string; +} if (process.env.NODE_ENV !== "test") { new Worker( "sequence", async (job) => { - const ongoingSequenceId = job.data; + const payload = job.data as SequenceWorkerData | string; + const ongoingSequenceId = + typeof payload === "string" + ? payload + : payload.ongoingSequenceId; + const domainId = + typeof payload === "string" + ? 
getDomainId() + : getDomainId(payload.domainId); try { await processOngoingSequence(ongoingSequenceId); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "worker.sequence", + domainId, + context: { + queue_name: "sequence", + job_id: String(job.id), + ongoing_sequence_id: String(ongoingSequenceId), + }, + }); } }, { connection: redis }, @@ -28,14 +52,39 @@ export async function processOngoingSequences(): Promise { } // eslint-disable-next-line no-constant-condition while (true) { - // eslint-disable-next-line no-console - console.log( - `Starting process of ongoing sequence at ${new Date().toDateString()}`, - ); + try { + // eslint-disable-next-line no-console + console.log( + `Starting process of ongoing sequence at ${new Date().toDateString()}`, + ); - const dueOngoingSequences = await getDueOngoingSequences(); - for (const ongoingSequence of dueOngoingSequences) { - sequenceQueue.add("sequence", ongoingSequence.id); + const dueOngoingSequences = await getDueOngoingSequences(); + for (const ongoingSequence of dueOngoingSequences) { + try { + await sequenceQueue.add("sequence", { + ongoingSequenceId: String(ongoingSequence.id), + domainId: getDomainId(ongoingSequence.domain), + }); + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "processOngoingSequences.enqueue", + domainId: getDomainId(ongoingSequence.domain), + context: { + queue_name: "sequence", + ongoing_sequence_id: String(ongoingSequence.id), + }, + }); + } + } + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "processOngoingSequences.loop", + domainId: getDomainId(), + }); } await new Promise((resolve) => setTimeout(resolve, 60 * 1000)); diff --git a/apps/queue/src/domain/process-ongoing-sequences/process-ongoing-sequence.ts b/apps/queue/src/domain/process-ongoing-sequences/process-ongoing-sequence.ts index 4d660688c..cad0e1598 100644 --- 
a/apps/queue/src/domain/process-ongoing-sequences/process-ongoing-sequence.ts +++ b/apps/queue/src/domain/process-ongoing-sequences/process-ongoing-sequence.ts @@ -15,7 +15,7 @@ import { import { sendMail } from "@/mail"; import mongoose from "mongoose"; import EmailDelivery from "@/domain/model/email-delivery"; -import { AdminSequence, InternalUser } from "@courselit/common-logic"; +import { AdminSequence, InternalUser } from "@courselit/orm-models"; import { Email as EmailType, renderEmailToHtml } from "@courselit/email-editor"; import { getUnsubLink } from "@/utils/get-unsub-link"; import { getSiteUrl } from "@/utils/get-site-url"; @@ -24,69 +24,90 @@ import { JSDOM } from "jsdom"; import { DomainDocument } from "@/domain/model/domain"; import { Liquid } from "liquidjs"; import { logger } from "@/logger"; +import { captureError, getDomainId } from "@/observability/posthog"; const liquidEngine = new Liquid(); export async function processOngoingSequence( - ongoingSequenceId: mongoose.Types.ObjectId, + ongoingSequenceId: mongoose.Types.ObjectId | string, ) { - const ongoingSequence = - await OngoingSequenceModel.findById(ongoingSequenceId); - if (!ongoingSequence) { - return; - } + let domainId = getDomainId(); + try { + const ongoingSequence = + await OngoingSequenceModel.findById(ongoingSequenceId); + if (!ongoingSequence) { + return; + } - const domain = await getDomain(ongoingSequence.domain); - if ( - !domain || - !domain.quota || - !domain.quota.mail || - !domain.settings?.mailingAddress - ) { - console.log( - `Invalid domain settings for "${domain?.name || "unknown"}"`, - domain, - ); // eslint-disable-line no-console - return; - } - if ( - domain.quota.mail.dailyCount >= domain.quota.mail.daily || - domain.quota.mail.monthlyCount >= domain.quota.mail.monthly - ) { - console.log(`Domain quota exceeded for "${domain.name}"`); // eslint-disable-line no-console - return; - } + domainId = getDomainId(ongoingSequence.domain); - const sequence = await 
getSequence(ongoingSequence.sequenceId); - const [user, creator] = await Promise.all([ - getUser(ongoingSequence.userId), - sequence ? getUser(sequence.creatorId) : null, - ]); + const domain = await getDomain(ongoingSequence.domain); + if ( + !domain || + !domain.quota || + !domain.quota.mail || + !domain.settings?.mailingAddress + ) { + console.log( + `Invalid domain settings for "${domain?.name || "unknown"}"`, + domain, + ); // eslint-disable-line no-console + return; + } + if ( + domain.quota.mail.dailyCount >= domain.quota.mail.daily || + domain.quota.mail.monthlyCount >= domain.quota.mail.monthly + ) { + console.log(`Domain quota exceeded for "${domain.name}"`); // eslint-disable-line no-console + return; + } - if (!sequence || !user || !creator) { - return await cleanUpResources(ongoingSequence); - } + const sequence = await getSequence(ongoingSequence.sequenceId); + const [user, creator] = await Promise.all([ + getUser(ongoingSequence.userId), + sequence ? getUser(sequence.creatorId) : null, + ]); - const nextPublishedEmail = getNextPublishedEmail(sequence, ongoingSequence); + if (!sequence || !user || !creator) { + return await cleanUpResources(ongoingSequence); + } - await attemptMailSending({ - domain, - creator, - user, - sequence, - ongoingSequence, - email: nextPublishedEmail, - }); + const nextPublishedEmail = getNextPublishedEmail( + sequence, + ongoingSequence, + ); + + await attemptMailSending({ + domain, + creator, + user, + sequence, + ongoingSequence, + email: nextPublishedEmail, + }); - ongoingSequence.sentEmailIds.push(nextPublishedEmail.emailId); - await domain.incrementEmailCount(); - const nextEmail = getNextPublishedEmail(sequence, ongoingSequence); - if (!nextEmail) { - return await cleanUpResources(ongoingSequence, true); - } else { - ongoingSequence.nextEmailScheduledTime = new Date( - ongoingSequence.nextEmailScheduledTime + nextEmail.delayInMillis, - ).getTime(); - await ongoingSequence.save(); + 
ongoingSequence.sentEmailIds.push(nextPublishedEmail.emailId); + await domain.incrementEmailCount(); + const nextEmail = getNextPublishedEmail(sequence, ongoingSequence); + if (!nextEmail) { + return await cleanUpResources(ongoingSequence, true); + } else { + ongoingSequence.nextEmailScheduledTime = new Date( + ongoingSequence.nextEmailScheduledTime + + nextEmail.delayInMillis, + ).getTime(); + await ongoingSequence.save(); + } + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "processOngoingSequence.handler", + domainId, + context: { + ongoing_sequence_id: String(ongoingSequenceId), + }, + }); + throw err; } } @@ -243,6 +264,19 @@ async function attemptMailSending({ } else { await ongoingSequence.save(); } + logger.error(err); + captureError({ + error: err, + source: "processOngoingSequence.mail_send", + domainId: getDomainId(domain?.id), + context: { + sequence_id: sequence.sequenceId, + user_id: user.userId, + error_code: err?.code, + response_code: err?.responseCode, + command: err?.command, + }, + }); throw err; } } @@ -296,6 +330,15 @@ function transformLinksForClickTracking( return dom.serialize(); } catch (error) { logger.error("Error transforming links with jsdom:", error); + captureError({ + error, + source: "processOngoingSequence.transform_links", + domainId: getDomainId(domain?.id), + context: { + sequence_id: sequenceId, + user_id: userId, + }, + }); return htmlContent; } } diff --git a/apps/queue/src/domain/process-rules.ts b/apps/queue/src/domain/process-rules.ts index 91a603401..20b6a223a 100644 --- a/apps/queue/src/domain/process-rules.ts +++ b/apps/queue/src/domain/process-rules.ts @@ -6,10 +6,10 @@ import MembershipModel from "./model/membership"; import { logger } from "../logger"; import { Constants, Rule, User } from "@courselit/common-models"; import mongoose from "mongoose"; -import { - AdminSequence, - convertFiltersToDBConditions, -} from "@courselit/common-logic"; +import { convertFiltersToDBConditions 
} from "@courselit/common-logic"; +import { captureError, getDomainId } from "../observability/posthog"; +import { logInfo } from "../observability/logs"; +import { AdminSequence } from "@courselit/orm-models"; type RuleWithDomain = Omit & { domain: mongoose.Types.ObjectId; @@ -18,24 +18,41 @@ type RuleWithDomain = Omit & { export async function processRules() { // eslint-disable-next-line no-constant-condition while (true) { - const currentTime = new Date(); - // eslint-disable-next-line no-console - console.log( - `Starting process of rules at ${currentTime.toDateString()}`, - ); + try { + const currentTime = new Date(); + // eslint-disable-next-line no-console + console.log( + `Starting process of rules at ${currentTime.toDateString()}`, + ); - // @ts-ignore - Mongoose type compatibility issue - const dueRules: RuleWithDomain[] = (await RuleModel.find({ - event: Constants.EventType.DATE_OCCURRED, - eventDateInMillis: { $lt: currentTime.getTime() }, - }).lean()) as any; + // @ts-ignore - Mongoose type compatibility issue + const dueRules: RuleWithDomain[] = (await RuleModel.find({ + event: Constants.EventType.DATE_OCCURRED, + eventDateInMillis: { $lt: currentTime.getTime() }, + }).lean()) as any; - for (const rule of dueRules) { - try { - await processRule(rule); - } catch (err: any) { - logger.error(err); + for (const rule of dueRules) { + try { + await processRule(rule); + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "processRules.rule", + domainId: getDomainId(rule.domain), + context: { + sequence_id: rule.sequenceId, + }, + }); + } } + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "processRules.loop", + domainId: getDomainId(), + }); } await new Promise((resolve) => setTimeout(resolve, 60 * 1000)); @@ -90,6 +107,15 @@ async function addBroadcastToOngoingSequence(sequence: AdminSequence) { `Adding ${allUsers.length} users to ongoing sequence`, JSON.stringify(query), ); + logInfo( + 
`${allUsers.length} subscribers added to ${sequence.type} ${sequence.sequenceId}`, + { + source: "processRules.addBroadcastToOngoingSequence", + domain_id: getDomainId(sequence.domain), + sequence_id: sequence.sequenceId, + subscribers_count: allUsers.length, + }, + ); const ongoingSequences = allUsers.map((user) => ({ domain: sequence.domain, sequenceId: sequence.sequenceId, diff --git a/apps/queue/src/domain/queries.ts b/apps/queue/src/domain/queries.ts index 68eda83b3..962311790 100644 --- a/apps/queue/src/domain/queries.ts +++ b/apps/queue/src/domain/queries.ts @@ -11,10 +11,10 @@ import DomainModel, { DomainDocument } from "./model/domain"; import { Constants, EmailTemplate } from "@courselit/common-models"; import emailTemplate from "./model/email-template"; import { - AdminSequence, InternalMembership, InternalUser, -} from "@courselit/common-logic"; + AdminSequence, +} from "@courselit/orm-models"; export async function getDueOngoingSequences(): Promise { const currentTime = new Date().getTime(); @@ -73,9 +73,14 @@ export async function getTemplate(id: string): Promise { return (await emailTemplate.find({ templateId: id }).lean()) as any; } -export async function getMemberships(entityId: string, entityType: string) { +export async function getMemberships( + entityId: string, + entityType: string, + domain: mongoose.Types.ObjectId, +) { // @ts-ignore - Mongoose type compatibility issue return await MembershipModel.find({ + domain, entityId, entityType, status: Constants.MembershipStatus.ACTIVE, diff --git a/apps/queue/src/domain/worker.ts b/apps/queue/src/domain/worker.ts index 035137e08..8a1ac0f02 100644 --- a/apps/queue/src/domain/worker.ts +++ b/apps/queue/src/domain/worker.ts @@ -1,24 +1,16 @@ import { Worker } from "bullmq"; -import nodemailer from "nodemailer"; import redis from "../redis"; import { logger } from "../logger"; - -const transporter = nodemailer.createTransport({ - host: process.env.EMAIL_HOST, - port: +(process.env.EMAIL_PORT || 587), - 
auth: { - user: process.env.EMAIL_USER, - pass: process.env.EMAIL_PASS, - }, -}); +import { captureError, getDomainId } from "../observability/posthog"; +import { sendMail } from "../mail"; const worker = new Worker( "mail", async (job) => { - const { to, from, subject, body, headers } = job.data; + const { to, from, subject, body, headers, domainId } = job.data; try { - await transporter.sendMail({ + await sendMail({ from, to, subject, @@ -27,6 +19,18 @@ const worker = new Worker( }); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "worker.mail", + domainId: getDomainId(domainId), + context: { + queue_name: "mail", + job_id: String(job.id), + error_code: err?.code, + response_code: err?.responseCode, + command: err?.command, + }, + }); } }, { connection: redis }, diff --git a/apps/queue/src/index.ts b/apps/queue/src/index.ts index a0466e1a6..8ffd4ae3f 100644 --- a/apps/queue/src/index.ts +++ b/apps/queue/src/index.ts @@ -10,6 +10,13 @@ import "./notifications/worker/dispatch-notification"; // start loops import { startEmailAutomation } from "./start-email-automation"; import { verifyJWTMiddleware } from "./middlewares/verify-jwt"; +import { + captureError, + getDomainId, + setupPosthogExpressErrorHandler, +} from "./observability/posthog"; +import { initPosthogLogs } from "./observability/logs"; +import { logger } from "./logger"; const app = express(); app.use(express.json()); @@ -21,7 +28,38 @@ app.get("/healthy", (req, res) => { res.status(200).json({ status: "ok", uptime: process.uptime() }); }); -startEmailAutomation(); +app.use((err, req: any, res, next) => { + captureError({ + error: err, + source: "express.uncaught", + domainId: getDomainId(req?.user?.domain), + context: { + path: req?.path, + method: req?.method, + }, + }); + next(err); +}); + +setupPosthogExpressErrorHandler(app); + +initPosthogLogs().catch((err) => { + logger.error(err); + captureError({ + error: err, + source: "service.observability.logs_init", + 
domainId: getDomainId(), + }); +}); + +startEmailAutomation().catch((err) => { + logger.error(err); + captureError({ + error: err, + source: "service.startup", + domainId: getDomainId(), + }); +}); const port = process.env.PORT || 80; app.listen(port, () => { diff --git a/apps/queue/src/job/routes.ts b/apps/queue/src/job/routes.ts index 3ccba53f0..9bba61e3b 100644 --- a/apps/queue/src/job/routes.ts +++ b/apps/queue/src/job/routes.ts @@ -10,22 +10,48 @@ import NotificationModel from "../notifications/model/notification"; import { ObjectId } from "mongodb"; import { Constants, User } from "@courselit/common-models"; import { z } from "zod"; +import { captureError, getDomainId } from "../observability/posthog"; const router: any = express.Router(); -router.post("/mail", async (req: express.Request, res: express.Response) => { - try { - const { to, from, subject, body, headers } = req.body; - MailJob.parse({ to, from, subject, body, headers }); +router.post( + "/mail", + async ( + req: express.Request & { user: User & { domain: string } }, + res: express.Response, + ) => { + const domainId = getDomainId(req.user?.domain); - await addMailJob({ to, from, subject, body, headers }); + try { + const { to, from, subject, body, headers } = req.body; + MailJob.parse({ + to, + from, + subject, + body, + headers, + domainId, + }); - res.status(200).json({ message: "Success" }); - } catch (err: any) { - logger.error(err); - res.status(500).json({ error: err.message }); - } -}); + await addMailJob({ to, from, subject, body, headers, domainId }); + + res.status(200).json({ message: "Success" }); + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "job.mail.route", + domainId, + context: { + path: req.path, + method: req.method, + route: "/job/mail", + }, + }); + res.status(500).json({ error: err.message }); + } + }, +); const DispatchNotificationJob = z.object({ activityType: z @@ -73,6 +99,16 @@ router.post( res.status(200).json({ message: 
"Success" }); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "job.dispatch_notification.route", + domainId: getDomainId(user.domain), + context: { + path: req.path, + method: req.method, + route: "/job/dispatch-notification", + }, + }); res.status(500).json({ error: err.message }); } }, @@ -107,6 +143,16 @@ router.post( res.status(200).json({ message: "Success" }); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "job.notification.route", + domainId: getDomainId(user.domain), + context: { + path: req.path, + method: req.method, + route: "/job/notification", + }, + }); res.status(500).json({ error: err.message }); } }, diff --git a/apps/queue/src/mail.ts b/apps/queue/src/mail.ts index 91ca33842..35ece82a7 100644 --- a/apps/queue/src/mail.ts +++ b/apps/queue/src/mail.ts @@ -1,4 +1,5 @@ import { createTransport } from "nodemailer"; +import { logInfo } from "./observability/logs"; const transporter = createTransport({ host: process.env.EMAIL_HOST, @@ -14,16 +15,29 @@ export async function sendMail({ to, subject, html, + headers, }: { from: string; to: string; subject: string; html: string; + headers?: Record; }) { + const transportMode = + process.env.NODE_ENV === "production" ? 
"smtp" : "console"; + if (process.env.NODE_ENV === "production") { - await transporter.sendMail({ from, to, subject, html }); + await transporter.sendMail({ from, to, subject, html, headers }); } else { // eslint-disable-next-line no-console - console.log("Mail sent", from, to, subject, html, new Date()); + console.log("Mail sent", from, to, subject, html, headers, new Date()); } + + logInfo("Mail sent", { + source: "mail.send", + queue_name: "mail", + transport_mode: transportMode, + to, + subject, + }); } diff --git a/apps/queue/src/middlewares/verify-jwt.ts b/apps/queue/src/middlewares/verify-jwt.ts index 3bec18973..70eb4054e 100644 --- a/apps/queue/src/middlewares/verify-jwt.ts +++ b/apps/queue/src/middlewares/verify-jwt.ts @@ -1,5 +1,6 @@ import { jwtUtils } from "@courselit/utils"; import { logger } from "../logger"; +import { captureError, getDomainId } from "../observability/posthog"; export const verifyJWTMiddleware = (req, res, next) => { try { @@ -18,6 +19,11 @@ export const verifyJWTMiddleware = (req, res, next) => { next(); } catch (err) { logger.error(err); + captureError({ + error: err, + source: "middleware.verify_jwt", + domainId: getDomainId(), + }); return res.status(500).json({ error: err.message }); } }; diff --git a/apps/queue/src/notifications/services/channels/email.ts b/apps/queue/src/notifications/services/channels/email.ts index 34473490d..8bd560461 100644 --- a/apps/queue/src/notifications/services/channels/email.ts +++ b/apps/queue/src/notifications/services/channels/email.ts @@ -4,6 +4,7 @@ import { addMailJob } from "../../../domain/handler"; import { getSiteUrl } from "../../../utils/get-site-url"; import { getUnsubLink } from "../../../utils/get-unsub-link"; import { ChannelPayload, NotificationChannel } from "./types"; +import { getDomainId } from "../../../observability/posthog"; export class EmailChannel implements NotificationChannel { async send(payload: ChannelPayload): Promise { @@ -46,6 +47,7 @@ export class EmailChannel 
implements NotificationChannel { name: payload.domain.settings?.title || payload.domain.name, email: process.env.EMAIL_FROM || "", }), + domainId: getDomainId(payload.domain?._id), subject: notificationDetails.message, body: `

${notificationDetails.message}

diff --git a/apps/queue/src/notifications/worker/dispatch-notification.ts b/apps/queue/src/notifications/worker/dispatch-notification.ts index 4e4dd074d..f7d0f9ca2 100644 --- a/apps/queue/src/notifications/worker/dispatch-notification.ts +++ b/apps/queue/src/notifications/worker/dispatch-notification.ts @@ -17,6 +17,7 @@ import { ChannelPayload, NotificationChannel, } from "../services/channels/types"; +import { captureError, getDomainId } from "../../observability/posthog"; interface DispatchNotificationJob { domain: string | mongoose.Types.ObjectId; @@ -35,12 +36,21 @@ const channelRegistry: Record = { const worker = new Worker( "dispatch-notification", async (job) => { + const payload = job.data as DispatchNotificationJob; try { - await processDispatchNotificationJob( - job.data as DispatchNotificationJob, - ); + await processDispatchNotificationJob(payload); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "worker.dispatch_notification", + domainId: getDomainId(payload?.domain), + context: { + queue_name: "dispatch-notification", + job_id: String(job.id), + activity_type: payload?.activityType, + }, + }); throw err; } }, diff --git a/apps/queue/src/notifications/worker/notification.ts b/apps/queue/src/notifications/worker/notification.ts index aea8dd101..59ee5e23c 100644 --- a/apps/queue/src/notifications/worker/notification.ts +++ b/apps/queue/src/notifications/worker/notification.ts @@ -2,6 +2,7 @@ import { Worker } from "bullmq"; import redis from "../../redis"; import { logger } from "../../logger"; import { notificationEmitter } from "../utils/emitter"; +import { captureError, getDomainId } from "../../observability/posthog"; const worker = new Worker( "notification", @@ -11,6 +12,15 @@ const worker = new Worker( deliverInAppNotification(notification); } catch (err: any) { logger.error(err); + captureError({ + error: err, + source: "worker.notification", + domainId: getDomainId(notification?.domain), + context: { + 
queue_name: "notification", + job_id: String(job.id), + }, + }); } }, { connection: redis }, diff --git a/apps/queue/src/observability/__tests__/posthog.test.ts b/apps/queue/src/observability/__tests__/posthog.test.ts new file mode 100644 index 000000000..0d2949078 --- /dev/null +++ b/apps/queue/src/observability/__tests__/posthog.test.ts @@ -0,0 +1,166 @@ +/** + * @jest-environment node + */ + +const captureExceptionMock = jest.fn(); +const setupExpressErrorHandlerMock = jest.fn(); +const posthogConstructorMock = jest.fn(() => ({ + captureException: captureExceptionMock, +})); + +jest.mock("posthog-node", () => ({ + PostHog: posthogConstructorMock, + setupExpressErrorHandler: setupExpressErrorHandlerMock, +})); + +const loadModule = async () => { + jest.resetModules(); + return await import("../posthog"); +}; + +describe("error tracking wrapper", () => { + const originalEnv = { ...process.env }; + + beforeEach(() => { + jest.clearAllMocks(); + process.env = { ...originalEnv }; + delete process.env.POSTHOG_API_KEY; + delete process.env.POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE; + delete process.env.DEPLOY_ENV; + }); + + afterAll(() => { + process.env = originalEnv; + }); + + it("is no-op when POSTHOG_API_KEY is missing", async () => { + const module = await loadModule(); + module.captureError({ + error: new Error("boom"), + source: "worker.mail", + domainId: "domain-1", + }); + + expect(module.isPosthogEnabled()).toBe(false); + expect(posthogConstructorMock).not.toHaveBeenCalled(); + expect(captureExceptionMock).not.toHaveBeenCalled(); + }); + + it("captures exception when POSTHOG_API_KEY is present", async () => { + process.env.POSTHOG_API_KEY = "phc_test_key"; + process.env.DEPLOY_ENV = "staging"; + + const module = await loadModule(); + module.captureError({ + error: new Error("boom"), + source: "worker.mail", + domainId: "domain-1", + context: { + queue_name: "mail", + job_id: "123", + }, + }); + + expect(module.isPosthogEnabled()).toBe(true); + 
expect(posthogConstructorMock).toHaveBeenCalledWith( + "phc_test_key", + expect.objectContaining({ + enableExceptionAutocapture: true, + }), + ); + expect(captureExceptionMock).toHaveBeenCalledWith( + expect.any(Error), + "domain-1", + expect.objectContaining({ + service: "courselit:queue", + environment: "staging", + source: "worker.mail", + domain_id: "domain-1", + }), + ); + }); + + it("dedupes identical exceptions in the 60s window", async () => { + process.env.POSTHOG_API_KEY = "phc_test_key"; + + const module = await loadModule(); + const error = new Error("same-message"); + + module.captureError({ + error, + source: "processRules.loop", + domainId: "domain-1", + }); + module.captureError({ + error, + source: "processRules.loop", + domainId: "domain-1", + }); + + expect(captureExceptionMock).toHaveBeenCalledTimes(1); + }); + + it("applies per-source cap from env var", async () => { + process.env.POSTHOG_API_KEY = "phc_test_key"; + process.env.POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE = "1"; + + const module = await loadModule(); + module.captureError({ + error: new Error("first"), + source: "worker.mail", + domainId: "domain-1", + }); + module.captureError({ + error: new Error("second"), + source: "worker.mail", + domainId: "domain-1", + }); + + expect(captureExceptionMock).toHaveBeenCalledTimes(1); + }); + + it("handles dedupe cache bound without throwing", async () => { + process.env.POSTHOG_API_KEY = "phc_test_key"; + process.env.POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE = "20000"; + + const module = await loadModule(); + + for (let i = 0; i < 10001; i++) { + module.captureError({ + error: new Error(`err-${i}`), + source: `worker.mail.${i}`, + domainId: "domain-1", + }); + } + + expect(captureExceptionMock).toHaveBeenCalledTimes(10001); + }); + + it("swallows capture pipeline failures", async () => { + process.env.POSTHOG_API_KEY = "phc_test_key"; + captureExceptionMock.mockImplementation(() => { + throw new Error("capture failed"); + }); + + const module = 
await loadModule(); + + expect(() => + module.captureError({ + error: new Error("boom"), + source: "worker.mail", + domainId: "domain-1", + }), + ).not.toThrow(); + }); + + it("wires setupExpressErrorHandler only when enabled", async () => { + let module = await loadModule(); + module.setupPosthogExpressErrorHandler({} as any); + expect(setupExpressErrorHandlerMock).not.toHaveBeenCalled(); + + process.env.POSTHOG_API_KEY = "phc_test_key"; + module = await loadModule(); + module.setupPosthogExpressErrorHandler({} as any); + expect(setupExpressErrorHandlerMock).toHaveBeenCalledTimes(1); + }); +}); diff --git a/apps/queue/src/observability/logs.ts b/apps/queue/src/observability/logs.ts new file mode 100644 index 000000000..34071635c --- /dev/null +++ b/apps/queue/src/observability/logs.ts @@ -0,0 +1,82 @@ +import { logs } from "@opentelemetry/api-logs"; +import { OTLPLogExporter } from "@opentelemetry/exporter-logs-otlp-http"; +import { resourceFromAttributes } from "@opentelemetry/resources"; +import { BatchLogRecordProcessor } from "@opentelemetry/sdk-logs"; +import { NodeSDK } from "@opentelemetry/sdk-node"; + +const SERVICE_NAME = "courselit:queue"; +const LOGS_PATH = "/i/v1/logs"; + +let sdk: NodeSDK | null = null; +let initialized = false; +const logger = logs.getLogger(SERVICE_NAME); + +export async function initPosthogLogs() { + if (initialized) { + return; + } + + const token = process.env.POSTHOG_API_KEY; + if (!token) { + initialized = true; + return; + } + + const baseHost = (process.env.POSTHOG_HOST || "https://us.i.posthog.com") + .replace(/\/+$/, "") + .trim(); + const url = `${baseHost}${LOGS_PATH}`; + + sdk = new NodeSDK({ + resource: resourceFromAttributes({ + "service.name": SERVICE_NAME, + "deployment.environment": process.env.DEPLOY_ENV || "unknown", + }), + logRecordProcessor: new BatchLogRecordProcessor( + new OTLPLogExporter({ + url, + headers: { + Authorization: `Bearer ${token}`, + }, + }), + ), + }); + + await Promise.resolve(sdk.start()); + 
initialized = true; +} + +export function logInfo( + body: string, + attributes: Record<string, unknown> = {}, +) { + if (!sdk) { + return; + } + + try { + logger.emit({ + severityText: "info", + body, + attributes: sanitizeAttributes(attributes), + }); + } catch { + // swallow logging failures; queue processing must continue + } +} + +function sanitizeAttributes(attributes: Record<string, unknown>) { + const sanitized: Record<string, string | number | boolean> = {}; + + for (const [key, value] of Object.entries(attributes)) { + if ( + typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" + ) { + sanitized[key] = value; + } + } + + return sanitized; +} diff --git a/apps/queue/src/observability/posthog.ts b/apps/queue/src/observability/posthog.ts new file mode 100644 index 000000000..2e1017246 --- /dev/null +++ b/apps/queue/src/observability/posthog.ts @@ -0,0 +1,266 @@ +import type { Express } from "express"; +import { PostHog, setupExpressErrorHandler } from "posthog-node"; + +type Severity = "error" | "warning" | "critical"; + +interface CaptureErrorInput { + error: unknown; + source: string; + domainId?: unknown; + severity?: Severity; + context?: Record<string, unknown>; +} + +interface CaptureEventInput { + event: string; + source: string; + domainId?: unknown; + properties?: Record<string, unknown>; +} + +const SYSTEM_DOMAIN_ID = "system"; +const SERVICE = "courselit:queue"; +const DEDUPE_TTL_MS = 60_000; +const DEDUPE_MAX_KEYS = 10_000; +const DEFAULT_PER_SOURCE_CAP = 100; +const MAX_MESSAGE_LENGTH = 500; +const MAX_STACK_TOP_LENGTH = 300; +const MAX_CONTEXT_STRING_LENGTH = 500; +const CONTEXT_ALLOWLIST = new Set([ + "sequence_id", + "course_id", + "user_id", + "job_id", + "queue_name", + "activity_type", + "path", + "method", + "error_code", + "response_code", + "command", + "ongoing_sequence_id", + "route", + "worker_name", +]); + +const perSourceCap = getPerSourceCap(); +const environment = process.env.DEPLOY_ENV || "unknown"; +const dedupeFingerprintExpiry = new Map<string, number>(); +const sourceRateWindow = new Map<string, { minute: number; count: number }>(); + +const client 
= process.env.POSTHOG_API_KEY + ? new PostHog(process.env.POSTHOG_API_KEY, { + host: process.env.POSTHOG_HOST || "https://us.i.posthog.com", + enableExceptionAutocapture: true, + }) + : null; + +export function isPosthogEnabled() { + return Boolean(client); +} + +export function setupPosthogExpressErrorHandler(app: Express) { + if (!client) { + return; + } + + setupExpressErrorHandler(client, app); +} + +export function getDomainId(domainId?: unknown) { + if (domainId === null || domainId === undefined) { + return SYSTEM_DOMAIN_ID; + } + + const normalized = String(domainId).trim(); + return normalized || SYSTEM_DOMAIN_ID; +} + +export function captureError({ + error, + source, + domainId, + severity = "error", + context = {}, +}: CaptureErrorInput) { + if (!client) { + return; + } + + try { + const { name, message, stackTop, normalizedError } = + getErrorParts(error); + const safeDomainId = getDomainId(domainId); + const fingerprint = `${source}|${name}|${stackTop}`; + if (shouldDedupe(fingerprint)) { + return; + } + + if (isSourceRateLimited(source)) { + return; + } + + client.captureException(normalizedError, safeDomainId, { + service: SERVICE, + environment, + source, + domain_id: safeDomainId, + severity, + error_name: name, + error_message: message, + error_stack_top: stackTop, + ...sanitizeContext(context), + }); + } catch { + // swallow capture pipeline errors; queue processing must not fail because of telemetry + } +} + +export function captureEvent({ + event, + source, + domainId, + properties = {}, +}: CaptureEventInput) { + if (!client) { + return; + } + + try { + const safeDomainId = getDomainId(domainId); + client.capture({ + event, + distinctId: safeDomainId, + properties: { + service: SERVICE, + environment, + source, + domain_id: safeDomainId, + ...sanitizeContext(properties), + }, + }); + } catch { + // swallow capture pipeline errors; queue processing must not fail because of telemetry + } +} + +function shouldDedupe(fingerprint: string) { + 
const now = Date.now(); + const expiry = dedupeFingerprintExpiry.get(fingerprint); + if (expiry && expiry > now) { + return true; + } + + if (dedupeFingerprintExpiry.size >= DEDUPE_MAX_KEYS) { + dedupeFingerprintExpiry.clear(); + } + + dedupeFingerprintExpiry.set(fingerprint, now + DEDUPE_TTL_MS); + return false; +} + +function isSourceRateLimited(source: string) { + const minute = Math.floor(Date.now() / 60_000); + const current = sourceRateWindow.get(source); + + if (!current || current.minute !== minute) { + sourceRateWindow.set(source, { minute, count: 1 }); + return false; + } + + if (current.count >= perSourceCap) { + return true; + } + + current.count += 1; + return false; +} + +function getPerSourceCap() { + const configured = Number.parseInt( + process.env.POSTHOG_ERROR_CAP_PER_SOURCE_PER_MINUTE || "", + 10, + ); + + if (!Number.isFinite(configured) || configured <= 0) { + return DEFAULT_PER_SOURCE_CAP; + } + + return configured; +} + +function getErrorParts(error: unknown) { + if (error instanceof Error) { + const stackTop = sanitizeErrorText(extractStackTop(error.stack || "")); + return { + name: error.name || "Error", + message: sanitizeErrorText(error.message, MAX_MESSAGE_LENGTH), + stackTop, + normalizedError: error, + }; + } + + const fallbackMessage = + typeof error === "string" ? 
error : "Unknown error thrown"; + const normalizedError = new Error(fallbackMessage); + + return { + name: "Error", + message: sanitizeErrorText(fallbackMessage, MAX_MESSAGE_LENGTH), + stackTop: "", + normalizedError, + }; +} + +function extractStackTop(stack: string) { + const [, firstFrame = ""] = stack.split("\n"); + return firstFrame.trim(); +} + +function sanitizeContext(context: Record) { + const sanitized: Record = {}; + + for (const [key, value] of Object.entries(context)) { + if (!CONTEXT_ALLOWLIST.has(key)) { + continue; + } + + if ( + typeof value === "number" || + typeof value === "boolean" || + typeof value === "string" + ) { + sanitized[key] = + typeof value === "string" + ? sanitizeErrorText(value, MAX_CONTEXT_STRING_LENGTH) + : value; + continue; + } + + if (value === null || value === undefined) { + continue; + } + + sanitized[key] = sanitizeErrorText( + String(value), + MAX_CONTEXT_STRING_LENGTH, + ); + } + + return sanitized; +} + +function sanitizeErrorText(input: string, limit = MAX_STACK_TOP_LENGTH) { + const maskedTokens = input + .replace( + /\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,}\b/gi, + "[redacted-email]", + ) + .replace(/\b[A-Za-z0-9_-]{24,}\b/g, "[redacted-token]"); + + if (maskedTokens.length <= limit) { + return maskedTokens; + } + + return `${maskedTokens.slice(0, limit)}...`; +} diff --git a/apps/queue/src/start-email-automation.ts b/apps/queue/src/start-email-automation.ts index cdfbab218..3c797e1cd 100644 --- a/apps/queue/src/start-email-automation.ts +++ b/apps/queue/src/start-email-automation.ts @@ -2,11 +2,44 @@ import { connectToDatabase } from "./db"; import { processDrip } from "./domain/process-drip"; import { processOngoingSequences } from "./domain/process-ongoing-sequences"; import { processRules } from "./domain/process-rules"; +import { captureError, getDomainId } from "./observability/posthog"; +import { logger } from "./logger"; export async function startEmailAutomation() { - await connectToDatabase(); + try { + 
await connectToDatabase(); + } catch (err: any) { + logger.error(err); + captureError({ + error: err, + source: "startEmailAutomation.bootstrap", + domainId: getDomainId(), + }); + throw err; + } - processOngoingSequences(); - processRules(); - processDrip(); + processOngoingSequences().catch((err) => { + logger.error(err); + captureError({ + error: err, + source: "processOngoingSequences.loop", + domainId: getDomainId(), + }); + }); + processRules().catch((err) => { + logger.error(err); + captureError({ + error: err, + source: "processRules.loop", + domainId: getDomainId(), + }); + }); + processDrip().catch((err) => { + logger.error(err); + captureError({ + error: err, + source: "processDrip.loop", + domainId: getDomainId(), + }); + }); } diff --git a/apps/web/app/(with-contexts)/course-old/[slug]/[id]/layout-with-sidebar.tsx b/apps/web/app/(with-contexts)/course-old/[slug]/[id]/layout-with-sidebar.tsx index 20a9cafb5..42cb5adf9 100644 --- a/apps/web/app/(with-contexts)/course-old/[slug]/[id]/layout-with-sidebar.tsx +++ b/apps/web/app/(with-contexts)/course-old/[slug]/[id]/layout-with-sidebar.tsx @@ -1,6 +1,7 @@ "use client"; import { useContext } from "react"; +import constants from "@/config/constants"; import { formattedLocaleDate, isEnrolled, @@ -56,11 +57,18 @@ export function generateSideBarItems( }, ]; - let lastGroupDripDateInMillis = Date.now(); + let lastGroupDripDateInMillis = getRelativeDripAnchorMillis( + course, + profile, + ); for (const group of course.groups) { let availableLabel = ""; - if (group.drip && group.drip.status) { + const isAccessible = + group.drip?.status && + isGroupAccessibleToUser(course, profile as Profile, group); + + if (group.drip && group.drip.status && !isAccessible) { if ( group.drip.type === Constants.dripType[0].split("-")[0].toUpperCase() @@ -69,7 +77,8 @@ export function generateSideBarItems( (group?.drip?.delayInMillis ?? 
0) + lastGroupDripDateInMillis; const daysUntilAvailable = Math.ceil( - (delayInMillis - Date.now()) / 86400000, + (delayInMillis - Date.now()) / + constants.relativeDripUnitInMillis, ); availableLabel = daysUntilAvailable && @@ -97,7 +106,8 @@ export function generateSideBarItems( group.drip && group.drip.status && group.drip.type === - Constants.dripType[0].split("-")[0].toUpperCase() + Constants.dripType[0].split("-")[0].toUpperCase() && + !isGroupAccessibleToUser(course, profile as Profile, group) ) { lastGroupDripDateInMillis += group?.drip?.delayInMillis ?? 0; } @@ -145,11 +155,18 @@ export function isGroupAccessibleToUser( if (!group.drip || !group.drip.status) return true; if (!Array.isArray(profile.purchases)) return false; + const groupId = getGroupId(group); + if (!groupId) return false; for (const purchase of profile.purchases) { if (purchase.courseId === course.courseId) { if (Array.isArray(purchase.accessibleGroups)) { - if (purchase.accessibleGroups.includes(group.id)) { + const accessibleGroupIds = purchase.accessibleGroups + .map((id) => + id === null || id === undefined ? "" : String(id), + ) + .filter(Boolean); + if (accessibleGroupIds.includes(groupId)) { return true; } } @@ -158,3 +175,56 @@ export function isGroupAccessibleToUser( return false; } + +function getGroupId(group: GroupWithLessons): string | undefined { + const value = + (group as GroupWithLessons & { _id?: unknown }).id ?? 
+ (group as GroupWithLessons & { _id?: unknown })._id; + if (value === null || value === undefined) { + return undefined; + } + + return String(value); +} + +function getRelativeDripAnchorMillis( + course: CourseFrontend, + profile: Profile, +): number { + const purchase = profile.purchases?.find( + (purchase) => purchase.courseId === course.courseId, + ); + + if (purchase?.lastDripAt) { + const lastDripAt = normalizeTimestamp(purchase.lastDripAt); + if (!Number.isNaN(lastDripAt)) { + return lastDripAt; + } + } + + if (purchase?.createdAt) { + const createdAt = normalizeTimestamp(purchase.createdAt); + if (!Number.isNaN(createdAt)) { + return createdAt; + } + } + + return Date.now(); +} + +function normalizeTimestamp(value: string | number | Date): number { + if (typeof value === "number") { + return value; + } + + if (value instanceof Date) { + return value.getTime(); + } + + const numericValue = Number(value); + if (!Number.isNaN(numericValue)) { + return numericValue; + } + + return new Date(value).getTime(); +} diff --git a/apps/web/app/(with-contexts)/course/[slug]/[id]/__tests__/layout-with-sidebar.test.tsx b/apps/web/app/(with-contexts)/course/[slug]/[id]/__tests__/layout-with-sidebar.test.tsx new file mode 100644 index 000000000..31f698d49 --- /dev/null +++ b/apps/web/app/(with-contexts)/course/[slug]/[id]/__tests__/layout-with-sidebar.test.tsx @@ -0,0 +1,651 @@ +jest.mock("next/navigation", () => ({ + usePathname: () => "/course/test-course/course-1", +})); + +jest.mock("next/link", () => { + return ({ children }: { children: React.ReactNode }) => children; +}); + +jest.mock("@components/contexts", () => ({ + ProfileContext: { Provider: ({ children }: any) => children }, + SiteInfoContext: { Provider: ({ children }: any) => children }, + ThemeContext: { Provider: ({ children }: any) => children }, +})); + +jest.mock("@components/ui/sidebar", () => ({ + Sidebar: ({ children }: any) => children, + SidebarContent: ({ children }: any) => children, + 
SidebarGroup: ({ children }: any) => children, + SidebarGroupContent: ({ children }: any) => children, + SidebarGroupLabel: ({ children }: any) => children, + SidebarHeader: ({ children }: any) => children, + SidebarInset: ({ children }: any) => children, + SidebarMenu: ({ children }: any) => children, + SidebarMenuButton: ({ children }: any) => children, + SidebarMenuItem: ({ children }: any) => children, + SidebarProvider: ({ children }: any) => children, + SidebarTrigger: () => null, +})); + +jest.mock("@components/ui/tooltip", () => ({ + Tooltip: ({ children }: any) => children, + TooltipContent: ({ children }: any) => children, + TooltipProvider: ({ children }: any) => children, + TooltipTrigger: ({ children }: any) => children, +})); + +jest.mock("@components/ui/collapsible", () => ({ + Collapsible: ({ children }: any) => children, + CollapsibleContent: ({ children }: any) => children, + CollapsibleTrigger: ({ children }: any) => children, +})); + +jest.mock("@components/ui/button", () => ({ + Button: ({ children }: any) => children, +})); + +jest.mock("@components/admin/next-theme-switcher", () => () => null); + +jest.mock("@courselit/components-library", () => ({ + Image: () => null, +})); + +jest.mock("@courselit/icons", () => ({ + CheckCircled: () => null, + Circle: () => null, + Lock: () => null, +})); + +jest.mock("lucide-react", () => ({ + ChevronRight: () => null, + Clock: () => null, + LogOutIcon: () => null, +})); + +jest.mock("@courselit/page-primitives", () => ({ + Caption: ({ children }: any) => children, +})); + +jest.mock("@ui-lib/utils", () => ({ + formattedLocaleDate: () => "Mar 22, 2026", + isEnrolled: () => true, + isLessonCompleted: () => false, +})); + +import { Constants, Profile } from "@courselit/common-models"; +import { generateSideBarItems } from "../layout-with-sidebar"; +import { CourseFrontend } from "../helpers"; +import constants from "@/config/constants"; + +describe("generateSideBarItems", () => { + const originalDateNow = 
Date.now; + const originalRelativeDripUnitInMillis = constants.relativeDripUnitInMillis; + + beforeEach(() => { + Date.now = jest.fn(() => + new Date("2026-03-22T00:00:00.000Z").getTime(), + ); + Object.assign(constants, { + relativeDripUnitInMillis: 1, + }); + }); + + afterEach(() => { + Date.now = originalDateNow; + Object.assign(constants, { + relativeDripUnitInMillis: originalRelativeDripUnitInMillis, + }); + }); + + it("hides the drip badge once the group has been released to the learner", () => { + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "First section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: "2026-03-22T00:00:00.000Z", + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: ["group-1"], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[1].badge).toBeUndefined(); + }); + + it("hides the drip badge when a released group is keyed by _id", () => { + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + _id: "group-legacy-1", + name: "First section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: "2026-03-22T00:00:00.000Z", + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = 
{ + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: ["group-legacy-1"], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[1].badge).toBeUndefined(); + }); + + it("shows cumulative relative drip time for later sections", () => { + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-0", + name: "Exact Date Section 1", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: new Date( + "2026-03-24T00:00:00.000Z", + ).getTime(), + }, + }, + { + id: "group-1", + name: "Tenth Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2, + }, + }, + { + id: "group-3", + name: "Exact Date Section 2", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: new Date( + "2026-03-25T00:00:00.000Z", + ).getTime(), + }, + }, + { + id: "group-4", + name: "Twelfth Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 1, + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[2].badge?.text).toBe("2 days"); + expect(items[4].badge?.text).toBe("3 days"); + }); + + it("shows cumulative relative drip time across consecutive locked sections", () => { + const course = { + title: "Course", 
+ description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Eighth Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2, + }, + }, + { + id: "group-2", + name: "Ninth Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 1, + }, + }, + { + id: "group-3", + name: "Tenth Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 5, + }, + }, + { + id: "group-4", + name: "Eleventh Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2, + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[3].badge?.text).toBe("8 days"); + expect(items[4].badge?.text).toBe("10 days"); + }); + + it("shows exact-date drip labels based on dateInUTC", () => { + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Exact Date Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: new Date( + "2026-03-24T00:00:00.000Z", + ).getTime(), + }, + }, + ], + } as unknown as 
CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[1].badge?.text).toBe("Mar 22, 2026"); + expect(items[1].badge?.description).toBe("Available on Mar 22, 2026"); + }); + + it("does not let prior relative drips affect exact-date labels", () => { + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Relative Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 50, + }, + }, + { + id: "group-2", + name: "Exact Date Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[1].split("-")[0].toUpperCase(), + dateInUTC: new Date( + "2026-03-24T00:00:00.000Z", + ).getTime(), + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[2].badge?.text).toBe("Mar 22, 2026"); + expect(items[2].badge?.description).toBe("Available on Mar 22, 2026"); + }); + + it("uses purchase createdAt as the relative drip anchor when lastDripAt is absent", () => { + Object.assign(constants, { + relativeDripUnitInMillis: 86_400_000, + }); + + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], 
+ paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Relative Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2 * constants.relativeDripUnitInMillis, + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date( + "2026-03-20T00:00:00.000Z", + ).toISOString(), + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[1].badge?.text).toBe("0 days"); + }); + + it("uses purchase lastDripAt as the relative drip anchor when present", () => { + Object.assign(constants, { + relativeDripUnitInMillis: 86_400_000, + }); + + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Relative Section", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2 * constants.relativeDripUnitInMillis, + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: [], + createdAt: new Date( + "2026-03-10T00:00:00.000Z", + ).toISOString(), + lastDripAt: new Date( + "2026-03-21T00:00:00.000Z", + ).toISOString(), + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[1].badge?.text).toBe("1 days"); + }); + + it("does not accumulate already accessible relative sections after lastDripAt", () => { + Object.assign(constants, 
{ + relativeDripUnitInMillis: 1, + }); + + const course = { + title: "Course", + description: "", + featuredImage: undefined, + updatedAt: new Date().toISOString(), + creatorId: "creator-1", + slug: "test-course", + cost: 0, + courseId: "course-1", + tags: [], + paymentPlans: [], + defaultPaymentPlan: "", + firstLesson: "lesson-1", + groups: [ + { + id: "group-1", + name: "Relative Section 1", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 2, + }, + }, + { + id: "group-2", + name: "Relative Section 2", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 5, + }, + }, + { + id: "group-3", + name: "Relative Section 3", + lessons: [], + drip: { + status: true, + type: Constants.dripType[0].split("-")[0].toUpperCase(), + delayInMillis: 3, + }, + }, + ], + } as unknown as CourseFrontend; + + const profile = { + userId: "user-1", + purchases: [ + { + courseId: "course-1", + accessibleGroups: ["group-1", "group-2"], + createdAt: new Date( + "2026-03-10T00:00:00.000Z", + ).toISOString(), + lastDripAt: new Date( + "2026-03-22T00:00:00.000Z", + ).toISOString(), + }, + ], + } as unknown as Profile; + + const items = generateSideBarItems( + course, + profile, + "/course/test-course/course-1", + ); + + expect(items[3].badge?.text).toBe("3 days"); + }); +}); diff --git a/apps/web/app/(with-contexts)/course/[slug]/[id]/layout-with-sidebar.tsx b/apps/web/app/(with-contexts)/course/[slug]/[id]/layout-with-sidebar.tsx index 7b81e0238..f9038a873 100644 --- a/apps/web/app/(with-contexts)/course/[slug]/[id]/layout-with-sidebar.tsx +++ b/apps/web/app/(with-contexts)/course/[slug]/[id]/layout-with-sidebar.tsx @@ -1,6 +1,7 @@ "use client"; import { ReactNode, useContext } from "react"; +import constants from "@/config/constants"; import { formattedLocaleDate, isEnrolled, @@ -332,19 +333,12 @@ export function generateSideBarItems( }, ]; - let 
lastGroupDripDateInMillis = Date.now(); + let lastGroupDripDateInMillis = getRelativeDripAnchorMillis( + course, + profile, + ); for (const group of course.groups) { - // Update lastGroupDripDateInMillis for relative drip types - if ( - group.drip && - group.drip.status && - group.drip.type === - Constants.dripType[0].split("-")[0].toUpperCase() - ) { - lastGroupDripDateInMillis += group?.drip?.delayInMillis ?? 0; - } - const groupItem: SidebarItem = { title: group.name, href: "#", @@ -391,6 +385,18 @@ export function generateSideBarItems( } items.push(groupItem); + + // Advance the cumulative relative drip cursor after computing the + // current group's label so the current delay is counted exactly once. + if ( + group.drip && + group.drip.status && + group.drip.type === + Constants.dripType[0].split("-")[0].toUpperCase() && + !isGroupAccessibleToUser(course, profile as Profile, group) + ) { + lastGroupDripDateInMillis += group?.drip?.delayInMillis ?? 0; + } } return items; @@ -407,6 +413,13 @@ function getDripLabel({ profile: Profile; lastGroupDripDateInMillis: number; }): { text: string; description: string } | undefined { + if ( + group.drip?.status && + isGroupAccessibleToUser(course, profile as Profile, group) + ) { + return undefined; + } + if (group.drip && group.drip.status) { let availableLabel = ""; let text = ""; @@ -417,7 +430,8 @@ function getDripLabel({ const delayInMillis = (group?.drip?.delayInMillis ?? 
0) + lastGroupDripDateInMillis; const daysUntilAvailable = Math.ceil( - (delayInMillis - Date.now()) / 86400000, + (delayInMillis - Date.now()) / + constants.relativeDripUnitInMillis, ); availableLabel = daysUntilAvailable && @@ -448,6 +462,48 @@ function getDripLabel({ return undefined; } +function getRelativeDripAnchorMillis( + course: CourseFrontend, + profile: Profile, +): number { + const purchase = profile.purchases?.find( + (purchase) => purchase.courseId === course.courseId, + ); + + if (purchase?.lastDripAt) { + const lastDripAt = normalizeTimestamp(purchase.lastDripAt); + if (!Number.isNaN(lastDripAt)) { + return lastDripAt; + } + } + + if (purchase?.createdAt) { + const createdAt = normalizeTimestamp(purchase.createdAt); + if (!Number.isNaN(createdAt)) { + return createdAt; + } + } + + return Date.now(); +} + +function normalizeTimestamp(value: string | number | Date): number { + if (typeof value === "number") { + return value; + } + + if (value instanceof Date) { + return value.getTime(); + } + + const numericValue = Number(value); + if (!Number.isNaN(numericValue)) { + return numericValue; + } + + return new Date(value).getTime(); +} + export function isGroupAccessibleToUser( course: CourseFrontend, profile: Profile, @@ -456,11 +512,18 @@ export function isGroupAccessibleToUser( if (!group.drip || !group.drip.status) return true; if (!Array.isArray(profile.purchases)) return false; + const groupId = getGroupId(group); + if (!groupId) return false; for (const purchase of profile.purchases) { if (purchase.courseId === course.courseId) { if (Array.isArray(purchase.accessibleGroups)) { - if (purchase.accessibleGroups.includes(group.id)) { + const accessibleGroupIds = purchase.accessibleGroups + .map((id) => + id === null || id === undefined ? 
"" : String(id), + ) + .filter(Boolean); + if (accessibleGroupIds.includes(groupId)) { return true; } } @@ -469,3 +532,14 @@ export function isGroupAccessibleToUser( return false; } + +function getGroupId(group: GroupWithLessons): string | undefined { + const value = + (group as GroupWithLessons & { _id?: unknown }).id ?? + (group as GroupWithLessons & { _id?: unknown })._id; + if (value === null || value === undefined) { + return undefined; + } + + return String(value); +} diff --git a/apps/web/app/(with-contexts)/dashboard/(sidebar)/product/[id]/content/page.tsx b/apps/web/app/(with-contexts)/dashboard/(sidebar)/product/[id]/content/page.tsx index e2208e4c7..e92c3f531 100644 --- a/apps/web/app/(with-contexts)/dashboard/(sidebar)/product/[id]/content/page.tsx +++ b/apps/web/app/(with-contexts)/dashboard/(sidebar)/product/[id]/content/page.tsx @@ -45,7 +45,7 @@ import DashboardContent from "@components/admin/dashboard-content"; import { AddressContext } from "@components/contexts"; import useProduct from "@/hooks/use-product"; import { truncate } from "@ui-lib/utils"; -import { Constants, Lesson, UIConstants } from "@courselit/common-models"; +import { Constants, UIConstants } from "@courselit/common-models"; import { DragAndDrop, useToast } from "@courselit/components-library"; import { FetchBuilder } from "@courselit/utils"; import { @@ -58,6 +58,9 @@ const { permissions } = UIConstants; export default function ContentPage() { const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + const [sectionMenuOpenId, setSectionMenuOpenId] = useState( + null, + ); const [itemToDelete, setItemToDelete] = useState { + const item = itemToDelete; setDeleteDialogOpen(false); setItemToDelete(null); - await removeGroup(itemToDelete?.id!, product?.courseId!); + setSectionMenuOpenId(null); + await removeGroup(item?.id!, product?.courseId!); }; const toggleSectionCollapse = (sectionId: string) => { @@ -244,7 +249,15 @@ export default function ContentPage() { )} - + + 
setSectionMenuOpenId( + open ? section.id : null, + ) + } + >