diff --git a/.cursor/mcp.json b/.cursor/mcp.json index 9b3221784d..da39e4ffaf 100644 --- a/.cursor/mcp.json +++ b/.cursor/mcp.json @@ -1,7 +1,3 @@ { - "mcpServers": { - "trigger.dev": { - "url": "http://localhost:3333/sse" - } - } -} \ No newline at end of file + "mcpServers": {} +} diff --git a/.gitignore b/.gitignore index 9bee46fc27..6f435d0400 100644 --- a/.gitignore +++ b/.gitignore @@ -63,4 +63,5 @@ apps/**/public/build /packages/core/src/package.json /packages/trigger-sdk/src/package.json /packages/python/src/package.json -.claude \ No newline at end of file +.claude +.mcp.log \ No newline at end of file diff --git a/README.md b/README.md index dab0551dc0..22181725bc 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,75 @@
- - - - Trigger.dev logo - - -### Open source background jobs and AI infrastructure -[Discord](https://trigger.dev/discord) | [Website](https://trigger.dev) | [Issues](https://github.com/triggerdotdev/trigger.dev/issues) | [Docs](https://trigger.dev/docs) +![Trigger.dev logo](https://content.trigger.dev/github-header-banner.jpg) -[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/triggerdotdev.svg?style=social&label=Follow%20%40trigger.dev)](https://twitter.com/triggerdotdev) +### Build and deploy fully‑managed AI agents and workflows + +[Website](https://trigger.dev) | [Docs](https://trigger.dev/docs) | [Issues](https://github.com/triggerdotdev/trigger.dev/issues) | [Feature requests](https://triggerdev.featurebase.app/) | [Public roadmap](https://triggerdev.featurebase.app/roadmap) | [Self-hosting](https://trigger.dev/docs/self-hosting/overview) + +[![Open Source](https://img.shields.io/badge/Open%20Source-%E2%9D%A4-red.svg)](https://github.com/triggerdotdev/trigger.dev) +[![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://github.com/triggerdotdev/trigger.dev/blob/main/LICENSE) +[![npm](https://img.shields.io/npm/v/@trigger.dev/sdk.svg?label=npm)](https://www.npmjs.com/package/@trigger.dev/sdk) +[![SDK downloads](https://img.shields.io/npm/dm/@trigger.dev/sdk.svg?label=SDK%20downloads)](https://www.npmjs.com/package/@trigger.dev/sdk) + +[![Twitter Follow](https://img.shields.io/twitter/follow/triggerdotdev?style=social)](https://twitter.com/triggerdotdev) +[![Discord](https://img.shields.io/discord/1066956501299777596?logo=discord&logoColor=white&color=7289da)](https://discord.gg/nkqV9xBYWy) +[![GitHub stars](https://img.shields.io/github/stars/triggerdotdev/trigger.dev?style=social)](https://github.com/triggerdotdev/trigger.dev)
## About Trigger.dev -Trigger.dev is an open source platform and SDK which allows you to create long-running background jobs. Write normal async code, deploy, and never hit a timeout. +Trigger.dev is the open-source platform for building AI workflows in TypeScript. Long-running tasks with retries, queues, observability, and elastic scaling. + +## The platform designed for building AI agents + +Build [AI agents](https://trigger.dev/product/ai-agents) using all the frameworks, services and LLMs you're used to, deploy them to Trigger.dev and get durable, long-running tasks with retries, queues, observability, and elastic scaling out of the box. + +- **Long-running without timeouts**: Execute your tasks with absolutely no timeouts, unlike AWS Lambda, Vercel, and other serverless platforms. + +- **Durability, retries & queues**: Build rock solid agents and AI applications using our durable tasks, retries, queues and idempotency. -### Key features: +- **True runtime freedom**: Customize your deployed tasks with system packages – run browsers, Python scripts, FFmpeg and more. -- JavaScript and TypeScript SDK -- No timeouts -- Retries (with exponential backoff) -- Queues and concurrency controls -- Schedules and crons -- Full Observability; logs, live trace views, advanced filtering -- React hooks to interact with the Trigger API from your React app -- Pipe LLM streams straight to your users through the Realtime API -- Trigger tasks and display the run status and metadata anywhere in your app -- Custom alerts, get notified by email, Slack or webhooks -- No infrastructure to manage -- Elastic (scaling) -- Works with your existing tech stack +- **Human-in-the-loop**: Programmatically pause your tasks until a human can approve, reject or give feedback. -## In your codebase +- **Realtime apps & streaming**: Move your background jobs to the foreground by subscribing to runs or streaming AI responses to your app. 
+ +- **Observability & monitoring**: Each run has full tracing and logs. Configure error alerts to catch bugs fast. + +## Key features: + +- **[JavaScript and TypeScript SDK](https://trigger.dev/docs/tasks/overview)** - Build background tasks using familiar programming models +- **[Long-running tasks](https://trigger.dev/docs/runs/max-duration)** - Handle resource-heavy tasks without timeouts +- **[Durable cron schedules](https://trigger.dev/docs/tasks/scheduled#scheduled-tasks-cron)** - Create and attach recurring schedules of up to a year +- **[Trigger.dev Realtime](https://trigger.dev/docs/realtime/overview)** - Trigger, subscribe to, and get real-time updates for runs, with LLM streaming support +- **[Build extensions](https://trigger.dev/docs/config/extensions/overview#build-extensions)** - Hook directly into the build system and customize the build process. Run Python scripts, FFmpeg, browsers, and more. +- **[React hooks](https://trigger.dev/docs/frontend/react-hooks#react-hooks)** - Interact with the Trigger.dev API on your frontend using our React hooks package +- **[Batch triggering](https://trigger.dev/docs/triggering#tasks-batchtrigger)** - Use batchTrigger() to initiate multiple runs of a task with custom payloads and options +- **[Structured inputs / outputs](https://trigger.dev/docs/tasks/schemaTask#schematask)** - Define precise data schemas for your tasks with runtime payload validation +- **[Waits](https://trigger.dev/docs/wait)** - Add waits to your tasks to pause execution for a specified duration +- **[Preview branches](https://trigger.dev/docs/deployment/preview-branches)** - Create isolated environments for testing and development. 
Integrates with Vercel and git workflows +- **[Waitpoints](https://trigger.dev/docs/wait-for-token#wait-for-token)** - Add human-in-the-loop judgment at critical decision points without disrupting workflow +- **[Concurrency & queues](https://trigger.dev/docs/queue-concurrency#concurrency-and-queues)** - Set concurrency rules to manage how multiple tasks execute +- **[Multiple environments](https://trigger.dev/docs/how-it-works#dev-mode)** - Support for DEV, PREVIEW, STAGING, and PROD environments +- **[No infrastructure to manage](https://trigger.dev/docs/how-it-works#trigger-dev-architecture)** - Auto-scaling infrastructure that eliminates timeouts and server management +- **[Automatic retries](https://trigger.dev/docs/errors-retrying)** - If your task encounters an uncaught error, we automatically attempt to run it again +- **[Checkpointing](https://trigger.dev/docs/how-it-works#the-checkpoint-resume-system)** - Tasks are inherently durable, thanks to our checkpointing feature +- **[Versioning](https://trigger.dev/docs/versioning)** - Atomic versioning allows you to deploy new versions without affecting running tasks +- **[Machines](https://trigger.dev/docs/machines)** - Configure the number of vCPUs and GBs of RAM you want the task to use +- **[Observability & monitoring](https://trigger.dev/product/observability-and-monitoring)** - Monitor every aspect of your tasks' performance with comprehensive logging and visualization tools +- **[Logging & tracing](https://trigger.dev/docs/logging)** - Comprehensive logging and tracing for all your tasks +- **[Tags](https://trigger.dev/docs/tags#tags)** - Attach up to ten tags to each run, allowing you to filter via the dashboard, realtime, and the SDK +- **[Run metadata](https://trigger.dev/docs/runs/metadata#run-metadata)** - Attach metadata to runs which updates as the run progresses and is available to use in your frontend for live updates +- **[Bulk actions](https://trigger.dev/docs/bulk-actions)** - Perform actions 
on multiple runs simultaneously, including replaying and cancelling +- **[Real-time alerts](https://trigger.dev/docs/troubleshooting-alerts#alerts)** - Choose your preferred notification method for run failures and deployments + +## Write tasks in your codebase Create tasks where they belong: in your codebase. Version control, localhost, test and review like you're already used to. ```ts -import { task } from "@trigger.dev/sdk/v3"; +import { task } from "@trigger.dev/sdk"; //1. You need to export each task export const helloWorld = task({ @@ -58,13 +89,13 @@ Use our SDK to write tasks in your codebase. There's no infrastructure to manage ## Environments -We support `Development`, `Staging`, and `Production` environments, allowing you to test your tasks before deploying them to production. +We support `Development`, `Staging`, `Preview`, and `Production` environments, allowing you to test your tasks before deploying them to production. ## Full visibility of every job run View every task in every run so you can tell exactly what happened. We provide a full trace view of every task run so you can see what happened at every step. 
-![Trace view image](https://imagedelivery.net/3TbraffuDZ4aEf8KWOmI_w/7c1b347f-004c-4482-38a7-3f6fa9c00d00/public) +![Trace view image](https://content.trigger.dev/trace-view.png) # Getting started @@ -73,14 +104,19 @@ The quickest way to get started is to create an account and project in our [web ### Useful links: - [Quick start](https://trigger.dev/docs/quick-start) - get up and running in minutes -- [How it works](https://trigger.dev/docs/v3/how-it-works) - understand how Trigger.dev works under the hood +- [How it works](https://trigger.dev/docs/how-it-works) - understand how Trigger.dev works under the hood - [Guides and examples](https://trigger.dev/docs/guides/introduction) - walk-through guides and code examples for popular frameworks and use cases ## Self-hosting -If you prefer to self-host Trigger.dev, you can follow our [self-hosting guide](https://trigger.dev/docs/v3/open-source-self-hosting#overview). +If you prefer to self-host Trigger.dev, you can follow our [self-hosting guides](https://trigger.dev/docs/self-hosting/overview): + +- [Docker self-hosting guide](https://trigger.dev/docs/self-hosting/docker) - use Docker Compose to spin up a Trigger.dev instance +- [Kubernetes self-hosting guide](https://trigger.dev/docs/self-hosting/kubernetes) - use our official Helm chart to deploy Trigger.dev to your Kubernetes cluster + +## Support and community -We also have a dedicated self-hosting channel in our [Discord server](https://trigger.dev/discord) for support. +We have a large active community in our official [Discord server](https://trigger.dev/discord) for support, including a dedicated channel for self-hosting. 
## Development diff --git a/apps/supervisor/package.json b/apps/supervisor/package.json index 9cce9d5feb..e9609bf154 100644 --- a/apps/supervisor/package.json +++ b/apps/supervisor/package.json @@ -13,6 +13,7 @@ "typecheck": "tsc --noEmit" }, "dependencies": { + "@aws-sdk/client-ecr": "^3.839.0", "@kubernetes/client-node": "^1.0.0", "@trigger.dev/core": "workspace:*", "dockerode": "^4.0.6", diff --git a/apps/supervisor/src/env.ts b/apps/supervisor/src/env.ts index dfe5237912..a225af5ea1 100644 --- a/apps/supervisor/src/env.ts +++ b/apps/supervisor/src/env.ts @@ -76,6 +76,7 @@ const Env = z.object({ KUBERNETES_IMAGE_PULL_SECRETS: z.string().optional(), // csv KUBERNETES_EPHEMERAL_STORAGE_SIZE_LIMIT: z.string().default("10Gi"), KUBERNETES_EPHEMERAL_STORAGE_SIZE_REQUEST: z.string().default("2Gi"), + KUBERNETES_STRIP_IMAGE_DIGEST: BoolEnv.default(false), // Placement tags settings PLACEMENT_TAGS_ENABLED: BoolEnv.default(false), diff --git a/apps/supervisor/src/workloadManager/docker.ts b/apps/supervisor/src/workloadManager/docker.ts index 6aa74a7ecc..4ebbe11ca7 100644 --- a/apps/supervisor/src/workloadManager/docker.ts +++ b/apps/supervisor/src/workloadManager/docker.ts @@ -8,14 +8,16 @@ import { env } from "../env.js"; import { getDockerHostDomain, getRunnerId, normalizeDockerHostUrl } from "../util.js"; import Docker from "dockerode"; import { tryCatch } from "@trigger.dev/core"; +import { ECRAuthService } from "./ecrAuth.js"; export class DockerWorkloadManager implements WorkloadManager { private readonly logger = new SimpleStructuredLogger("docker-workload-manager"); private readonly docker: Docker; private readonly runnerNetworks: string[]; - private readonly auth?: Docker.AuthConfig; + private readonly staticAuth?: Docker.AuthConfig; private readonly platformOverride?: string; + private readonly ecrAuthService?: ECRAuthService; constructor(private opts: WorkloadManagerOptions) { this.docker = new Docker({ @@ -44,13 +46,18 @@ export class DockerWorkloadManager 
implements WorkloadManager { url: env.DOCKER_REGISTRY_URL, }); - this.auth = { + this.staticAuth = { username: env.DOCKER_REGISTRY_USERNAME, password: env.DOCKER_REGISTRY_PASSWORD, serveraddress: env.DOCKER_REGISTRY_URL, }; + } else if (ECRAuthService.hasAWSCredentials()) { + this.logger.info("πŸ‹ AWS credentials found, initializing ECR auth service"); + this.ecrAuthService = new ECRAuthService(); } else { - this.logger.warn("πŸ‹ No Docker registry credentials provided, skipping auth"); + this.logger.warn( + "πŸ‹ No Docker registry credentials or AWS credentials provided, skipping auth" + ); } } @@ -160,9 +167,12 @@ export class DockerWorkloadManager implements WorkloadManager { imageArchitecture: inspectResult?.Architecture, }); + // Get auth config (static or ECR) + const authConfig = await this.getAuthConfig(); + // Ensure the image is present const [createImageError, imageResponseReader] = await tryCatch( - this.docker.createImage(this.auth, { + this.docker.createImage(authConfig, { fromImage: imageRef, ...(this.platformOverride ? 
{ platform: this.platformOverride } : {}), }) @@ -216,6 +226,26 @@ export class DockerWorkloadManager implements WorkloadManager { logger.debug("create succeeded", { startResult, containerId: container.id }); } + /** + * Get authentication config for Docker operations + * Uses static credentials if available, otherwise attempts ECR auth + */ + private async getAuthConfig(): Promise { + // Use static credentials if available + if (this.staticAuth) { + return this.staticAuth; + } + + // Use ECR auth if service is available + if (this.ecrAuthService) { + const ecrAuth = await this.ecrAuthService.getAuthConfig(); + return ecrAuth || undefined; + } + + // No auth available + return undefined; + } + private async attachContainerToNetworks({ containerId, networkNames, diff --git a/apps/supervisor/src/workloadManager/ecrAuth.ts b/apps/supervisor/src/workloadManager/ecrAuth.ts new file mode 100644 index 0000000000..33e98f6319 --- /dev/null +++ b/apps/supervisor/src/workloadManager/ecrAuth.ts @@ -0,0 +1,144 @@ +import { ECRClient, GetAuthorizationTokenCommand } from "@aws-sdk/client-ecr"; +import { SimpleStructuredLogger } from "@trigger.dev/core/v3/utils/structuredLogger"; +import { tryCatch } from "@trigger.dev/core"; +import Docker from "dockerode"; + +interface ECRTokenCache { + token: string; + username: string; + serverAddress: string; + expiresAt: Date; +} + +export class ECRAuthService { + private readonly logger = new SimpleStructuredLogger("ecr-auth-service"); + private readonly ecrClient: ECRClient; + private tokenCache: ECRTokenCache | null = null; + + constructor() { + this.ecrClient = new ECRClient(); + + this.logger.info("πŸ” ECR Auth Service initialized", { + region: this.ecrClient.config.region, + }); + } + + /** + * Check if we have AWS credentials configured + */ + static hasAWSCredentials(): boolean { + if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) { + return true; + } + + if ( + process.env.AWS_PROFILE || + 
process.env.AWS_ROLE_ARN || + process.env.AWS_WEB_IDENTITY_TOKEN_FILE + ) { + return true; + } + + return false; + } + + /** + * Check if the current token is still valid with a 10-minute buffer + */ + private isTokenValid(): boolean { + if (!this.tokenCache) { + return false; + } + + const now = new Date(); + const bufferMs = 10 * 60 * 1000; // 10 minute buffer before expiration + return now < new Date(this.tokenCache.expiresAt.getTime() - bufferMs); + } + + /** + * Get a fresh ECR authorization token from AWS + */ + private async fetchNewToken(): Promise { + const [error, response] = await tryCatch( + this.ecrClient.send(new GetAuthorizationTokenCommand({})) + ); + + if (error) { + this.logger.error("Failed to get ECR authorization token", { error }); + return null; + } + + const authData = response.authorizationData?.[0]; + if (!authData?.authorizationToken || !authData.proxyEndpoint) { + this.logger.error("Invalid ECR authorization response", { authData }); + return null; + } + + // Decode the base64 token to get username:password + const decoded = Buffer.from(authData.authorizationToken, "base64").toString("utf-8"); + const [username, password] = decoded.split(":", 2); + + if (!username || !password) { + this.logger.error("Failed to parse ECR authorization token"); + return null; + } + + const expiresAt = authData.expiresAt || new Date(Date.now() + 12 * 60 * 60 * 1000); // Default 12 hours + + const tokenCache: ECRTokenCache = { + token: password, + username, + serverAddress: authData.proxyEndpoint, + expiresAt, + }; + + this.logger.info("πŸ” Successfully fetched ECR token", { + username, + serverAddress: authData.proxyEndpoint, + expiresAt: expiresAt.toISOString(), + }); + + return tokenCache; + } + + /** + * Get ECR auth config for Docker operations + * Returns cached token if valid, otherwise fetches a new one + */ + async getAuthConfig(): Promise { + // Check if cached token is still valid + if (this.isTokenValid()) { + this.logger.debug("Using cached ECR 
token"); + return { + username: this.tokenCache!.username, + password: this.tokenCache!.token, + serveraddress: this.tokenCache!.serverAddress, + }; + } + + // Fetch new token + this.logger.info("Fetching new ECR authorization token"); + const newToken = await this.fetchNewToken(); + + if (!newToken) { + return null; + } + + // Cache the new token + this.tokenCache = newToken; + + return { + username: newToken.username, + password: newToken.token, + serveraddress: newToken.serverAddress, + }; + } + + /** + * Clear the cached token (useful for testing or forcing refresh) + */ + clearCache(): void { + this.tokenCache = null; + this.logger.debug("ECR token cache cleared"); + } +} diff --git a/apps/supervisor/src/workloadManager/kubernetes.ts b/apps/supervisor/src/workloadManager/kubernetes.ts index e738177cbc..b38e6c5b46 100644 --- a/apps/supervisor/src/workloadManager/kubernetes.ts +++ b/apps/supervisor/src/workloadManager/kubernetes.ts @@ -49,6 +49,20 @@ export class KubernetesWorkloadManager implements WorkloadManager { }; } + private stripImageDigest(imageRef: string): string { + if (!env.KUBERNETES_STRIP_IMAGE_DIGEST) { + return imageRef; + } + + const atIndex = imageRef.lastIndexOf("@"); + + if (atIndex === -1) { + return imageRef; + } + + return imageRef.substring(0, atIndex); + } + async create(opts: WorkloadManagerCreateOptions) { this.logger.log("[KubernetesWorkloadManager] Creating container", { opts }); @@ -74,7 +88,7 @@ export class KubernetesWorkloadManager implements WorkloadManager { containers: [ { name: "run-controller", - image: opts.image, + image: this.stripImageDigest(opts.image), ports: [ { containerPort: 8000, diff --git a/apps/webapp/app/components/Shortcuts.tsx b/apps/webapp/app/components/Shortcuts.tsx index 8349ed970f..718166b55f 100644 --- a/apps/webapp/app/components/Shortcuts.tsx +++ b/apps/webapp/app/components/Shortcuts.tsx @@ -123,7 +123,7 @@ function ShortcutContent() { - + diff --git a/apps/webapp/app/db.server.ts 
b/apps/webapp/app/db.server.ts index c99b2e2c43..8435182e63 100644 --- a/apps/webapp/app/db.server.ts +++ b/apps/webapp/app/db.server.ts @@ -201,6 +201,7 @@ function getClient() { message: log.message, target: log.target, }, + ignoreError: true, }); }); } diff --git a/apps/webapp/app/env.server.ts b/apps/webapp/app/env.server.ts index 1a49acddbc..fdd343c90b 100644 --- a/apps/webapp/app/env.server.ts +++ b/apps/webapp/app/env.server.ts @@ -410,6 +410,19 @@ const EnvironmentSchema = z.object({ MARQS_QUEUE_AGE_RANDOMIZATION_BIAS: z.coerce.number().default(0.25), MARQS_REUSE_SNAPSHOT_COUNT: z.coerce.number().int().default(0), MARQS_MAXIMUM_ENV_COUNT: z.coerce.number().int().optional(), + MARQS_SHARED_WORKER_QUEUE_CONSUMER_INTERVAL_MS: z.coerce.number().int().default(250), + MARQS_SHARED_WORKER_QUEUE_MAX_MESSAGE_COUNT: z.coerce.number().int().default(10), + + MARQS_SHARED_WORKER_QUEUE_EAGER_DEQUEUE_ENABLED: z.string().default("0"), + MARQS_WORKER_ENABLED: z.string().default("0"), + MARQS_WORKER_COUNT: z.coerce.number().int().default(2), + MARQS_WORKER_CONCURRENCY_LIMIT: z.coerce.number().int().default(50), + MARQS_WORKER_CONCURRENCY_TASKS_PER_WORKER: z.coerce.number().int().default(5), + MARQS_WORKER_POLL_INTERVAL_MS: z.coerce.number().int().default(100), + MARQS_WORKER_IMMEDIATE_POLL_INTERVAL_MS: z.coerce.number().int().default(100), + MARQS_WORKER_SHUTDOWN_TIMEOUT_MS: z.coerce.number().int().default(60_000), + MARQS_SHARED_WORKER_QUEUE_COOLOFF_COUNT_THRESHOLD: z.coerce.number().int().default(10), + MARQS_SHARED_WORKER_QUEUE_COOLOFF_PERIOD_MS: z.coerce.number().int().default(5_000), PROD_TASK_HEARTBEAT_INTERVAL_MS: z.coerce.number().int().optional(), @@ -436,6 +449,7 @@ const EnvironmentSchema = z.object({ EVENT_LOOP_MONITOR_ENABLED: z.string().default("1"), MAXIMUM_LIVE_RELOADING_EVENTS: z.coerce.number().int().default(1000), MAXIMUM_TRACE_SUMMARY_VIEW_COUNT: z.coerce.number().int().default(25_000), + MAXIMUM_TRACE_DETAILED_SUMMARY_VIEW_COUNT: 
z.coerce.number().int().default(10_000), TASK_PAYLOAD_OFFLOAD_THRESHOLD: z.coerce.number().int().default(524_288), // 512KB TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(3_145_728), // 3MB BATCH_TASK_PAYLOAD_MAXIMUM_SIZE: z.coerce.number().int().default(1_000_000), // 1MB @@ -1059,6 +1073,8 @@ const EnvironmentSchema = z.object({ // AI Run Filter AI_RUN_FILTER_MODEL: z.string().optional(), + + EVENT_LOOP_MONITOR_THRESHOLD_MS: z.coerce.number().int().default(100), }); export type Environment = z.infer; diff --git a/apps/webapp/app/eventLoopMonitor.server.ts b/apps/webapp/app/eventLoopMonitor.server.ts index db25a28137..42e982bdb9 100644 --- a/apps/webapp/app/eventLoopMonitor.server.ts +++ b/apps/webapp/app/eventLoopMonitor.server.ts @@ -1,10 +1,12 @@ import { createHook } from "node:async_hooks"; import { singleton } from "./utils/singleton"; import { tracer } from "./v3/tracer.server"; +import { env } from "./env.server"; +import { context, Context } from "@opentelemetry/api"; -const THRESHOLD_NS = 1e8; // 100ms +const THRESHOLD_NS = env.EVENT_LOOP_MONITOR_THRESHOLD_MS * 1e6; -const cache = new Map(); +const cache = new Map(); function init(asyncId: number, type: string, triggerAsyncId: number, resource: any) { cache.set(asyncId, { @@ -26,6 +28,7 @@ function before(asyncId: number) { cache.set(asyncId, { ...cached, start: process.hrtime(), + parentCtx: context.active(), }); } @@ -47,13 +50,17 @@ function after(asyncId: number) { if (diffNs > THRESHOLD_NS) { const time = diffNs / 1e6; // in ms - const newSpan = tracer.startSpan("event-loop-blocked", { - startTime: new Date(new Date().getTime() - time), - attributes: { - asyncType: cached.type, - label: "EventLoopMonitor", + const newSpan = tracer.startSpan( + "event-loop-blocked", + { + startTime: new Date(new Date().getTime() - time), + attributes: { + asyncType: cached.type, + label: "EventLoopMonitor", + }, }, - }); + cached.parentCtx + ); newSpan.end(); } diff --git 
a/apps/webapp/app/models/runtimeEnvironment.server.ts b/apps/webapp/app/models/runtimeEnvironment.server.ts index adde2db5ca..67119acd08 100644 --- a/apps/webapp/app/models/runtimeEnvironment.server.ts +++ b/apps/webapp/app/models/runtimeEnvironment.server.ts @@ -37,7 +37,7 @@ export async function findEnvironmentByApiKey( if (environment.type === "PREVIEW") { if (!branchName) { - logger.error("findEnvironmentByApiKey(): Preview env with no branch name provided", { + logger.warn("findEnvironmentByApiKey(): Preview env with no branch name provided", { environmentId: environment.id, }); return null; diff --git a/apps/webapp/app/root.tsx b/apps/webapp/app/root.tsx index f46d3a65ff..d481a69ab4 100644 --- a/apps/webapp/app/root.tsx +++ b/apps/webapp/app/root.tsx @@ -23,7 +23,7 @@ export const links: LinksFunction = () => { export const meta: MetaFunction = ({ data }) => { const typedData = data as UseDataFunctionReturn; return [ - { title: `Trigger.dev${appEnvTitleTag(typedData.appEnv)}` }, + { title: typedData?.appEnv ? 
`Trigger.dev${appEnvTitleTag(typedData.appEnv)}` : "Trigger.dev" }, { name: "viewport", content: "width=1024, initial-scale=1", @@ -84,11 +84,13 @@ export function ErrorBoundary() { - - - - - + + + + + + + diff --git a/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx b/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx index f2c9479361..df75c25ff7 100644 --- a/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx +++ b/apps/webapp/app/routes/account.authorization-code.$authorizationCode/route.tsx @@ -1,11 +1,8 @@ import { CheckCircleIcon } from "@heroicons/react/24/solid"; import { LoaderFunctionArgs } from "@remix-run/server-runtime"; -import { title } from "process"; import { typedjson, useTypedLoaderData } from "remix-typedjson"; import { z } from "zod"; -import { ErrorIcon } from "~/assets/icons/ErrorIcon"; import { AppContainer, MainCenteredContainer } from "~/components/layout/AppLayout"; -import { LinkButton } from "~/components/primitives/Buttons"; import { Callout } from "~/components/primitives/Callout"; import { Header1 } from "~/components/primitives/Headers"; import { Icon } from "~/components/primitives/Icon"; @@ -13,12 +10,16 @@ import { Paragraph } from "~/components/primitives/Paragraph"; import { logger } from "~/services/logger.server"; import { createPersonalAccessTokenFromAuthorizationCode } from "~/services/personalAccessToken.server"; import { requireUserId } from "~/services/session.server"; -import { rootPath } from "~/utils/pathBuilder"; const ParamsSchema = z.object({ authorizationCode: z.string(), }); +const SearchParamsSchema = z.object({ + source: z.string().optional(), + clientName: z.string().optional(), +}); + export const loader = async ({ request, params }: LoaderFunctionArgs) => { const userId = await requireUserId(request); @@ -32,6 +33,14 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { }); } + const url = new 
URL(request.url); + const searchObject = Object.fromEntries(url.searchParams.entries()); + + const searchParams = SearchParamsSchema.safeParse(searchObject); + + const source = (searchParams.success ? searchParams.data.source : undefined) ?? "cli"; + const clientName = (searchParams.success ? searchParams.data.clientName : undefined) ?? "unknown"; + try { const personalAccessToken = await createPersonalAccessTokenFromAuthorizationCode( parsedParams.data.authorizationCode, @@ -39,6 +48,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { ); return typedjson({ success: true as const, + source, + clientName, }); } catch (error) { if (error instanceof Response) { @@ -49,6 +60,8 @@ export const loader = async ({ request, params }: LoaderFunctionArgs) => { return typedjson({ success: false as const, error: error.message, + source, + clientName, }); } @@ -73,7 +86,7 @@ export default function Page() { Successfully authenticated - Return to your terminal to continue. + {getInstructionsForSource(result.source, result.clientName)} ) : (
@@ -91,3 +104,21 @@ export default function Page() { ); } + +const prettyClientNames: Record = { + "claude-code": "Claude Code", + "cursor-vscode": "Cursor", + "Visual Studio Code": "VSCode", + "windsurf-client": "Windsurf", + "claude-ai": "Claude Desktop", +}; + +function getInstructionsForSource(source: string, clientName: string) { + if (source === "mcp") { + if (clientName) { + return `Return to your ${prettyClientNames[clientName] ?? clientName} to continue.`; + } + } + + return `Return to your terminal to continue.`; +} diff --git a/apps/webapp/app/routes/api.v1.deployments.ts b/apps/webapp/app/routes/api.v1.deployments.ts index 65410761b9..c80e180d89 100644 --- a/apps/webapp/app/routes/api.v1.deployments.ts +++ b/apps/webapp/app/routes/api.v1.deployments.ts @@ -1,10 +1,13 @@ import { ActionFunctionArgs, json } from "@remix-run/server-runtime"; import { + ApiDeploymentListSearchParams, InitializeDeploymentRequestBody, InitializeDeploymentResponseBody, } from "@trigger.dev/core/v3"; +import { $replica } from "~/db.server"; import { authenticateApiRequest } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; import { ServiceValidationError } from "~/v3/services/baseService.server"; import { InitializeDeploymentService } from "~/v3/services/initializeDeployment.server"; @@ -60,3 +63,119 @@ export async function action({ request, params }: ActionFunctionArgs) { } } } + +export const loader = createLoaderApiRoute( + { + searchParams: ApiDeploymentListSearchParams, + allowJWT: true, + corsStrategy: "none", + authorization: { + action: "read", + resource: () => ({ deployments: "list" }), + superScopes: ["read:deployments", "read:all", "admin"], + }, + findResource: async () => 1, // This is a dummy function, we don't need to find a resource + }, + async ({ searchParams, authentication }) => { + const limit = 
Math.max(Math.min(searchParams["page[size]"] ?? 20, 100), 5); + + const afterDeployment = searchParams["page[after]"] + ? await $replica.workerDeployment.findFirst({ + where: { + friendlyId: searchParams["page[after]"], + environmentId: authentication.environment.id, + }, + }) + : undefined; + + const deployments = await $replica.workerDeployment.findMany({ + where: { + environmentId: authentication.environment.id, + ...(afterDeployment ? { id: { lt: afterDeployment.id } } : {}), + ...getCreatedAtFilter(searchParams), + ...(searchParams.status ? { status: searchParams.status } : {}), + }, + orderBy: { + id: "desc", + }, + take: limit + 1, + }); + + const hasMore = deployments.length > limit; + const nextCursor = hasMore ? deployments[limit - 1].friendlyId : undefined; + const data = hasMore ? deployments.slice(0, limit) : deployments; + + return json({ + data: data.map((deployment) => ({ + id: deployment.friendlyId, + createdAt: deployment.createdAt, + shortCode: deployment.shortCode, + version: deployment.version.toString(), + runtime: deployment.runtime, + runtimeVersion: deployment.runtimeVersion, + status: deployment.status, + deployedAt: deployment.deployedAt, + git: deployment.git, + error: deployment.errorData ?? 
undefined, + })), + pagination: { + next: nextCursor, + }, + }); + } +); + +import parseDuration from "parse-duration"; +import { parseDate } from "@trigger.dev/core/v3/isomorphic"; + +function getCreatedAtFilter(searchParams: ApiDeploymentListSearchParams) { + if (searchParams.period) { + const duration = parseDuration(searchParams.period, "ms"); + + if (!duration) { + throw new ServiceValidationError( + `Invalid search query parameter: period=${searchParams.period}`, + 400 + ); + } + + return { + createdAt: { + gte: new Date(Date.now() - duration), + lte: new Date(), + }, + }; + } + + if (searchParams.from && searchParams.to) { + const fromDate = safeDateFromString(searchParams.from, "from"); + const toDate = safeDateFromString(searchParams.to, "to"); + + return { + createdAt: { + gte: fromDate, + lte: toDate, + }, + }; + } + + if (searchParams.from) { + const fromDate = safeDateFromString(searchParams.from, "from"); + return { + createdAt: { + gte: fromDate, + }, + }; + } + + return {}; +} + +function safeDateFromString(value: string, paramName: string) { + const date = parseDate(value); + + if (!date) { + throw new ServiceValidationError(`Invalid search query parameter: ${paramName}=${value}`, 400); + } + return date; +} diff --git a/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts b/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts new file mode 100644 index 0000000000..9a23d12909 --- /dev/null +++ b/apps/webapp/app/routes/api.v1.orgs.$orgParam.projects.ts @@ -0,0 +1,138 @@ +import type { ActionFunctionArgs, LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json } from "@remix-run/server-runtime"; +import { + CreateProjectRequestBody, + GetProjectResponseBody, + GetProjectsResponseBody, +} from "@trigger.dev/core/v3"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { createProject } from "~/models/project.server"; +import { logger } from "~/services/logger.server"; +import { 
authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { isCuid } from "cuid"; + +const ParamsSchema = z.object({ + orgParam: z.string(), +}); + +export async function loader({ request, params }: LoaderFunctionArgs) { + logger.info("get projects", { url: request.url }); + + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const { orgParam } = ParamsSchema.parse(params); + + const projects = await prisma.project.findMany({ + where: { + organization: { + ...orgParamWhereClause(orgParam), + deletedAt: null, + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + version: "V3", + deletedAt: null, + }, + include: { + organization: true, + }, + }); + + if (!projects) { + return json({ error: "Projects not found" }, { status: 404 }); + } + + const result: GetProjectsResponseBody = projects.map((project) => ({ + id: project.id, + externalRef: project.externalRef, + name: project.name, + slug: project.slug, + createdAt: project.createdAt, + organization: { + id: project.organization.id, + title: project.organization.title, + slug: project.organization.slug, + createdAt: project.organization.createdAt, + }, + })); + + return json(result); +} + +export async function action({ request, params }: ActionFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const { orgParam } = ParamsSchema.parse(params); + + const organization = await prisma.organization.findFirst({ + where: { + ...orgParamWhereClause(orgParam), + deletedAt: null, + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }); + + if (!organization) { + return json({ error: "Organization not 
found" }, { status: 404 }); + } + + const body = await request.json(); + const parsedBody = CreateProjectRequestBody.safeParse(body); + + if (!parsedBody.success) { + return json({ error: "Invalid request body" }, { status: 400 }); + } + + const project = await createProject({ + organizationSlug: organization.slug, + name: parsedBody.data.name, + userId: authenticationResult.userId, + version: "v3", + }); + + const result: GetProjectResponseBody = { + id: project.id, + externalRef: project.externalRef, + name: project.name, + slug: project.slug, + createdAt: project.createdAt, + organization: { + id: project.organization.id, + title: project.organization.title, + slug: project.organization.slug, + createdAt: project.organization.createdAt, + }, + }; + + return json(result); +} + +function orgParamWhereClause(orgParam: string) { + // If the orgParam is an ID, or if it's a slug + // IDs are cuid + if (isCuid(orgParam)) { + return { + id: orgParam, + }; + } + + return { + slug: orgParam, + }; +} diff --git a/apps/webapp/app/routes/api.v1.orgs.ts b/apps/webapp/app/routes/api.v1.orgs.ts new file mode 100644 index 0000000000..626162f234 --- /dev/null +++ b/apps/webapp/app/routes/api.v1.orgs.ts @@ -0,0 +1,37 @@ +import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { json } from "@remix-run/server-runtime"; +import { GetOrgsResponseBody } from "@trigger.dev/core/v3"; +import { prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; + +export async function loader({ request }: LoaderFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const orgs = await prisma.organization.findMany({ + where: { + deletedAt: null, + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }); + + if 
(!orgs) { + return json({ error: "Orgs not found" }, { status: 404 }); + } + + const result: GetOrgsResponseBody = orgs.map((org) => ({ + id: org.id, + title: org.title, + slug: org.slug, + createdAt: org.createdAt, + })); + + return json(result); +} diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts new file mode 100644 index 0000000000..2db054d4d4 --- /dev/null +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.jwt.ts @@ -0,0 +1,109 @@ +import { ActionFunctionArgs, json } from "@remix-run/node"; +import { generateJWT as internal_generateJWT } from "@trigger.dev/core/v3"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env"; + +const ParamsSchema = z.object({ + projectRef: z.string(), + env: z.enum(["dev", "staging", "prod", "preview"]), +}); + +const RequestBodySchema = z.object({ + claims: z + .object({ + scopes: z.array(z.string()).default([]), + }) + .optional(), + expirationTime: z.union([z.number(), z.string()]).optional(), +}); + +export async function action({ request, params }: ActionFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const parsedParams = ParamsSchema.safeParse(params); + + if (!parsedParams.success) { + return json({ error: "Invalid Params" }, { status: 400 }); + } + + const { projectRef, env } = parsedParams.data; + + const project = await prisma.project.findFirst({ + where: { + externalRef: projectRef, + organization: { + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }, + }); + + if (!project) { + return json({ error: "Project not found" }, { 
status: 404 }); + } + + const envResult = await getEnvironmentFromEnv({ + projectId: project.id, + userId: authenticationResult.userId, + env, + }); + + if (!envResult.success) { + return json({ error: envResult.error }, { status: 404 }); + } + + const runtimeEnv = envResult.environment; + + const parsedBody = RequestBodySchema.safeParse(await request.json()); + + if (!parsedBody.success) { + return json( + { error: "Invalid request body", issues: parsedBody.error.issues }, + { status: 400 } + ); + } + + const triggerBranch = request.headers.get("x-trigger-branch") ?? undefined; + + let previewBranchEnvironmentId: string | undefined; + + if (triggerBranch) { + const previewBranch = await prisma.runtimeEnvironment.findFirst({ + where: { + projectId: project.id, + branchName: triggerBranch, + parentEnvironmentId: runtimeEnv.id, + archivedAt: null, + }, + }); + + if (previewBranch) { + previewBranchEnvironmentId = previewBranch.id; + } else { + return json({ error: `Preview branch ${triggerBranch} not found` }, { status: 404 }); + } + } + + const claims = { + sub: previewBranchEnvironmentId ?? runtimeEnv.id, + pub: true, + ...parsedBody.data.claims, + }; + + const jwt = await internal_generateJWT({ + secretKey: runtimeEnv.apiKey, + payload: claims, + expirationTime: parsedBody.data.expirationTime ?? 
"1h", + }); + + return json({ token: jwt }); +} diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts index cb5adeaf0d..c45a3c55ed 100644 --- a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.ts @@ -70,14 +70,16 @@ export async function loader({ request, params }: LoaderFunctionArgs) { return json(result); } -async function getEnvironmentFromEnv({ +export async function getEnvironmentFromEnv({ projectId, userId, env, + branch, }: { projectId: string; userId: string; env: ParamsSchema["env"]; + branch?: string; }): Promise< | { success: true; @@ -126,6 +128,49 @@ async function getEnvironmentFromEnv({ break; } + if (slug === "preview") { + const previewEnvironment = await prisma.runtimeEnvironment.findFirst({ + where: { + projectId, + slug: "preview", + }, + }); + + if (!previewEnvironment) { + return { + success: false, + error: "Preview environment not found", + }; + } + + // If no branch is provided, just return the parent preview environment + if (!branch) { + return { + success: true, + environment: previewEnvironment, + }; + } + + const branchEnvironment = await prisma.runtimeEnvironment.findFirst({ + where: { + parentEnvironmentId: previewEnvironment.id, + branchName: branch, + }, + }); + + if (!branchEnvironment) { + return { + success: false, + error: `Preview branch ${branch} not found`, + }; + } + + return { + success: true, + environment: branchEnvironment, + }; + } + const environment = await prisma.runtimeEnvironment.findFirst({ where: { projectId, diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts new file mode 100644 index 0000000000..b26923716d --- /dev/null +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.$env.workers.$tagName.ts @@ -0,0 +1,141 @@ +import { json, type 
LoaderFunctionArgs } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { $replica, prisma } from "~/db.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { findCurrentWorkerFromEnvironment } from "~/v3/models/workerDeployment.server"; +import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env"; +import { GetWorkerByTagResponse } from "@trigger.dev/core/v3/schemas"; +import { env as $env } from "~/env.server"; +import { v3RunsPath } from "~/utils/pathBuilder"; + +const ParamsSchema = z.object({ + projectRef: z.string(), + tagName: z.string(), + env: z.enum(["dev", "staging", "prod", "preview"]), +}); + +const HeadersSchema = z.object({ + "x-trigger-branch": z.string().optional(), +}); + +type ParamsSchema = z.infer; + +export async function loader({ request, params }: LoaderFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const parsedParams = ParamsSchema.safeParse(params); + + if (!parsedParams.success) { + return json({ error: "Invalid Params" }, { status: 400 }); + } + + const parsedHeaders = HeadersSchema.safeParse(Object.fromEntries(request.headers)); + + const branch = parsedHeaders.success ? 
parsedHeaders.data["x-trigger-branch"] : undefined; + + const { projectRef, env } = parsedParams.data; + + const project = await prisma.project.findFirst({ + where: { + externalRef: projectRef, + organization: { + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }, + select: { + id: true, + slug: true, + organization: { + select: { + slug: true, + }, + }, + }, + }); + + if (!project) { + return json({ error: "Project not found" }, { status: 404 }); + } + + const envResult = await getEnvironmentFromEnv({ + projectId: project.id, + userId: authenticationResult.userId, + env, + branch, + }); + + if (!envResult.success) { + return json({ error: envResult.error }, { status: 404 }); + } + + const runtimeEnv = envResult.environment; + + const currentWorker = await findCurrentWorkerFromEnvironment( + { + id: runtimeEnv.id, + type: runtimeEnv.type, + }, + $replica, + params.tagName + ); + + if (!currentWorker) { + return json({ error: "Worker not found" }, { status: 404 }); + } + + const tasks = await $replica.backgroundWorkerTask.findMany({ + where: { + workerId: currentWorker.id, + }, + select: { + friendlyId: true, + slug: true, + filePath: true, + triggerSource: true, + createdAt: true, + payloadSchema: true, + }, + orderBy: { + slug: "asc", + }, + }); + + const urls = { + runs: `${$env.APP_ORIGIN}${v3RunsPath( + { slug: project.organization.slug }, + { slug: project.slug }, + { slug: runtimeEnv.slug }, + { versions: [currentWorker.version] } + )}`, + }; + + // Prepare the response object + const response: GetWorkerByTagResponse = { + worker: { + id: currentWorker.friendlyId, + version: currentWorker.version, + engine: currentWorker.engine, + sdkVersion: currentWorker.sdkVersion, + cliVersion: currentWorker.cliVersion, + tasks: tasks.map((task) => ({ + id: task.friendlyId, + slug: task.slug, + filePath: task.filePath, + triggerSource: task.triggerSource, + createdAt: task.createdAt, + payloadSchema: task.payloadSchema, + })), + }, + urls, + 
}; + + return json(response); +} diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts index 21654580bf..6ae6a133e9 100644 --- a/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.branches.ts @@ -1,4 +1,4 @@ -import { json, type ActionFunctionArgs } from "@remix-run/server-runtime"; +import { json, LoaderFunctionArgs, type ActionFunctionArgs } from "@remix-run/server-runtime"; import { tryCatch, UpsertBranchRequestBody } from "@trigger.dev/core/v3"; import { z } from "zod"; import { prisma } from "~/db.server"; @@ -93,3 +93,82 @@ export async function action({ request, params }: ActionFunctionArgs) { return json(result.branch); } + +export async function loader({ request, params }: LoaderFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const parsedParams = ParamsSchema.safeParse(params); + + if (!parsedParams.success) { + return json({ error: "Invalid Params" }, { status: 400 }); + } + + const { projectRef } = parsedParams.data; + + const project = await prisma.project.findFirst({ + select: { + id: true, + }, + where: { + externalRef: projectRef, + organization: { + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }, + }); + + if (!project) { + return json({ error: "Project not found" }, { status: 404 }); + } + + const previewEnvironment = await prisma.runtimeEnvironment.findFirst({ + select: { + id: true, + }, + where: { + projectId: project.id, + slug: "preview", + }, + }); + + if (!previewEnvironment) { + return json( + { error: "You don't have preview branches setup. Go to the dashboard to enable them." 
}, + { status: 400 } + ); + } + + const branches = await prisma.runtimeEnvironment.findMany({ + where: { + projectId: project.id, + parentEnvironmentId: previewEnvironment.id, + archivedAt: null, + }, + select: { + id: true, + branchName: true, + createdAt: true, + updatedAt: true, + git: true, + paused: true, + }, + }); + + return json({ + branches: branches.map((branch) => ({ + id: branch.id, + name: branch.branchName ?? "main", + createdAt: branch.createdAt, + updatedAt: branch.updatedAt, + git: branch.git ?? undefined, + isPaused: branch.paused, + })), + }); +} diff --git a/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts b/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts new file mode 100644 index 0000000000..58171cc5bb --- /dev/null +++ b/apps/webapp/app/routes/api.v1.projects.$projectRef.dev-status.ts @@ -0,0 +1,59 @@ +import { json, type LoaderFunctionArgs } from "@remix-run/node"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { devPresence } from "~/presenters/v3/DevPresence.server"; +import { authenticateApiRequestWithPersonalAccessToken } from "~/services/personalAccessToken.server"; +import { getEnvironmentFromEnv } from "./api.v1.projects.$projectRef.$env"; + +const ParamsSchema = z.object({ + projectRef: z.string(), +}); + +export async function loader({ request, params }: LoaderFunctionArgs) { + const authenticationResult = await authenticateApiRequestWithPersonalAccessToken(request); + + if (!authenticationResult) { + return json({ error: "Invalid or Missing Access Token" }, { status: 401 }); + } + + const parsedParams = ParamsSchema.safeParse(params); + + if (!parsedParams.success) { + return json({ error: "Invalid Params" }, { status: 400 }); + } + + const { projectRef } = parsedParams.data; + + const project = await prisma.project.findFirst({ + where: { + externalRef: projectRef, + organization: { + members: { + some: { + userId: authenticationResult.userId, + }, + }, + }, + }, + }); + + 
if (!project) { + return json({ error: "Project not found" }, { status: 404 }); + } + + const envResult = await getEnvironmentFromEnv({ + projectId: project.id, + userId: authenticationResult.userId, + env: "dev", + }); + + if (!envResult.success) { + return json({ error: envResult.error }, { status: 404 }); + } + + const runtimeEnv = envResult.environment; + + const isConnected = await devPresence.isConnected(runtimeEnv.id); + + return json({ isConnected }); +} diff --git a/apps/webapp/app/routes/api.v1.projects.ts b/apps/webapp/app/routes/api.v1.projects.ts index 3962560f5c..3a12417dce 100644 --- a/apps/webapp/app/routes/api.v1.projects.ts +++ b/apps/webapp/app/routes/api.v1.projects.ts @@ -1,4 +1,4 @@ -import type { LoaderFunctionArgs } from "@remix-run/server-runtime"; +import type { ActionFunctionArgs, LoaderFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { GetProjectsResponseBody } from "@trigger.dev/core/v3"; import { prisma } from "~/db.server"; diff --git a/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts b/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts new file mode 100644 index 0000000000..8ab42d8c3c --- /dev/null +++ b/apps/webapp/app/routes/api.v1.runs.$runId.trace.ts @@ -0,0 +1,57 @@ +import { json } from "@remix-run/server-runtime"; +import { BatchId } from "@trigger.dev/core/v3/isomorphic"; +import { z } from "zod"; +import { $replica } from "~/db.server"; +import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; +import { eventRepository } from "~/v3/eventRepository.server"; +import { getTaskEventStoreTableForRun } from "~/v3/taskEventStore.server"; + +const ParamsSchema = z.object({ + runId: z.string(), // This is the run friendly ID +}); + +export const loader = createLoaderApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "all", + findResource: (params, auth) => { + return $replica.taskRun.findFirst({ + where: { + friendlyId: 
params.runId, + runtimeEnvironmentId: auth.environment.id, + }, + }); + }, + shouldRetryNotFound: true, + authorization: { + action: "read", + resource: (run) => ({ + runs: run.friendlyId, + tags: run.runTags, + batch: run.batchId ? BatchId.toFriendlyId(run.batchId) : undefined, + tasks: run.taskIdentifier, + }), + superScopes: ["read:runs", "read:all", "admin"], + }, + }, + async ({ resource: run }) => { + const traceSummary = await eventRepository.getTraceDetailedSummary( + getTaskEventStoreTableForRun(run), + run.traceId, + run.createdAt, + run.completedAt ?? undefined + ); + + if (!traceSummary) { + return json({ error: "Trace not found" }, { status: 404 }); + } + + return json( + { + trace: traceSummary, + }, + { status: 200 } + ); + } +); diff --git a/apps/webapp/app/routes/api.v1.runs.ts b/apps/webapp/app/routes/api.v1.runs.ts index 17a664f6ef..b5191ee259 100644 --- a/apps/webapp/app/routes/api.v1.runs.ts +++ b/apps/webapp/app/routes/api.v1.runs.ts @@ -3,6 +3,7 @@ import { ApiRunListPresenter, ApiRunListSearchParams, } from "~/presenters/v3/ApiRunListPresenter.server"; +import { logger } from "~/services/logger.server"; import { createLoaderApiRoute } from "~/services/routeBuilders/apiBuilder.server"; export const loader = createLoaderApiRoute( diff --git a/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts b/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts index 1a32a8ce37..a05af273d8 100644 --- a/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts +++ b/apps/webapp/app/routes/api.v2.runs.$runParam.cancel.ts @@ -1,53 +1,47 @@ -import type { ActionFunctionArgs } from "@remix-run/server-runtime"; import { json } from "@remix-run/server-runtime"; import { z } from "zod"; -import { prisma } from "~/db.server"; -import { authenticateApiRequest } from "~/services/apiAuth.server"; +import { $replica } from "~/db.server"; +import { createActionApiRoute } from "~/services/routeBuilders/apiBuilder.server"; import { CancelTaskRunService } from 
"~/v3/services/cancelTaskRun.server"; const ParamsSchema = z.object({ runParam: z.string(), }); -export async function action({ request, params }: ActionFunctionArgs) { - // Ensure this is a POST request - if (request.method.toUpperCase() !== "POST") { - return { status: 405, body: "Method Not Allowed" }; - } - - // Authenticate the request - const authenticationResult = await authenticateApiRequest(request); - - if (!authenticationResult) { - return json({ error: "Invalid or Missing API Key" }, { status: 401 }); - } - - const parsed = ParamsSchema.safeParse(params); - - if (!parsed.success) { - return json({ error: "Invalid or Missing run id" }, { status: 400 }); - } - - const { runParam } = parsed.data; - - const taskRun = await prisma.taskRun.findUnique({ - where: { - friendlyId: runParam, - runtimeEnvironmentId: authenticationResult.environment.id, +const { action } = createActionApiRoute( + { + params: ParamsSchema, + allowJWT: true, + corsStrategy: "none", + authorization: { + action: "write", + resource: (params) => ({ runs: params.runParam }), + superScopes: ["write:runs", "admin"], }, - }); + findResource: async (params, auth) => { + return $replica.taskRun.findFirst({ + where: { + friendlyId: params.runParam, + runtimeEnvironmentId: auth.environment.id, + }, + }); + }, + }, + async ({ resource }) => { + if (!resource) { + return json({ error: "Run not found" }, { status: 404 }); + } - if (!taskRun) { - return json({ error: "Run not found" }, { status: 404 }); - } + const service = new CancelTaskRunService(); - const service = new CancelTaskRunService(); + try { + await service.call(resource); + } catch (error) { + return json({ error: "Internal Server Error" }, { status: 500 }); + } - try { - await service.call(taskRun); - } catch (error) { - return json({ error: "Internal Server Error" }, { status: 500 }); + return json({ id: resource.friendlyId }, { status: 200 }); } +); - return json({ id: runParam }, { status: 200 }); -} +export { action }; diff --git 
a/apps/webapp/app/routes/engine.v1.worker-actions.runs.$runFriendlyId.snapshots.$snapshotFriendlyId.continue.ts b/apps/webapp/app/routes/engine.v1.worker-actions.runs.$runFriendlyId.snapshots.$snapshotFriendlyId.continue.ts index b35f26a10e..5a436b6575 100644 --- a/apps/webapp/app/routes/engine.v1.worker-actions.runs.$runFriendlyId.snapshots.$snapshotFriendlyId.continue.ts +++ b/apps/webapp/app/routes/engine.v1.worker-actions.runs.$runFriendlyId.snapshots.$snapshotFriendlyId.continue.ts @@ -27,8 +27,12 @@ export const loader = createLoaderWorkerApiRoute( return json(continuationResult); } catch (error) { - logger.error("Failed to suspend run", { runFriendlyId, snapshotFriendlyId, error }); - throw error; + logger.warn("Failed to suspend run", { runFriendlyId, snapshotFriendlyId, error }); + if (error instanceof Error) { + throw json({ error: error.message }, { status: 422 }); + } + + throw json({ error: "Failed to continue run execution" }, { status: 422 }); } } ); diff --git a/apps/webapp/app/routes/projects.$projectRef.ts b/apps/webapp/app/routes/projects.$projectRef.ts new file mode 100644 index 0000000000..856a93c4ac --- /dev/null +++ b/apps/webapp/app/routes/projects.$projectRef.ts @@ -0,0 +1,37 @@ +import { type LoaderFunctionArgs, redirect } from "@remix-run/server-runtime"; +import { z } from "zod"; +import { prisma } from "~/db.server"; +import { requireUserId } from "~/services/session.server"; + +const ParamsSchema = z.object({ + projectRef: z.string(), +}); + +export async function loader({ params, request }: LoaderFunctionArgs) { + const userId = await requireUserId(request); + + const validatedParams = ParamsSchema.parse(params); + + const project = await prisma.project.findFirst({ + where: { + externalRef: validatedParams.projectRef, + organization: { + members: { + some: { + userId, + }, + }, + }, + }, + include: { + organization: true, + }, + }); + + if (!project) { + return new Response("Not found", { status: 404 }); + } + + // Redirect to the 
project's runs page + return redirect(`/orgs/${project.organization.slug}/projects/${project.slug}`); +} diff --git a/apps/webapp/app/routes/projects.v3.$projectRef.ts b/apps/webapp/app/routes/projects.v3.$projectRef.ts index 856a93c4ac..48b007a627 100644 --- a/apps/webapp/app/routes/projects.v3.$projectRef.ts +++ b/apps/webapp/app/routes/projects.v3.$projectRef.ts @@ -1,37 +1,12 @@ import { type LoaderFunctionArgs, redirect } from "@remix-run/server-runtime"; import { z } from "zod"; -import { prisma } from "~/db.server"; -import { requireUserId } from "~/services/session.server"; const ParamsSchema = z.object({ projectRef: z.string(), }); export async function loader({ params, request }: LoaderFunctionArgs) { - const userId = await requireUserId(request); - const validatedParams = ParamsSchema.parse(params); - const project = await prisma.project.findFirst({ - where: { - externalRef: validatedParams.projectRef, - organization: { - members: { - some: { - userId, - }, - }, - }, - }, - include: { - organization: true, - }, - }); - - if (!project) { - return new Response("Not found", { status: 404 }); - } - - // Redirect to the project's runs page - return redirect(`/orgs/${project.organization.slug}/projects/${project.slug}`); + return redirect(`/projects/${validatedParams.projectRef}`); } diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam.spans.$spanParam/route.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam.spans.$spanParam/route.tsx index 904f5c508b..6a4e29476e 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam.spans.$spanParam/route.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.$runParam.spans.$spanParam/route.tsx @@ -381,7 +381,7 @@ function RunBody({ onClick={() => { replace({ tab: "context" }); 
}} - shortcut={{ key: "c" }} + shortcut={{ key: "x" }} > Context diff --git a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction.tsx b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction.tsx index 04809bafa4..5877f2972a 100644 --- a/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction.tsx +++ b/apps/webapp/app/routes/resources.orgs.$organizationSlug.projects.$projectParam.env.$envParam.runs.bulkaction.tsx @@ -320,7 +320,7 @@ export function CreateBulkActionInspector({ label={simplur`${selectedItems.size} individually selected run[|s]`} value={"selected"} variant="button/small" - className="grow" + className="grow tabular-nums" /> diff --git a/apps/webapp/app/services/authorization.server.ts b/apps/webapp/app/services/authorization.server.ts index a7a3101165..15f85cc327 100644 --- a/apps/webapp/app/services/authorization.server.ts +++ b/apps/webapp/app/services/authorization.server.ts @@ -1,6 +1,6 @@ export type AuthorizationAction = "read" | "write" | string; // Add more actions as needed -const ResourceTypes = ["tasks", "tags", "runs", "batch", "waitpoints"] as const; +const ResourceTypes = ["tasks", "tags", "runs", "batch", "waitpoints", "deployments"] as const; export type AuthorizationResources = { [key in (typeof ResourceTypes)[number]]?: string | string[]; diff --git a/apps/webapp/app/services/realtime/redisRealtimeStreams.server.ts b/apps/webapp/app/services/realtime/redisRealtimeStreams.server.ts index 776b6179fa..0f2c3d011a 100644 --- a/apps/webapp/app/services/realtime/redisRealtimeStreams.server.ts +++ b/apps/webapp/app/services/realtime/redisRealtimeStreams.server.ts @@ -184,6 +184,15 @@ export class RedisRealtimeStreams implements StreamIngestor, StreamResponder { return new Response(null, { status: 200 }); } catch (error) { + if (error instanceof Error) { + if ("code" in error && 
error.code === "ECONNRESET") { + logger.info("[RealtimeStreams][ingestData] Connection reset during ingestData:", { + error, + }); + return new Response(null, { status: 500 }); + } + } + logger.error("[RealtimeStreams][ingestData] Error in ingestData:", { error }); return new Response(null, { status: 500 }); diff --git a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts index 9e161abe07..9d06d3345c 100644 --- a/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts +++ b/apps/webapp/app/services/routeBuilders/apiBuilder.server.ts @@ -430,7 +430,8 @@ type ApiKeyActionRouteBuilderOptions< TParamsSchema extends AnyZodSchema | undefined = undefined, TSearchParamsSchema extends AnyZodSchema | undefined = undefined, THeadersSchema extends AnyZodSchema | undefined = undefined, - TBodySchema extends AnyZodSchema | undefined = undefined + TBodySchema extends AnyZodSchema | undefined = undefined, + TResource = never > = { params?: TParamsSchema; searchParams?: TSearchParamsSchema; @@ -438,6 +439,17 @@ type ApiKeyActionRouteBuilderOptions< allowJWT?: boolean; corsStrategy?: "all" | "none"; method?: "POST" | "PUT" | "DELETE" | "PATCH"; + findResource?: ( + params: TParamsSchema extends z.ZodFirstPartySchemaTypes | z.ZodDiscriminatedUnion + ? z.infer + : undefined, + authentication: ApiAuthenticationResultSuccess, + searchParams: TSearchParamsSchema extends + | z.ZodFirstPartySchemaTypes + | z.ZodDiscriminatedUnion + ? 
z.infer + : undefined + ) => Promise; authorization?: { action: AuthorizationAction; resource: ( @@ -466,7 +478,8 @@ type ApiKeyActionHandlerFunction< TParamsSchema extends AnyZodSchema | undefined, TSearchParamsSchema extends AnyZodSchema | undefined, THeadersSchema extends AnyZodSchema | undefined = undefined, - TBodySchema extends AnyZodSchema | undefined = undefined + TBodySchema extends AnyZodSchema | undefined = undefined, + TResource = never > = (args: { params: TParamsSchema extends z.ZodFirstPartySchemaTypes | z.ZodDiscriminatedUnion ? z.infer @@ -484,25 +497,29 @@ type ApiKeyActionHandlerFunction< : undefined; authentication: ApiAuthenticationResultSuccess; request: Request; + resource?: TResource; }) => Promise; export function createActionApiRoute< TParamsSchema extends AnyZodSchema | undefined = undefined, TSearchParamsSchema extends AnyZodSchema | undefined = undefined, THeadersSchema extends AnyZodSchema | undefined = undefined, - TBodySchema extends AnyZodSchema | undefined = undefined + TBodySchema extends AnyZodSchema | undefined = undefined, + TResource = never >( options: ApiKeyActionRouteBuilderOptions< TParamsSchema, TSearchParamsSchema, THeadersSchema, - TBodySchema + TBodySchema, + TResource >, handler: ApiKeyActionHandlerFunction< TParamsSchema, TSearchParamsSchema, THeadersSchema, - TBodySchema + TBodySchema, + TResource > ) { const { @@ -682,6 +699,18 @@ export function createActionApiRoute< } } + const resource = options.findResource + ? 
await options.findResource(parsedParams, authenticationResult, parsedSearchParams) + : undefined; + + if (options.findResource && !resource) { + return await wrapResponse( + request, + json({ error: "Resource not found" }, { status: 404 }), + corsStrategy !== "none" + ); + } + const result = await handler({ params: parsedParams, searchParams: parsedSearchParams, @@ -689,6 +718,7 @@ export function createActionApiRoute< body: parsedBody, authentication: authenticationResult, request, + resource, }); return await wrapResponse(request, result, corsStrategy !== "none"); } catch (error) { diff --git a/apps/webapp/app/v3/alertsWorker.server.ts b/apps/webapp/app/v3/alertsWorker.server.ts index 3e1917ead1..46670887a7 100644 --- a/apps/webapp/app/v3/alertsWorker.server.ts +++ b/apps/webapp/app/v3/alertsWorker.server.ts @@ -33,6 +33,7 @@ function initializeWorker() { retry: { maxAttempts: 3, }, + logErrors: false, }, "v3.performDeploymentAlerts": { schema: z.object({ @@ -42,6 +43,7 @@ function initializeWorker() { retry: { maxAttempts: 3, }, + logErrors: false, }, "v3.deliverAlert": { schema: z.object({ @@ -51,6 +53,7 @@ function initializeWorker() { retry: { maxAttempts: 3, }, + logErrors: false, }, }, concurrency: { diff --git a/apps/webapp/app/v3/eventRepository.server.ts b/apps/webapp/app/v3/eventRepository.server.ts index 687fbe9e76..a839c3423c 100644 --- a/apps/webapp/app/v3/eventRepository.server.ts +++ b/apps/webapp/app/v3/eventRepository.server.ts @@ -33,7 +33,7 @@ import { createRedisClient, RedisClient, RedisWithClusterOptions } from "~/redis import { logger } from "~/services/logger.server"; import { singleton } from "~/utils/singleton"; import { DynamicFlushScheduler } from "./dynamicFlushScheduler.server"; -import { TaskEventStore, TaskEventStoreTable } from "./taskEventStore.server"; +import { DetailedTraceEvent, TaskEventStore, TaskEventStoreTable } from "./taskEventStore.server"; import { startActiveSpan } from "./tracer.server"; import { startSpan } from 
"./tracing.server"; @@ -146,6 +146,12 @@ export type PreparedEvent = Omit style: TaskEventStyle; }; +export type PreparedDetailedEvent = Omit & { + duration: number; + events: SpanEvents; + style: TaskEventStyle; +}; + export type RunPreparedEvent = PreparedEvent & { taskSlug?: string; }; @@ -186,6 +192,36 @@ export type SpanSummary = { export type TraceSummary = { rootSpan: SpanSummary; spans: Array }; +export type SpanDetailedSummary = { + id: string; + parentId: string | undefined; + message: string; + data: { + runId: string; + taskSlug?: string; + taskPath?: string; + events: SpanEvents; + startTime: Date; + duration: number; + isError: boolean; + isPartial: boolean; + isCancelled: boolean; + level: NonNullable; + environmentType: CreatableEventEnvironmentType; + workerVersion?: string; + queueName?: string; + machinePreset?: string; + properties?: Attributes; + output?: Attributes; + }; + children: Array; +}; + +export type TraceDetailedSummary = { + traceId: string; + rootSpan: SpanDetailedSummary; +}; + export type UpdateEventOptions = { attributes: TraceAttributes; endTime?: Date; @@ -589,6 +625,121 @@ export class EventRepository { }); } + public async getTraceDetailedSummary( + storeTable: TaskEventStoreTable, + traceId: string, + startCreatedAt: Date, + endCreatedAt?: Date, + options?: { includeDebugLogs?: boolean } + ): Promise { + return await startActiveSpan("getTraceDetailedSummary", async (span) => { + const events = await this.taskEventStore.findDetailedTraceEvents( + storeTable, + traceId, + startCreatedAt, + endCreatedAt, + { includeDebugLogs: options?.includeDebugLogs } + ); + + let preparedEvents: Array = []; + let rootSpanId: string | undefined; + const eventsBySpanId = new Map(); + + for (const event of events) { + preparedEvents.push(prepareDetailedEvent(event)); + + if (!rootSpanId && !event.parentId) { + rootSpanId = event.spanId; + } + } + + for (const event of preparedEvents) { + const existingEvent = eventsBySpanId.get(event.spanId); + 
+ if (!existingEvent) { + eventsBySpanId.set(event.spanId, event); + continue; + } + + if (event.isCancelled || !event.isPartial) { + eventsBySpanId.set(event.spanId, event); + } + } + + preparedEvents = Array.from(eventsBySpanId.values()); + + if (!rootSpanId) { + return; + } + + // Build hierarchical structure + const spanDetailedSummaryMap = new Map(); + + // First pass: create all span detailed summaries + for (const event of preparedEvents) { + const ancestorCancelled = isAncestorCancelled(eventsBySpanId, event.spanId); + const duration = calculateDurationIfAncestorIsCancelled( + eventsBySpanId, + event.spanId, + event.duration + ); + + const output = event.output ? (event.output as Attributes) : undefined; + const properties = event.properties + ? removePrivateProperties(event.properties as Attributes) + : {}; + + const spanDetailedSummary: SpanDetailedSummary = { + id: event.spanId, + parentId: event.parentId ?? undefined, + message: event.message, + data: { + runId: event.runId, + taskSlug: event.taskSlug ?? undefined, + taskPath: event.taskPath ?? undefined, + events: event.events?.filter((e) => !e.name.startsWith("trigger.dev")), + startTime: getDateFromNanoseconds(event.startTime), + duration: nanosecondsToMilliseconds(duration), + isError: event.isError, + isPartial: ancestorCancelled ? false : event.isPartial, + isCancelled: event.isCancelled === true ? true : event.isPartial && ancestorCancelled, + level: event.level, + environmentType: event.environmentType, + workerVersion: event.workerVersion ?? undefined, + queueName: event.queueName ?? undefined, + machinePreset: event.machinePreset ?? 
undefined, + properties, + output, + }, + children: [], + }; + + spanDetailedSummaryMap.set(event.spanId, spanDetailedSummary); + } + + // Second pass: build parent-child relationships + for (const spanSummary of spanDetailedSummaryMap.values()) { + if (spanSummary.parentId) { + const parent = spanDetailedSummaryMap.get(spanSummary.parentId); + if (parent) { + parent.children.push(spanSummary); + } + } + } + + const rootSpan = spanDetailedSummaryMap.get(rootSpanId); + + if (!rootSpan) { + return; + } + + return { + traceId, + rootSpan, + }; + }); + } + public async getRunEvents( storeTable: TaskEventStoreTable, runId: string, @@ -1246,7 +1397,7 @@ export class EventRepository { span.setAttribute("prisma_error_code", errorDetails.code); } - logger.error("Failed to insert events, will attempt bisection", { + logger.info("Failed to insert events, will attempt bisection", { error: errorDetails, }); @@ -1517,6 +1668,15 @@ function prepareEvent(event: QueriedEvent): PreparedEvent { }; } +function prepareDetailedEvent(event: DetailedTraceEvent): PreparedDetailedEvent { + return { + ...event, + duration: Number(event.duration), + events: parseEventsField(event.events), + style: parseStyleField(event.style), + }; +} + function parseEventsField(events: Prisma.JsonValue): SpanEvents { const unsafe = events ? 
(events as any[]).map((e) => ({ @@ -1548,7 +1708,10 @@ function parseStyleField(style: Prisma.JsonValue): TaskEventStyle { return {}; } -function isAncestorCancelled(events: Map, spanId: string) { +function isAncestorCancelled( + events: Map, + spanId: string +) { const event = events.get(spanId); if (!event) { @@ -1567,7 +1730,16 @@ function isAncestorCancelled(events: Map, spanId: string) } function calculateDurationIfAncestorIsCancelled( - events: Map, + events: Map< + string, + { + isCancelled: boolean; + parentId: string | null; + isPartial: boolean; + startTime: bigint; + events: SpanEvents; + } + >, spanId: string, defaultDuration: number ) { @@ -1603,7 +1775,19 @@ function calculateDurationIfAncestorIsCancelled( return defaultDuration; } -function findFirstCancelledAncestor(events: Map, spanId: string) { +function findFirstCancelledAncestor( + events: Map< + string, + { + isCancelled: boolean; + parentId: string | null; + isPartial: boolean; + startTime: bigint; + events: SpanEvents; + } + >, + spanId: string +) { const event = events.get(spanId); if (!event) { @@ -1711,6 +1895,10 @@ export function getDateFromNanoseconds(nanoseconds: bigint) { return new Date(Number(nanoseconds) / 1_000_000); } +function nanosecondsToMilliseconds(nanoseconds: bigint | number): number { + return Number(nanoseconds) / 1_000_000; +} + function rehydrateJson(json: Prisma.JsonValue): any { if (json === null) { return undefined; diff --git a/apps/webapp/app/v3/machinePresets.server.ts b/apps/webapp/app/v3/machinePresets.server.ts index 024cb9f114..ee01aace82 100644 --- a/apps/webapp/app/v3/machinePresets.server.ts +++ b/apps/webapp/app/v3/machinePresets.server.ts @@ -6,7 +6,7 @@ export function machinePresetFromConfig(config: unknown): MachinePreset { const parsedConfig = MachineConfig.safeParse(config); if (!parsedConfig.success) { - logger.error("Failed to parse machine config", { config }); + logger.info("Failed to parse machine config", { config }); return 
machinePresetFromName("small-1x"); } diff --git a/apps/webapp/app/v3/marqs/index.server.ts b/apps/webapp/app/v3/marqs/index.server.ts index 1636dba5f0..89dfa1e3ff 100644 --- a/apps/webapp/app/v3/marqs/index.server.ts +++ b/apps/webapp/app/v3/marqs/index.server.ts @@ -1,3 +1,4 @@ +import { type RedisOptions } from "@internal/redis"; import { context, propagation, @@ -10,18 +11,30 @@ import { } from "@opentelemetry/api"; import { SEMATTRS_MESSAGE_ID, - SEMATTRS_MESSAGING_SYSTEM, SEMATTRS_MESSAGING_OPERATION, + SEMATTRS_MESSAGING_SYSTEM, } from "@opentelemetry/semantic-conventions"; +import { Logger } from "@trigger.dev/core/logger"; +import { tryCatch } from "@trigger.dev/core/utils"; import { flattenAttributes } from "@trigger.dev/core/v3"; +import { Worker, type WorkerConcurrencyOptions } from "@trigger.dev/redis-worker"; import Redis, { type Callback, type Result } from "ioredis"; +import { setInterval as setIntervalAsync } from "node:timers/promises"; +import z from "zod"; import { env } from "~/env.server"; import { AuthenticatedEnvironment } from "~/services/apiAuth.server"; import { logger } from "~/services/logger.server"; import { singleton } from "~/utils/singleton"; +import { legacyRunEngineWorker } from "../legacyRunEngineWorker.server"; import { concurrencyTracker } from "../services/taskRunConcurrencyTracker.server"; import { attributesFromAuthenticatedEnv, tracer } from "../tracer.server"; import { AsyncWorker } from "./asyncWorker.server"; +import { + MARQS_DELAYED_REQUEUE_THRESHOLD_IN_MS, + MARQS_RESUME_PRIORITY_TIMESTAMP_OFFSET, + MARQS_RETRY_PRIORITY_TIMESTAMP_OFFSET, + MARQS_SCHEDULED_REQUEUE_AVAILABLE_AT_THRESHOLD_IN_MS, +} from "./constants.server"; import { FairDequeuingStrategy } from "./fairDequeuingStrategy.server"; import { MarQSShortKeyProducer } from "./marqsKeyProducer"; import { @@ -35,13 +48,6 @@ import { VisibilityTimeoutStrategy, } from "./types"; import { V3LegacyRunEngineWorkerVisibilityTimeout } from 
"./v3VisibilityTimeout.server"; -import { legacyRunEngineWorker } from "../legacyRunEngineWorker.server"; -import { - MARQS_DELAYED_REQUEUE_THRESHOLD_IN_MS, - MARQS_RESUME_PRIORITY_TIMESTAMP_OFFSET, - MARQS_RETRY_PRIORITY_TIMESTAMP_OFFSET, - MARQS_SCHEDULED_REQUEUE_AVAILABLE_AT_THRESHOLD_IN_MS, -} from "./constants.server"; const KEY_PREFIX = "marqs:"; @@ -70,6 +76,29 @@ export type MarQSOptions = { enableRebalancing?: boolean; verbose?: boolean; subscriber?: MessageQueueSubscriber; + sharedWorkerQueueConsumerIntervalMs?: number; + sharedWorkerQueueMaxMessageCount?: number; + sharedWorkerQueueCooloffPeriodMs?: number; + sharedWorkerQueueCooloffCountThreshold?: number; + eagerDequeuingEnabled?: boolean; + workerOptions: { + pollIntervalMs?: number; + immediatePollIntervalMs?: number; + shutdownTimeoutMs?: number; + concurrency?: WorkerConcurrencyOptions; + enabled?: boolean; + redisOptions: RedisOptions; + }; +}; + +const workerCatalog = { + processQueueForWorkerQueue: { + schema: z.object({ + queueKey: z.string(), + parentQueueKey: z.string(), + }), + visibilityTimeoutMs: 30_000, + }, }; /** @@ -79,6 +108,10 @@ export class MarQS { private redis: Redis; public keys: MarQSKeyProducer; #rebalanceWorkers: Array = []; + private worker: Worker; + private queueDequeueCooloffPeriod: Map = new Map(); + private queueDequeueCooloffCounts: Map = new Map(); + private clearCooloffPeriodInterval: NodeJS.Timeout; constructor(private readonly options: MarQSOptions) { this.redis = options.redis; @@ -87,6 +120,45 @@ export class MarQS { this.#startRebalanceWorkers(); this.#registerCommands(); + + // This will prevent these cooloff maps from growing indefinitely + this.clearCooloffPeriodInterval = setInterval(() => { + this.queueDequeueCooloffCounts.clear(); + this.queueDequeueCooloffPeriod.clear(); + }, 60_000 * 10); // 10 minutes + + this.worker = new Worker({ + name: "marqs-worker", + redisOptions: options.workerOptions.redisOptions, + catalog: workerCatalog, + concurrency: 
options.workerOptions?.concurrency, + pollIntervalMs: options.workerOptions?.pollIntervalMs ?? 1000, + immediatePollIntervalMs: options.workerOptions?.immediatePollIntervalMs ?? 100, + shutdownTimeoutMs: options.workerOptions?.shutdownTimeoutMs ?? 10_000, + logger: new Logger("MarQSWorker", "info"), + jobs: { + processQueueForWorkerQueue: async (job) => { + await this.#processQueueForWorkerQueue(job.payload.queueKey, job.payload.parentQueueKey); + }, + }, + }); + + if (options.workerOptions?.enabled) { + this.worker.start(); + } + + this.#setupShutdownHandlers(); + } + + #setupShutdownHandlers() { + process.on("SIGTERM", () => this.shutdown("SIGTERM")); + process.on("SIGINT", () => this.shutdown("SIGINT")); + } + + async shutdown(signal: NodeJS.Signals) { + console.log("πŸ‘‡ Shutting down marqs", this.name, signal); + clearInterval(this.clearCooloffPeriodInterval); + this.#rebalanceWorkers.forEach((worker) => worker.stop()); } get name() { @@ -276,6 +348,21 @@ export class MarQS { span.setAttribute("reserve_recursive_queue", reserve.recursiveQueue); } + if (env.type !== "DEVELOPMENT" && this.options.eagerDequeuingEnabled) { + // This will move the message to the worker queue so it can be dequeued + await this.worker.enqueueOnce({ + id: messageQueue, // dedupe by environment, queue, and concurrency key + job: "processQueueForWorkerQueue", + payload: { + queueKey: messageQueue, + parentQueueKey: parentQueue, + }, + // Add a small delay to dedupe messages so at most one of these will processed, + // every 500ms per queue, concurrency key, and environment + availableAt: new Date(Date.now() + 500), // 500ms from now + }); + } + const result = await this.#callEnqueueMessage(messagePayload, reserve); if (result) { @@ -488,15 +575,18 @@ export class MarQS { span.setAttribute("queue_count", queues.length); for (const messageQueue of queues) { - const messageData = await this.#callDequeueMessage({ + const messages = await this.#callDequeueMessages({ messageQueue, 
parentQueue, + maxCount: 1, }); - if (!messageData) { + if (!messages || messages.length === 0) { return; } + const messageData = messages[0]; + const message = await this.readMessage(messageData.messageId); if (message) { @@ -554,11 +644,184 @@ export class MarQS { } /** - * Dequeue a message from the shared queue (this should be used in production environments) + * Dequeue a message from the shared worker queue (this should be used in production environments) */ - public async dequeueMessageInSharedQueue(consumerId: string) { + public async dequeueMessageFromSharedWorkerQueue(consumerId: string) { return this.#trace( - "dequeueMessageInSharedQueue", + "dequeueMessageFromSharedWorkerQueue", + async (span) => { + span.setAttribute(SemanticAttributes.CONSUMER_ID, consumerId); + + const workerQueueKey = this.keys.sharedWorkerQueueKey(); + + span.setAttribute(SemanticAttributes.PARENT_QUEUE, workerQueueKey); + + // Try and pop a message from the worker queue (redis list) + const messageId = await this.#trace("popMessageFromWorkerQueue", async (innerSpan) => { + innerSpan.setAttribute(SemanticAttributes.PARENT_QUEUE, workerQueueKey); + innerSpan.setAttribute(SemanticAttributes.CONSUMER_ID, consumerId); + + const results = await this.redis.popMessageFromWorkerQueue(workerQueueKey); + + if (!results) { + return null; + } + + const [messageId, queueLength] = results; + + innerSpan.setAttribute("queue_length", Number(queueLength)); + + return messageId; + }); + + if (!messageId) { + return; + } + + const message = await this.readMessage(messageId); + + if (!message) { + return; + } + + if (this.options.subscriber) { + await this.#trace( + "postMessageDequeued", + async (subscriberSpan) => { + subscriberSpan.setAttributes({ + [SemanticAttributes.MESSAGE_ID]: message.messageId, + [SemanticAttributes.QUEUE]: message.queue, + [SemanticAttributes.PARENT_QUEUE]: message.parentQueue, + }); + + return await this.options.subscriber?.messageDequeued(message); + }, + { + kind: 
SpanKind.INTERNAL, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + } + + await this.#trace( + "startHeartbeat", + async (heartbeatSpan) => { + heartbeatSpan.setAttributes({ + [SemanticAttributes.MESSAGE_ID]: message.messageId, + visibility_timeout_ms: this.visibilityTimeoutInMs, + }); + + return await this.options.visibilityTimeoutStrategy.startHeartbeat( + message.messageId, + this.visibilityTimeoutInMs + ); + }, + { + kind: SpanKind.INTERNAL, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + + return message; + }, + { + kind: SpanKind.CONSUMER, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + } + + public startSharedWorkerQueueConsumer(consumerId: string) { + const abortController = new AbortController(); + + this.#startSharedWorkerQueueConsumer(consumerId, abortController).catch((error) => { + logger.error("Failed to start shared worker queue consumer", { + error, + service: this.name, + consumerId, + }); + }); + + return () => { + abortController.abort(); + }; + } + + async #startSharedWorkerQueueConsumer(consumerId: string, abortController: AbortController) { + let lastProcessedAt = Date.now(); + let processedCount = 0; + + try { + for await (const _ of setIntervalAsync( + this.options.sharedWorkerQueueConsumerIntervalMs ?? 
500, + null, + { + signal: abortController.signal, + } + )) { + logger.debug(`Processing shared worker queue`, { + processedCount, + lastProcessedAt, + service: this.name, + consumerId, + }); + + const now = performance.now(); + + const [error, results] = await tryCatch(this.#processSharedWorkerQueue(consumerId)); + + if (error) { + logger.error(`Failed to process shared worker queue`, { + error, + service: this.name, + consumerId, + }); + + continue; + } + + const duration = performance.now() - now; + + logger.debug(`Processed shared worker queue`, { + processedCount, + lastProcessedAt, + service: this.name, + duration, + results, + consumerId, + }); + + processedCount++; + lastProcessedAt = Date.now(); + } + } catch (error) { + if (error instanceof Error && error.name !== "AbortError") { + throw error; + } + + logger.debug(`Shared worker queue consumer stopped`, { + service: this.name, + processedCount, + lastProcessedAt, + }); + } + } + + /** + * Dequeue as many messages as possible from queues into the shared worker queue list + */ + async #processSharedWorkerQueue(consumerId: string) { + return this.#trace( + "processSharedWorkerQueue", async (span) => { span.setAttribute(SemanticAttributes.CONSUMER_ID, consumerId); @@ -581,63 +844,140 @@ export class MarQS { let attemptedEnvs = 0; let attemptedQueues = 0; + let messageCount = 0; + let coolOffPeriodCount = 0; - // Try each queue in order until we successfully dequeue a message + // Try each queue in order, attempt to dequeue a message from each queue, keep going until we've tried all the queues for (const env of envQueues) { attemptedEnvs++; for (const messageQueue of env.queues) { attemptedQueues++; - try { - const messageData = await this.#callDequeueMessage({ - messageQueue, - parentQueue, - }); - - if (!messageData) { - continue; // Try next queue if no message was dequeued + const cooloffPeriod = this.queueDequeueCooloffPeriod.get(messageQueue); + + // If the queue is in a cooloff period, skip attempting 
to dequeue from it + if (cooloffPeriod) { + // If the cooloff period is still active, skip attempting to dequeue from it + if (cooloffPeriod > Date.now()) { + coolOffPeriodCount++; + continue; + } else { + // If the cooloff period is over, delete the cooloff period and attempt to dequeue from the queue + this.queueDequeueCooloffPeriod.delete(messageQueue); } + } - const message = await this.readMessage(messageData.messageId); - - if (message) { - span.setAttributes({ - [SEMATTRS_MESSAGE_ID]: message.messageId, - [SemanticAttributes.QUEUE]: message.queue, - [SemanticAttributes.MESSAGE_ID]: message.messageId, - [SemanticAttributes.CONCURRENCY_KEY]: message.concurrencyKey, - [SemanticAttributes.PARENT_QUEUE]: message.parentQueue, - attempted_queues: attemptedQueues, // How many queues we tried before success - attempted_envs: attemptedEnvs, // How many environments we tried before success - message_timestamp: message.timestamp, - message_age: this.#calculateMessageAge(message), - message_priority: message.priority, - message_enqueue_method: message.enqueueMethod, - message_available_at: message.availableAt, - ...flattenAttributes(message.data, "message.data"), - }); - - await this.options.subscriber?.messageDequeued(message); - - await this.options.visibilityTimeoutStrategy.startHeartbeat( - messageData.messageId, - this.visibilityTimeoutInMs - ); - - return message; + await this.#trace( + "attemptDequeue", + async (attemptDequeueSpan) => { + try { + attemptDequeueSpan.setAttributes({ + [SemanticAttributes.QUEUE]: messageQueue, + [SemanticAttributes.PARENT_QUEUE]: parentQueue, + }); + + const messages = await this.#trace( + "callDequeueMessages", + async (dequeueSpan) => { + dequeueSpan.setAttributes({ + [SemanticAttributes.QUEUE]: messageQueue, + [SemanticAttributes.PARENT_QUEUE]: parentQueue, + }); + + return await this.#callDequeueMessages({ + messageQueue, + parentQueue, + maxCount: this.options.sharedWorkerQueueMaxMessageCount ?? 
10, + }); + }, + { + kind: SpanKind.CONSUMER, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + + if (!messages || messages.length === 0) { + const cooloffCount = this.queueDequeueCooloffCounts.get(messageQueue) ?? 0; + + const cooloffCountThreshold = Math.max( + 10, + this.options.sharedWorkerQueueCooloffCountThreshold ?? 10 + ); // minimum of 10 + + if (cooloffCount >= cooloffCountThreshold) { + // If no messages were dequeued, set a cooloff period for the queue + // This is to prevent the queue from being dequeued too frequently + // and to give other queues a chance to dequeue messages more frequently + this.queueDequeueCooloffPeriod.set( + messageQueue, + Date.now() + (this.options.sharedWorkerQueueCooloffPeriodMs ?? 10_000) // defaults to 10 seconds + ); + this.queueDequeueCooloffCounts.delete(messageQueue); + } else { + this.queueDequeueCooloffCounts.set(messageQueue, cooloffCount + 1); + } + + attemptDequeueSpan.setAttribute("message_count", 0); + return null; // Try next queue if no message was dequeued + } + + this.queueDequeueCooloffCounts.delete(messageQueue); + + messageCount += messages.length; + + attemptDequeueSpan.setAttribute("message_count", messages.length); + + await this.#trace( + "addToWorkerQueue", + async (addToWorkerQueueSpan) => { + const workerQueueKey = this.keys.sharedWorkerQueueKey(); + + addToWorkerQueueSpan.setAttributes({ + message_count: messages.length, + [SemanticAttributes.PARENT_QUEUE]: workerQueueKey, + }); + + await this.redis.rpush( + workerQueueKey, + ...messages.map((message) => message.messageId) + ); + }, + { + kind: SpanKind.INTERNAL, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + } catch (error) { + // Log error but continue trying other queues + logger.warn(`[${this.name}] Failed to dequeue from queue ${messageQueue}`, { + error, + }); + return null; + } + }, + { + kind: 
SpanKind.CONSUMER, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, } - } catch (error) { - // Log error but continue trying other queues - logger.warn(`[${this.name}] Failed to dequeue from queue ${messageQueue}`, { error }); - continue; - } + ); } } // If we get here, we tried all queues but couldn't dequeue a message span.setAttribute("attempted_queues", attemptedQueues); span.setAttribute("attempted_envs", attemptedEnvs); + span.setAttribute("message_count", messageCount); + span.setAttribute("cooloff_period_count", coolOffPeriodCount); return; }, @@ -651,6 +991,64 @@ export class MarQS { ); } + async #processQueueForWorkerQueue(queueKey: string, parentQueueKey: string) { + return this.#trace("processQueueForWorkerQueue", async (span) => { + span.setAttributes({ + [SemanticAttributes.QUEUE]: queueKey, + [SemanticAttributes.PARENT_QUEUE]: parentQueueKey, + }); + + const maxCount = this.options.sharedWorkerQueueMaxMessageCount ?? 
10; + + const dequeuedMessages = await this.#callDequeueMessages({ + messageQueue: queueKey, + parentQueue: parentQueueKey, + maxCount, + }); + + if (!dequeuedMessages || dequeuedMessages.length === 0) { + return; + } + + await this.#trace( + "addToWorkerQueue", + async (addToWorkerQueueSpan) => { + const workerQueueKey = this.keys.sharedWorkerQueueKey(); + + addToWorkerQueueSpan.setAttributes({ + message_count: dequeuedMessages.length, + [SemanticAttributes.PARENT_QUEUE]: workerQueueKey, + }); + + await this.redis.rpush( + workerQueueKey, + ...dequeuedMessages.map((message) => message.messageId) + ); + }, + { + kind: SpanKind.INTERNAL, + attributes: { + [SEMATTRS_MESSAGING_OPERATION]: "receive", + [SEMATTRS_MESSAGING_SYSTEM]: "marqs", + }, + } + ); + + // If we dequeued the max count, we need to enqueue another job to dequeue the next batch + if (dequeuedMessages.length === maxCount) { + await this.worker.enqueueOnce({ + id: queueKey, + job: "processQueueForWorkerQueue", + payload: { + queueKey, + parentQueueKey, + }, + availableAt: new Date(Date.now() + 500), // 500ms from now + }); + } + }); + } + public async acknowledgeMessage(messageId: string, reason: string = "unknown") { return this.#trace( "acknowledgeMessage", @@ -682,6 +1080,20 @@ export class MarQS { messageId, }); + const sharedQueueKey = this.keys.sharedQueueKey(); + + if (this.options.eagerDequeuingEnabled && message.parentQueue === sharedQueueKey) { + await this.worker.enqueueOnce({ + id: message.queue, + job: "processQueueForWorkerQueue", + payload: { + queueKey: message.queue, + parentQueueKey: message.parentQueue, + }, + availableAt: new Date(Date.now() + 500), // 500ms from now + }); + } + await this.options.subscriber?.messageAcked(message); }, { @@ -1256,12 +1668,14 @@ export class MarQS { } } - async #callDequeueMessage({ + async #callDequeueMessages({ messageQueue, parentQueue, + maxCount, }: { messageQueue: string; parentQueue: string; + maxCount: number; }) { const 
queueConcurrencyLimitKey = this.keys.queueConcurrencyLimitKeyFromQueue(messageQueue); const queueCurrentConcurrencyKey = this.keys.queueCurrentConcurrencyKeyFromQueue(messageQueue); @@ -1271,7 +1685,7 @@ export class MarQS { const queueReserveConcurrencyKey = this.keys.queueReserveConcurrencyKeyFromQueue(messageQueue); const envQueueKey = this.keys.envQueueKeyFromQueue(messageQueue); - logger.debug("Calling dequeueMessage", { + logger.debug("Calling dequeueMessages", { messageQueue, parentQueue, queueConcurrencyLimitKey, @@ -1284,7 +1698,7 @@ export class MarQS { service: this.name, }); - const result = await this.redis.dequeueMessage( + const result = await this.redis.dequeueMessages( messageQueue, parentQueue, queueConcurrencyLimitKey, @@ -1296,7 +1710,8 @@ export class MarQS { envQueueKey, messageQueue, String(Date.now()), - String(this.options.defaultEnvConcurrency) + String(this.options.defaultEnvConcurrency), + String(maxCount) ); if (!result) { @@ -1308,14 +1723,23 @@ export class MarQS { service: this.name, }); - if (result.length !== 2) { - return; + const messages = []; + for (let i = 0; i < result.length; i += 2) { + const messageId = result[i]; + const messageScore = result[i + 1]; + + messages.push({ + messageId, + messageScore, + }); } - return { - messageId: result[0], - messageScore: result[1], - }; + logger.debug("dequeueMessages parsed result", { + messages, + service: this.name, + }); + + return messages.filter(Boolean); } async #callRequeueMessage(message: MessagePayload) { @@ -1746,7 +2170,7 @@ return true `, }); - this.redis.defineCommand("dequeueMessage", { + this.redis.defineCommand("dequeueMessages", { numberOfKeys: 9, lua: ` local queueKey = KEYS[1] @@ -1762,6 +2186,7 @@ local envQueueKey = KEYS[9] local queueName = ARGV[1] local currentTime = tonumber(ARGV[2]) local defaultEnvConcurrencyLimit = ARGV[3] +local maxCount = tonumber(ARGV[4] or '1') -- Check current env concurrency against the limit local envCurrentConcurrency = 
tonumber(redis.call('SCARD', envCurrentConcurrencyKey) or '0') @@ -1784,27 +2209,38 @@ if queueCurrentConcurrency >= totalQueueConcurrencyLimit then return nil end --- Attempt to dequeue the next message -local messages = redis.call('ZRANGEBYSCORE', queueKey, '-inf', currentTime, 'WITHSCORES', 'LIMIT', 0, 1) +-- Calculate how many messages we can actually dequeue based on concurrency limits +local envAvailableCapacity = totalEnvConcurrencyLimit - envCurrentConcurrency +local queueAvailableCapacity = totalQueueConcurrencyLimit - queueCurrentConcurrency +local actualMaxCount = math.min(maxCount, envAvailableCapacity, queueAvailableCapacity) -if #messages == 0 then +if actualMaxCount <= 0 then return nil end -local messageId = messages[1] -local messageScore = tonumber(messages[2]) +-- Attempt to dequeue messages up to actualMaxCount +local messagesWithScores = redis.call('ZRANGEBYSCORE', queueKey, '-inf', currentTime, 'WITHSCORES', 'LIMIT', 0, actualMaxCount) --- Remove the message from the queue and update concurrency -redis.call('ZREM', queueKey, messageId) -redis.call('ZREM', envQueueKey, messageId) -redis.call('SADD', queueCurrentConcurrencyKey, messageId) -redis.call('SADD', envCurrentConcurrencyKey, messageId) +if #messagesWithScores == 0 then + return nil +end + +local messageIds = {} +for i = 1, #messagesWithScores, 2 do + table.insert(messageIds, messagesWithScores[i]) +end + +-- Remove the messages from the queue and update concurrency +redis.call('ZREM', queueKey, unpack(messageIds)) +redis.call('ZREM', envQueueKey, unpack(messageIds)) +redis.call('SADD', queueCurrentConcurrencyKey, unpack(messageIds)) +redis.call('SADD', envCurrentConcurrencyKey, unpack(messageIds)) -- Remove the message from the reserve concurrency set -redis.call('SREM', envReserveConcurrencyKey, messageId) +redis.call('SREM', envReserveConcurrencyKey, unpack(messageIds)) -- Remove the message from the queue reserve concurrency set -redis.call('SREM', queueReserveConcurrencyKey, 
messageId) +redis.call('SREM', queueReserveConcurrencyKey, unpack(messageIds)) -- Rebalance the parent queue local earliestMessage = redis.call('ZRANGE', queueKey, 0, 0, 'WITHSCORES') @@ -1814,7 +2250,27 @@ else redis.call('ZADD', parentQueueKey, earliestMessage[2], queueName) end -return {messageId, messageScore} -- Return message details +return messagesWithScores + `, + }); + + this.redis.defineCommand("popMessageFromWorkerQueue", { + numberOfKeys: 1, + lua: ` +local workerQueueKey = KEYS[1] + +-- lpop the first message from the worker queue +local messageId = redis.call('LPOP', workerQueueKey) + +-- if there is no messageId, return nil +if not messageId then + return nil +end + +-- get the length of the worker queue +local queueLength = tonumber(redis.call('LLEN', workerQueueKey) or '0') + +return {messageId, queueLength} -- Return message details `, }); @@ -2061,7 +2517,7 @@ declare module "ioredis" { callback?: Callback ): Result; - dequeueMessage( + dequeueMessages( queueKey: string, parentQueueKey: string, queueConcurrencyLimitKey: string, @@ -2074,7 +2530,13 @@ declare module "ioredis" { queueName: string, currentTime: string, defaultEnvConcurrencyLimit: string, - callback?: Callback<[string, string]> + maxCount: string, + callback?: Callback + ): Result; + + popMessageFromWorkerQueue( + workerQueueKey: string, + callback?: Callback<[string, string] | null> ): Result<[string, string] | null, Context>; requeueMessage( @@ -2211,5 +2673,30 @@ function getMarQSClient() { enableRebalancing: !env.MARQS_DISABLE_REBALANCING, maximumNackCount: env.MARQS_MAXIMUM_NACK_COUNT, subscriber: concurrencyTracker, + sharedWorkerQueueConsumerIntervalMs: env.MARQS_SHARED_WORKER_QUEUE_CONSUMER_INTERVAL_MS, + sharedWorkerQueueMaxMessageCount: env.MARQS_SHARED_WORKER_QUEUE_MAX_MESSAGE_COUNT, + eagerDequeuingEnabled: env.MARQS_SHARED_WORKER_QUEUE_EAGER_DEQUEUE_ENABLED === "1", + sharedWorkerQueueCooloffCountThreshold: env.MARQS_SHARED_WORKER_QUEUE_COOLOFF_COUNT_THRESHOLD, + 
sharedWorkerQueueCooloffPeriodMs: env.MARQS_SHARED_WORKER_QUEUE_COOLOFF_PERIOD_MS, + workerOptions: { + enabled: env.MARQS_WORKER_ENABLED === "1", + pollIntervalMs: env.MARQS_WORKER_POLL_INTERVAL_MS, + immediatePollIntervalMs: env.MARQS_WORKER_IMMEDIATE_POLL_INTERVAL_MS, + shutdownTimeoutMs: env.MARQS_WORKER_SHUTDOWN_TIMEOUT_MS, + concurrency: { + workers: env.MARQS_WORKER_COUNT, + tasksPerWorker: env.MARQS_WORKER_CONCURRENCY_TASKS_PER_WORKER, + limit: env.MARQS_WORKER_CONCURRENCY_LIMIT, + }, + redisOptions: { + keyPrefix: KEY_PREFIX, + port: env.REDIS_PORT ?? undefined, + host: env.REDIS_HOST ?? undefined, + username: env.REDIS_USERNAME ?? undefined, + password: env.REDIS_PASSWORD ?? undefined, + enableAutoPipelining: true, + ...(env.REDIS_TLS_DISABLED === "true" ? {} : { tls: {} }), + }, + }, }); } diff --git a/apps/webapp/app/v3/marqs/marqsKeyProducer.ts b/apps/webapp/app/v3/marqs/marqsKeyProducer.ts index 673d180229..5c9c7238ad 100644 --- a/apps/webapp/app/v3/marqs/marqsKeyProducer.ts +++ b/apps/webapp/app/v3/marqs/marqsKeyProducer.ts @@ -2,6 +2,7 @@ import { MarQSKeyProducer, MarQSKeyProducerEnv, QueueDescriptor } from "./types" const constants = { SHARED_QUEUE: "sharedQueue", + SHARED_WORKER_QUEUE: "sharedWorkerQueue", CURRENT_CONCURRENCY_PART: "currentConcurrency", CONCURRENCY_LIMIT_PART: "concurrency", DISABLED_CONCURRENCY_LIMIT_PART: "disabledConcurrency", @@ -108,6 +109,10 @@ export class MarQSShortKeyProducer implements MarQSKeyProducer { return constants.SHARED_QUEUE; } + sharedWorkerQueueKey(): string { + return constants.SHARED_WORKER_QUEUE; + } + queueConcurrencyLimitKeyFromQueue(queue: string) { const descriptor = this.queueDescriptorFromQueue(queue); diff --git a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts index 075732544c..20abf87b32 100644 --- a/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts +++ b/apps/webapp/app/v3/marqs/sharedQueueConsumer.server.ts @@ -166,6 +166,7 @@ 
export class SharedQueueConsumer { private _runningDurationInMs = 0; private _currentMessage: MessagePayload | undefined; private _currentMessageData: SharedQueueMessageBody | undefined; + private _stopWorkerQueueConsumer?: () => void; constructor( private _providerSender: ZodMessageSender, @@ -173,7 +174,7 @@ export class SharedQueueConsumer { ) { this._options = { maximumItemsPerTrace: options.maximumItemsPerTrace ?? 500, - traceTimeoutSeconds: options.traceTimeoutSeconds ?? 10, + traceTimeoutSeconds: options.traceTimeoutSeconds ?? 1, nextTickInterval: options.nextTickInterval ?? 1000, // 1 second interval: options.interval ?? 100, // 100ms }; @@ -233,6 +234,10 @@ export class SharedQueueConsumer { return; } + console.log("❌ Stopping the SharedQueueConsumer"); + + this._stopWorkerQueueConsumer?.(); + logger.debug("Stopping shared queue consumer"); this._enabled = false; @@ -252,6 +257,9 @@ export class SharedQueueConsumer { this._reasonStats = {}; this._actionStats = {}; this._outcomeStats = {}; + this._stopWorkerQueueConsumer = marqs?.startSharedWorkerQueueConsumer(this._id); + + console.log("βœ… Started the SharedQueueConsumer"); this.#doWork().finally(() => {}); } @@ -429,7 +437,7 @@ export class SharedQueueConsumer { this._currentMessage = undefined; this._currentMessageData = undefined; - const message = await marqs?.dequeueMessageInSharedQueue(this._id); + const message = await marqs?.dequeueMessageFromSharedWorkerQueue(this._id); if (!message) { return { @@ -621,7 +629,8 @@ export class SharedQueueConsumer { const worker = deployment?.worker; if (!deployment || !worker) { - logger.error("No matching deployment found for task run", { + // This happens when a run is "WAITING_FOR_DEPLOY" and is expected + logger.info("No matching deployment found for task run", { queueMessage: message.data, messageId: message.messageId, }); diff --git a/apps/webapp/app/v3/marqs/types.ts b/apps/webapp/app/v3/marqs/types.ts index 98792a3099..69e75ac44a 100644 --- 
a/apps/webapp/app/v3/marqs/types.ts +++ b/apps/webapp/app/v3/marqs/types.ts @@ -36,6 +36,7 @@ export interface MarQSKeyProducer { envSharedQueueKey(env: MarQSKeyProducerEnv): string; sharedQueueKey(): string; sharedQueueScanPattern(): string; + sharedWorkerQueueKey(): string; queueCurrentConcurrencyScanPattern(): string; queueConcurrencyLimitKeyFromQueue(queue: string): string; queueCurrentConcurrencyKeyFromQueue(queue: string): string; diff --git a/apps/webapp/app/v3/runEngineHandlers.server.ts b/apps/webapp/app/v3/runEngineHandlers.server.ts index fffee59368..f40f4b0176 100644 --- a/apps/webapp/app/v3/runEngineHandlers.server.ts +++ b/apps/webapp/app/v3/runEngineHandlers.server.ts @@ -17,6 +17,7 @@ import { updateMetadataService } from "~/services/metadata/updateMetadataInstanc import { findEnvironmentFromRun } from "~/models/runtimeEnvironment.server"; import { env } from "~/env.server"; import { getTaskEventStoreTableForRun } from "./taskEventStore.server"; +import { MetadataTooLargeError } from "~/utils/packets"; export function registerRunEngineEventBusHandlers() { engine.eventBus.on("runSucceeded", async ({ time, run }) => { @@ -381,17 +382,31 @@ export function registerRunEngineEventBusHandlers() { try { await updateMetadataService.call(run.id, run.metadata, env); } catch (e) { - logger.error("[runMetadataUpdated] Failed to update metadata", { - taskRun: run.id, - error: - e instanceof Error - ? { - name: e.name, - message: e.message, - stack: e.stack, - } - : e, - }); + if (e instanceof MetadataTooLargeError) { + logger.warn("[runMetadataUpdated] Failed to update metadata, too large", { + taskRun: run.id, + error: + e instanceof Error + ? { + name: e.name, + message: e.message, + stack: e.stack, + } + : e, + }); + } else { + logger.error("[runMetadataUpdated] Failed to update metadata", { + taskRun: run.id, + error: + e instanceof Error + ? 
{ + name: e.name, + message: e.message, + stack: e.stack, + } + : e, + }); + } } }); diff --git a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts index ecbf8ef1bc..a27d738094 100644 --- a/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts +++ b/apps/webapp/app/v3/services/alerts/deliverAlert.server.ts @@ -158,7 +158,7 @@ export class DeliverAlertService extends BaseService { } } catch (error) { if (error instanceof SkipRetryError) { - logger.error("[DeliverAlert] Skipping retry", { + logger.warn("[DeliverAlert] Skipping retry", { reason: error.message, }); @@ -926,7 +926,7 @@ export class DeliverAlertService extends BaseService { }); if (!response.ok) { - logger.error("[DeliverAlert] Failed to send alert webhook", { + logger.info("[DeliverAlert] Failed to send alert webhook", { status: response.status, statusText: response.statusText, url: webhook.url, @@ -951,7 +951,7 @@ export class DeliverAlertService extends BaseService { return await client.chat.postMessage(message); } catch (error) { if (isWebAPIRateLimitedError(error)) { - logger.error("[DeliverAlert] Slack rate limited", { + logger.warn("[DeliverAlert] Slack rate limited", { error, message, }); @@ -960,7 +960,7 @@ export class DeliverAlertService extends BaseService { } if (isWebAPIHTTPError(error)) { - logger.error("[DeliverAlert] Slack HTTP error", { + logger.warn("[DeliverAlert] Slack HTTP error", { error, message, }); @@ -969,7 +969,7 @@ export class DeliverAlertService extends BaseService { } if (isWebAPIRequestError(error)) { - logger.error("[DeliverAlert] Slack request error", { + logger.warn("[DeliverAlert] Slack request error", { error, message, }); @@ -978,7 +978,7 @@ export class DeliverAlertService extends BaseService { } if (isWebAPIPlatformError(error)) { - logger.error("[DeliverAlert] Slack platform error", { + logger.warn("[DeliverAlert] Slack platform error", { error, message, }); @@ -991,10 +991,19 @@ export class 
DeliverAlertService extends BaseService { throw new SkipRetryError("Slack invalid blocks"); } + if (error.data.error === "account_inactive") { + logger.info("[DeliverAlert] Slack account inactive, skipping retry", { + error, + message, + }); + + throw new SkipRetryError("Slack account inactive"); + } + throw new Error("Slack platform error"); } - logger.error("[DeliverAlert] Failed to send slack message", { + logger.warn("[DeliverAlert] Failed to send slack message", { error, message, }); diff --git a/apps/webapp/app/v3/services/cancelTaskRunV1.server.ts b/apps/webapp/app/v3/services/cancelTaskRunV1.server.ts index fa30d7fc7b..8c208265de 100644 --- a/apps/webapp/app/v3/services/cancelTaskRunV1.server.ts +++ b/apps/webapp/app/v3/services/cancelTaskRunV1.server.ts @@ -43,7 +43,7 @@ export class CancelTaskRunServiceV1 extends BaseService { // Make sure the task run is in a cancellable state if (!isCancellableRunStatus(taskRun.status)) { - logger.error("Task run is not in a cancellable state", { + logger.info("Task run is not in a cancellable state", { runId: taskRun.id, status: taskRun.status, }); diff --git a/apps/webapp/app/v3/services/createCheckpoint.server.ts b/apps/webapp/app/v3/services/createCheckpoint.server.ts index ec87d5bacd..761f3a5185 100644 --- a/apps/webapp/app/v3/services/createCheckpoint.server.ts +++ b/apps/webapp/app/v3/services/createCheckpoint.server.ts @@ -132,7 +132,7 @@ export class CreateCheckpointService extends BaseService { } if (childRun.dependency?.resumedAt) { - logger.error("CreateCheckpointService: Child run already resumed", { + logger.info("CreateCheckpointService: Child run already resumed", { childRun, params, }); @@ -168,7 +168,7 @@ export class CreateCheckpointService extends BaseService { } if (batchRun.resumedAt) { - logger.error("CreateCheckpointService: Batch already resumed", { + logger.info("CreateCheckpointService: Batch already resumed", { batchRun, params, }); diff --git 
a/apps/webapp/app/v3/services/finalizeDeployment.server.ts b/apps/webapp/app/v3/services/finalizeDeployment.server.ts index 6e9b0c1da3..175986ae04 100644 --- a/apps/webapp/app/v3/services/finalizeDeployment.server.ts +++ b/apps/webapp/app/v3/services/finalizeDeployment.server.ts @@ -9,6 +9,8 @@ import { ChangeCurrentDeploymentService } from "./changeCurrentDeployment.server import { projectPubSub } from "./projectPubSub.server"; import { FailDeploymentService } from "./failDeployment.server"; import { TimeoutDeploymentService } from "./timeoutDeployment.server"; +import { engine } from "../runEngine.server"; +import { tryCatch } from "@trigger.dev/core"; export class FinalizeDeploymentService extends BaseService { public async call( @@ -116,6 +118,18 @@ export class FinalizeDeploymentService extends BaseService { }); } + if (deployment.worker.engine === "V2") { + const [schedulePendingVersionsError] = await tryCatch( + engine.scheduleEnqueueRunsForBackgroundWorker(deployment.worker.id) + ); + + if (schedulePendingVersionsError) { + logger.error("Error scheduling pending versions", { + error: schedulePendingVersionsError, + }); + } + } + await PerformDeploymentAlertsService.enqueue(deployment.id); return finalizedDeployment; diff --git a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts index 796a10a3b0..ab51df5de6 100644 --- a/apps/webapp/app/v3/services/finalizeTaskRun.server.ts +++ b/apps/webapp/app/v3/services/finalizeTaskRun.server.ts @@ -150,7 +150,7 @@ export class FinalizeTaskRunService extends BaseService { } if (isFatalRunStatus(run.status)) { - logger.error("FinalizeTaskRunService: Fatal status", { runId: run.id, status: run.status }); + logger.warn("FinalizeTaskRunService: Fatal status", { runId: run.id, status: run.status }); const extendedRun = await this._prisma.taskRun.findFirst({ where: { id: run.id }, @@ -170,7 +170,7 @@ export class FinalizeTaskRunService extends BaseService { }); if 
(extendedRun && extendedRun.runtimeEnvironment.type !== "DEVELOPMENT") { - logger.error("FinalizeTaskRunService: Fatal status, requesting worker exit", { + logger.warn("FinalizeTaskRunService: Fatal status, requesting worker exit", { runId: run.id, status: run.status, }); @@ -305,9 +305,10 @@ export class FinalizeTaskRunService extends BaseService { }); if (!run.lockedById) { - logger.error( + // This happens when a run is expired or was cancelled before an attempt, it's not a problem + logger.info( "FinalizeTaskRunService: No lockedById, so can't get the BackgroundWorkerTask. Not creating an attempt.", - { runId: run.id } + { runId: run.id, status: run.status } ); return; } diff --git a/apps/webapp/app/v3/services/replayTaskRun.server.ts b/apps/webapp/app/v3/services/replayTaskRun.server.ts index 104baa90b2..71b1028bc1 100644 --- a/apps/webapp/app/v3/services/replayTaskRun.server.ts +++ b/apps/webapp/app/v3/services/replayTaskRun.server.ts @@ -58,11 +58,12 @@ export class ReplayTaskRunService extends BaseService { const payloadType = payloadPacket.dataType; const metadata = overrideOptions.metadata ?? (await this.getExistingMetadata(existingTaskRun)); const tags = overrideOptions.tags ?? existingTaskRun.runTags; - // Only use the region from the existing task if neither environment is a development environment - const region = - existingEnvironment.type === "DEVELOPMENT" || authenticatedEnvironment.type === "DEVELOPMENT" - ? undefined - : existingTaskRun.workerQueue; + // Only use the region from the existing run if V2 engine and neither environment is dev + const ignoreRegion = + existingTaskRun.engine === "V1" || + existingEnvironment.type === "DEVELOPMENT" || + authenticatedEnvironment.type === "DEVELOPMENT"; + const region = ignoreRegion ? 
undefined : existingTaskRun.workerQueue; try { const taskQueue = await this._prisma.taskQueue.findFirst({ diff --git a/apps/webapp/app/v3/taskEventStore.server.ts b/apps/webapp/app/v3/taskEventStore.server.ts index 269aab84b4..27fc498112 100644 --- a/apps/webapp/app/v3/taskEventStore.server.ts +++ b/apps/webapp/app/v3/taskEventStore.server.ts @@ -23,6 +23,32 @@ export type TraceEvent = Pick< | "kind" >; +export type DetailedTraceEvent = Pick< + TaskEvent, + | "spanId" + | "parentId" + | "runId" + | "idempotencyKey" + | "message" + | "style" + | "startTime" + | "duration" + | "isError" + | "isPartial" + | "isCancelled" + | "level" + | "events" + | "environmentType" + | "kind" + | "taskSlug" + | "taskPath" + | "workerVersion" + | "queueName" + | "machinePreset" + | "properties" + | "output" +>; + export type TaskEventStoreTable = "taskEvent" | "taskEventPartitioned"; export function getTaskEventStoreTableForRun(run: { @@ -207,4 +233,95 @@ export class TaskEventStore { `; } } + + async findDetailedTraceEvents( + table: TaskEventStoreTable, + traceId: string, + startCreatedAt: Date, + endCreatedAt?: Date, + options?: { includeDebugLogs?: boolean } + ) { + const filterDebug = + options?.includeDebugLogs === false || options?.includeDebugLogs === undefined; + + if (table === "taskEventPartitioned") { + const createdAtBufferInMillis = env.TASK_EVENT_PARTITIONED_WINDOW_IN_SECONDS * 1000; + const startCreatedAtWithBuffer = new Date(startCreatedAt.getTime() - createdAtBufferInMillis); + const $endCreatedAt = endCreatedAt ?? 
new Date(); + const endCreatedAtWithBuffer = new Date($endCreatedAt.getTime() + createdAtBufferInMillis); + + return await this.readReplica.$queryRaw` + SELECT + "spanId", + "parentId", + "runId", + "idempotencyKey", + message, + style, + "startTime", + duration, + "isError", + "isPartial", + "isCancelled", + level, + events, + "environmentType", + "kind", + "taskSlug", + "taskPath", + "workerVersion", + "queueName", + "machinePreset", + properties, + output + FROM "TaskEventPartitioned" + WHERE + "traceId" = ${traceId} + AND "createdAt" >= ${startCreatedAtWithBuffer.toISOString()}::timestamp + AND "createdAt" < ${endCreatedAtWithBuffer.toISOString()}::timestamp + ${ + filterDebug + ? Prisma.sql`AND \"kind\" <> CAST('LOG'::text AS "public"."TaskEventKind")` + : Prisma.empty + } + ORDER BY "startTime" ASC + LIMIT ${env.MAXIMUM_TRACE_DETAILED_SUMMARY_VIEW_COUNT} + `; + } else { + return await this.readReplica.$queryRaw` + SELECT + "spanId", + "parentId", + "runId", + "idempotencyKey", + message, + style, + "startTime", + duration, + "isError", + "isPartial", + "isCancelled", + level, + events, + "environmentType", + "kind", + "taskSlug", + "taskPath", + "workerVersion", + "queueName", + "machinePreset", + properties, + output + FROM "TaskEvent" + WHERE "traceId" = ${traceId} + ${ + filterDebug + ? Prisma.sql`AND \"kind\" <> CAST('LOG'::text AS "public"."TaskEventKind")` + : Prisma.empty + } + ORDER BY "startTime" ASC + LIMIT ${env.MAXIMUM_TRACE_DETAILED_SUMMARY_VIEW_COUNT} + `; + } + } } diff --git a/docs/config/config-file.mdx b/docs/config/config-file.mdx index 8389da1d02..d11704d303 100644 --- a/docs/config/config-file.mdx +++ b/docs/config/config-file.mdx @@ -75,6 +75,20 @@ export default defineConfig({ }); ``` +## Custom tsconfig path + +You can specify a custom path to your tsconfig file. This is useful if you have a custom tsconfig file that you want to use. 
+ +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + dirs: ["./trigger"], + tsconfig: "./custom-tsconfig.json", // Custom tsconfig path +}); +``` + ## Lifecycle functions You can add lifecycle functions to get notified when any task starts, succeeds, or fails using `onStart`, `onSuccess` and `onFailure`: @@ -277,6 +291,21 @@ export default defineConfig({ The `logLevel` only determines which logs are sent to the Trigger.dev instance when using the `logger` API. All `console` based logs are always sent. +## Console logging + +You can control console logging behavior in development: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + // Your other config settings... + enableConsoleLogging: true, // Enable console logging while running dev CLI + disableConsoleInterceptor: false, // Disable console interceptor (prevents logs from being sent to the trigger.dev dashboard) +}); +``` + ## Max duration You can set the default `maxDuration` for all tasks in your project: @@ -293,6 +322,71 @@ export default defineConfig({ See our [maxDuration guide](/runs/max-duration) for more information. +## Process keep alive + +Keep the process alive after the task has finished running so the next task doesn't have to wait for the process to start up again. + +Note that the process could be killed at any time, and we don't make any guarantees about the process being alive for a certain amount of time + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + // Your other config settings... + processKeepAlive: true, +}); +``` + +You can pass an object to the `processKeepAlive` option to configure the behavior: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + // Your other config settings... 
+ processKeepAlive: { + enabled: true, + // The maximum number of executions per process. If the process has run more than this number of times, it will be killed. + maxExecutionsPerProcess: 50, // Default: 50 + // The maximum number of concurrent processes to keep alive in dev. + devMaxPoolSize: 25, // Default: 25 + }, +}); +``` + +## Development behavior + +You can control the working directory behavior in development: + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + // Your other config settings... + legacyDevProcessCwdBehaviour: false, // Default: true +}); +``` + +When set to `false`, the current working directory will be set to the build directory, which more closely matches production behavior. + +## CA certificates + +CA Cert file to be added to NODE_EXTRA_CA_CERT environment variable, useful in use with self signed cert in the trigger.dev environment. + +```ts trigger.config.ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", + // Your other config settings... 
+ // Must start with "./" and be relative to project root + extraCACerts: "./certs/ca.crt", +}); +``` + ## Build configuration You can customize the build process using the `build` option: @@ -306,6 +400,12 @@ export default defineConfig({ build: { // Don't bundle these packages external: ["header-generator"], + // Automatically detect external dependencies (default: true) + autoDetectExternal: true, + // Keep function/class names in bundle (default: true) + keepNames: true, + // Minify generated code (default: false, experimental) + minify: false, }, }); ``` diff --git a/docs/config/extensions/lightpanda.mdx b/docs/config/extensions/lightpanda.mdx index 0408d45ad5..1e1efc8f63 100644 --- a/docs/config/extensions/lightpanda.mdx +++ b/docs/config/extensions/lightpanda.mdx @@ -5,10 +5,6 @@ description: "Use the lightpanda build extension to add Lightpanda browser to yo tag: "v4" --- -import UpgradeToV4Note from "/snippets/upgrade-to-v4-note.mdx"; - - - To use the Lightpanda browser in your project, add the extension to your `trigger.config.ts` file: ```ts trigger.config.ts diff --git a/docs/docs.json b/docs/docs.json index 8a9c6ebdc5..5cd39f727e 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -165,6 +165,10 @@ } ] }, + { + "group": "MCP Server", + "pages": ["mcp-introduction", "mcp-tools", "mcp-agent-rules"] + }, { "group": "Using the Dashboard", "pages": ["run-tests", "troubleshooting-alerts", "replaying", "bulk-actions"] @@ -298,7 +302,6 @@ "guides/ai-agents/verify-news-article" ] }, - "guides/cursor-rules", "guides/frameworks/drizzle", "guides/frameworks/prisma", "guides/frameworks/sequin", @@ -620,6 +623,14 @@ "source": "/management/projects/runs", "destination": "/management/overview" }, + { + "source": "/guides/cursor-rules", + "destination": "/mcp-agent-rules" + }, + { + "source": "/agents/rules/overview", + "destination": "/mcp-agent-rules" + }, { "source": "/upgrade-to-v4", "destination": "/migrating-from-v3" diff --git 
a/docs/guides/examples/lightpanda.mdx b/docs/guides/examples/lightpanda.mdx index 9eab531176..d2a431624e 100644 --- a/docs/guides/examples/lightpanda.mdx +++ b/docs/guides/examples/lightpanda.mdx @@ -6,9 +6,6 @@ tag: "v4" --- import ScrapingWarning from "/snippets/web-scraping-warning.mdx"; -import UpgradeToV4Note from "/snippets/upgrade-to-v4-note.mdx"; - - ## Overview diff --git a/docs/guides/frameworks/bun.mdx b/docs/guides/frameworks/bun.mdx index 0d369fbf98..e5f4ab1cd0 100644 --- a/docs/guides/frameworks/bun.mdx +++ b/docs/guides/frameworks/bun.mdx @@ -1,7 +1,7 @@ --- title: "Bun guide" sidebarTitle: "Bun" -description: "This guide will show you how to setup Trigger.dev with Bun" +description: "This guide will show you how to setup Trigger.dev in your existing Bun project, test an example task, and view the run." icon: "js" --- @@ -9,17 +9,17 @@ import Prerequisites from "/snippets/framework-prerequisites.mdx"; import CliRunTestStep from "/snippets/step-run-test.mdx"; import CliViewRunStep from "/snippets/step-view-run.mdx"; -A specific Bun version is currently required for the dev command to work. This is due to a [bug](https://github.com/oven-sh/bun/issues/13799) with IPC. Please use Bun version 1.1.24 or lower: `curl -fsSL https://bun.sh/install | bash -s -- bun-v1.1.24` - -We now have experimental support for Bun. This guide will show you have to setup Trigger.dev in your existing Bun project, test an example task, and view the run. - - - The trigger.dev CLI does not yet support Bun. So you will need to run the CLI using Node.js. But + + The trigger.dev CLI does not yet support Bun. So you will need to run the CLI using Node.js. Bun will still be used to execute your tasks, even in the `dev` environment. - + +## Known issues + +- Certain OpenTelemetry instrumentation will not work with Bun, because Bun does not support Node's `register` hook. This means that some libraries that rely on this hook will not work with Bun. 
+ ## Initial setup @@ -109,7 +109,3 @@ yarn dlx trigger.dev@latest dev - -## Known issues - -- Certain OpenTelemetry instrumentation will not work with Bun, because Bun does not support Node's `register` hook. This means that some libraries that rely on this hook will not work with Bun. diff --git a/docs/guides/frameworks/nextjs.mdx b/docs/guides/frameworks/nextjs.mdx index 78a54b7c21..d8b35cbe64 100644 --- a/docs/guides/frameworks/nextjs.mdx +++ b/docs/guides/frameworks/nextjs.mdx @@ -14,7 +14,6 @@ import UsefulNextSteps from "/snippets/useful-next-steps.mdx"; import TriggerTaskNextjs from "/snippets/trigger-tasks-nextjs.mdx"; import NextjsTroubleshootingMissingApiKey from "/snippets/nextjs-missing-api-key.mdx"; import NextjsTroubleshootingButtonSyntax from "/snippets/nextjs-button-syntax.mdx"; -import WorkerFailedToStartWhenRunningDevCommand from "/snippets/worker-failed-to-start.mdx"; import AddEnvironmentVariables from "/snippets/add-environment-variables.mdx"; import DeployingYourTask from "/snippets/deplopying-your-task.mdx"; import VercelDocsCards from "/snippets/vercel-docs-cards.mdx"; @@ -432,7 +431,6 @@ You can test your revalidation task in the Trigger.dev dashboard on the testing - diff --git a/docs/guides/frameworks/supabase-edge-functions-basic.mdx b/docs/guides/frameworks/supabase-edge-functions-basic.mdx index f848f05000..db050292ff 100644 --- a/docs/guides/frameworks/supabase-edge-functions-basic.mdx +++ b/docs/guides/frameworks/supabase-edge-functions-basic.mdx @@ -14,7 +14,6 @@ import UsefulNextSteps from "/snippets/useful-next-steps.mdx"; import TriggerTaskNextjs from "/snippets/trigger-tasks-nextjs.mdx"; import NextjsTroubleshootingMissingApiKey from "/snippets/nextjs-missing-api-key.mdx"; import NextjsTroubleshootingButtonSyntax from "/snippets/nextjs-button-syntax.mdx"; -import WorkerFailedToStartWhenRunningDevCommand from "/snippets/worker-failed-to-start.mdx"; import SupabaseDocsCards from "/snippets/supabase-docs-cards.mdx"; import 
SupabaseAuthInfo from "/snippets/supabase-auth-info.mdx"; diff --git a/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx b/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx index 89f31b4b53..81b04cae6d 100644 --- a/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx +++ b/docs/guides/frameworks/supabase-edge-functions-database-webhooks.mdx @@ -10,7 +10,6 @@ import UsefulNextSteps from "/snippets/useful-next-steps.mdx"; import TriggerTaskNextjs from "/snippets/trigger-tasks-nextjs.mdx"; import NextjsTroubleshootingMissingApiKey from "/snippets/nextjs-missing-api-key.mdx"; import NextjsTroubleshootingButtonSyntax from "/snippets/nextjs-button-syntax.mdx"; -import WorkerFailedToStartWhenRunningDevCommand from "/snippets/worker-failed-to-start.mdx"; import SupabaseDocsCards from "/snippets/supabase-docs-cards.mdx"; import SupabaseAuthInfo from "/snippets/supabase-auth-info.mdx"; diff --git a/docs/images/claude-code-subagent.png b/docs/images/claude-code-subagent.png new file mode 100644 index 0000000000..0adf0d5c3c Binary files /dev/null and b/docs/images/claude-code-subagent.png differ diff --git a/docs/mcp-agent-rules.mdx b/docs/mcp-agent-rules.mdx new file mode 100644 index 0000000000..664d8bcf29 --- /dev/null +++ b/docs/mcp-agent-rules.mdx @@ -0,0 +1,118 @@ +--- +title: "Agent rules" +sidebarTitle: "Agent rules" +description: "Learn how to use the Trigger.dev agent rules with the MCP server" +tag: "new" +--- + +## What are Trigger.dev agent rules? + +Trigger.dev agent rules are comprehensive instruction sets that guide AI assistants to write optimal Trigger.dev code. These rules ensure your AI assistant understands best practices, current APIs, and recommended patterns when working with Trigger.dev projects. 
+ +## Installation + +Install the agent rules with the following command: + +```bash +npx trigger.dev@latest install-rules +``` + +## Available rule sets + +We provide five specialized rule sets, each optimized for different aspects of Trigger.dev development: + +| Rule set | Tokens | Description | GitHub | +|:---------|:-------|:------------|:------------| +| **Basic tasks** | 1,200 | Essential rules for writing basic Trigger.dev tasks and fundamental patterns | [View](https://github.com/triggerdotdev/trigger.dev/blob/main/rules/4.0.0/basic-tasks.md) | +| **Advanced tasks** | 3,000 | Comprehensive rules for complex workflows, error handling, and advanced task patterns | [View](https://github.com/triggerdotdev/trigger.dev/blob/main/rules/4.0.0/advanced-tasks.md) | +| **Scheduled tasks** | 780 | Specialized guidance for cron jobs, scheduled workflows, and time-based triggers | [View](https://github.com/triggerdotdev/trigger.dev/blob/main/rules/4.0.0/scheduled-tasks.md) | +| **Configuration** | 1,900 | Complete guide for trigger.config.ts setup, environment configuration, and project structure | [View](https://github.com/triggerdotdev/trigger.dev/blob/main/rules/4.0.0/config.md) | +| **Realtime** | 1,700 | Using Trigger.dev Realtime features and frontend integration patterns | [View](https://github.com/triggerdotdev/trigger.dev/blob/main/rules/4.0.0/realtime.md) | + +## Claude Code subagent + +For Claude Code users, we provide a subagent called `trigger-dev-expert` that's an expert at writing well-structured Trigger.dev code. + +### Installation + +The subagent is available as an option when running the rules installation command. Select "Claude Code" as your client and choose to include the subagent when prompted. 
+ +![Claude Code subagent installation](/images/claude-code-subagent.png) + +### Usage + +Activate the subagent in your prompts by requesting it explicitly: + +```markdown +use the trigger-dev-expert subagent to create a trigger.dev job that accepts a video url, processes it with ffmpeg to extract the audio, runs the audio through a text-to-speech API like openai, and then uploads both the transcription and the audio to s3 +``` + +The subagent works best when combined with the appropriate rule sets installed alongside it, providing both high-level architectural guidance and detailed implementation knowledge. + +## Supported AI clients + +The Trigger.dev rules work across a wide range of AI coding assistants and editors: + +| Client | Rule activation | Docs | +|:--------|:----------------|:--------------| +| **Cursor** | Automatic when working in trigger directories | [Link](https://docs.cursor.com/en/context/rules#rules/) | +| **Claude Code** | Context-aware activation + custom subagent | [Link](https://docs.anthropic.com/en/docs/claude-code) | +| **VSCode Copilot** | Integration with GitHub Copilot chat | [Link](https://code.visualstudio.com/docs/copilot/overview) | +| **Windsurf** | Automatic activation in Trigger.dev projects | [Link](https://docs.windsurf.com/windsurf/cascade/memories#rules) | +| **Gemini CLI** | Command-line integration | [Link](https://ai.google.dev/gemini-api/docs) | +| **Cline** | Automatic context detection | [Link](https://github.com/cline/cline) | +| **Sourcegraph AMP** | Code intelligence integration | [Link](https://sourcegraph.com/docs) | +| **Kilo** | Custom rule integration | [Link](https://kilocode.ai/docs/advanced-usage/custom-rules) | +| **Ruler** | Rule management | [Link](https://github.com/intellectronica/ruler) | +| **AGENTS.md** | Universal format for OpenAI Codex, Jules, OpenCode, etc. 
| | + +### Rule activation behavior + +Different AI tools handle rules differently: + +- **Automatic Activation**: Cursor, Windsurf, VSCode Copilot, and Cline automatically apply relevant rules when working in Trigger.dev projects or when `trigger.config.ts` is detected +- **Context-Aware**: Claude Code intelligently applies rules based on the current context and file types +- **Manual Integration**: AGENTS.md clients and others append rules to configuration files for manual activation + +## Keeping rules updated + +Trigger.dev rules are regularly updated to reflect new features, API changes, and best practices. The CLI includes automatic update detection. + +### Automatic update notifications + +When running `npx trigger.dev@latest dev`, you'll receive notifications when newer rule versions are available with a simple update command. + +### Manual updates + +Update rules anytime with: + +```bash +npx trigger.dev@latest install-rules +``` + +The update process replaces existing rules without creating duplicates, keeping your configuration files clean and organized. + +### Why updates matter + +- **Current API patterns**: Access the latest Trigger.dev APIs and features +- **Performance optimizations**: Benefit from improved patterns and practices +- **Deprecated pattern avoidance**: Prevent AI assistants from generating outdated code +- **New feature support**: Immediate access to newly released capabilities + +## Getting started + +1. Install the rules: + +```bash +npx trigger.dev@latest install-rules +``` + +2. Follow the prompts to install the rules for your AI client. + +3. Consider installing the `trigger-dev-expert` subagent if using Claude Code. 
+ +## Next steps + +- [Install the MCP server](/mcp-introduction) for complete Trigger.dev integration +- [Explore MCP tools](/mcp-tools) for project management and task execution + diff --git a/docs/mcp-introduction.mdx b/docs/mcp-introduction.mdx new file mode 100644 index 0000000000..d9dc3474e1 --- /dev/null +++ b/docs/mcp-introduction.mdx @@ -0,0 +1,187 @@ +--- +title: "MCP Introduction" +sidebarTitle: "Introduction" +description: "Learn how to install and configure the Trigger.dev MCP Server" +tag: "new" +--- + +## What is the Trigger.dev MCP Server? + +The Trigger.dev MCP (Model Context Protocol) Server enables AI assistants to interact directly with your Trigger.dev projects. It provides a comprehensive set of tools to: + +- Search Trigger.dev documentation +- Initialize new Trigger.dev projects +- List and manage your projects and organizations +- Get task information and trigger task runs +- Deploy projects to different environments +- Monitor run details and list runs with filtering options + +## Installation + +### Automatic Installation (Recommended) + +The easiest way to install the Trigger.dev MCP Server is using the interactive installation wizard: + +```bash +npx trigger.dev@latest install-mcp +``` + +This command will guide you through: + +1. Selecting which MCP clients to configure +2. Choosing installation scope (user, project, or local) +3. 
Automatically configuring the selected clients + +## Command Line Options + +The `install-mcp` command supports the following options: + +### Core Options + +- `-p, --project-ref ` - Scope the MCP server to a specific Trigger.dev project by providing its project ref +- `-t, --tag ` - The version of the trigger.dev CLI package to use for the MCP server (default: latest or v4-beta) +- `--dev-only` - Restrict the MCP server to the dev environment only +- `--yolo` - Install the MCP server into all supported clients automatically +- `--scope ` - Choose the scope of the MCP server: `user`, `project`, or `local` +- `--client ` - Choose specific client(s) to install into + +### Configuration Options + +- `--log-file ` - Configure the MCP server to write logs to a file +- `-a, --api-url ` - Configure a custom Trigger.dev API URL +- `-l, --log-level ` - Set CLI log level (debug, info, log, warn, error, none) + +## Authentication + +You can use the MCP server without authentication with the `search_docs` tool, but for any other tool call you will need to authenticate the MCP server via the same method as the [Trigger.dev CLI](/cli-login-commands). The first time you attempt to use a tool that requires authentication, you will be prompted to authenticate the MCP server via the MCP client. 
+ +### Examples + +Install for all supported clients: + +```bash +npx trigger.dev@latest install-mcp --yolo +``` + +Install for specific clients: + +```bash +npx trigger.dev@latest install-mcp --client claude-code cursor --scope user +``` + +Install with development environment restriction: + +```bash +npx trigger.dev@latest install-mcp --dev-only --project-ref proj_abc123 +``` + +## Supported MCP Clients + +The Trigger.dev MCP Server supports the following clients: + +| Client | Scope Options | Configuration File | Documentation | +| -------------------- | -------------------- | --------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ | +| **Claude Code** | user, project, local | `~/.claude.json` or `./.mcp.json` (project/local scope) | [Claude Code MCP Docs](https://docs.anthropic.com/en/docs/claude-code/mcp) | +| **Cursor** | user, project | `~/.cursor/mcp.json` (user) or `./.cursor/mcp.json` (project) | [Cursor MCP Docs](https://docs.cursor.com/features/mcp) | +| **VSCode** | user, project | `~/Library/Application Support/Code/User/mcp.json` (user) or `./.vscode/mcp.json` (project) | [VSCode MCP Docs](https://code.visualstudio.com/docs/copilot/chat/mcp-servers) | +| **Zed** | user | `~/.config/zed/settings.json` | [Zed Context Servers Docs](https://zed.dev/docs/context-servers) | +| **Windsurf** | user | `~/.codeium/windsurf/mcp_config.json` | [Windsurf MCP Docs](https://docs.codeium.com/windsurf/mcp) | +| **Gemini CLI** | user, project | `~/.gemini/settings.json` (user) or `./.gemini/settings.json` (project) | [Gemini CLI MCP Tutorial](https://medium.com/@joe.njenga/gemini-cli-mcp-tutorial-setup-commands-practical-use-step-by-step-example-b57f55db5f4a) | +| **Charm Crush** | user, project, local | `~/.config/crush/crush.json` (user), `./crush.json` (project), or 
`./.crush.json` (local) | [Charm MCP Docs](https://github.com/charmbracelet/mcp) | +| **Cline** | user | `~/Library/Application Support/Code/User/globalStorage/saoudrizwan.claude-dev/settings/cline_mcp_settings.json` | [Cline MCP Docs](https://github.com/saoudrizwan/claude-dev#mcp) | +| **OpenAI Codex CLI** | user | `~/.codex/config.toml` | See OpenAI Codex CLI documentation for MCP configuration | +| **Sourcegraph AMP** | user | `~/.config/amp/settings.json` | [Sourcegraph AMP MCP Docs](https://docs.sourcegraph.com/amp/mcp) | +| **opencode** | user, project | `~/.config/opencode/opencode.json` (user) or `./opencode.json` (project) | [opencode MCP Docs](https://opencode.ai/docs/mcp-servers/) | + +## Manual Configuration + +If your client isn't directly supported by the installer, you can configure it manually. The MCP server uses the following configuration: + +**Server Name:** `trigger` + +**Command:** `npx` + +**Arguments:** `["trigger.dev@latest", "mcp"]` + +### Example JSON Configuration + +```json +{ + "mcpServers": { + "trigger": { + "command": "npx", + "args": ["trigger.dev@latest", "mcp"] + } + } +} +``` + +### Example TOML Configuration (for Codex CLI) + +```toml +[mcp_servers.trigger] +command = "npx" +args = ["trigger.dev@latest", "mcp"] +``` + +### Additional Options + +You can add these optional arguments to customize the server behavior: + +- `--log-file ` - Log to a specific file +- `--api-url ` - Use a custom Trigger.dev API URL +- `--dev-only` - Restrict to dev environment only +- `--project-ref ` - Scope to a specific project + +## Environment-Specific Configuration + +### Development Only + +To restrict the MCP server to only work with the development environment: + +```json +{ + "mcpServers": { + "trigger": { + "command": "npx", + "args": ["trigger.dev@latest", "mcp", "--dev-only"] + } + } +} +``` + +### Project-Scoped + +To scope the server to a specific project: + +```json +{ + "mcpServers": { + "trigger": { + "command": "npx", + "args": 
["trigger.dev@latest", "mcp", "--project-ref", "proj_your_project_ref"] + } + } +} +``` + +## Verification + +After installation, restart your MCP client and look for a server named "trigger". The server should connect automatically and provide access to all Trigger.dev tools. + +## Getting Started + +Once installed, you can start using the MCP server by asking your AI assistant questions like: + +- `"Search the trigger docs for a ffmpeg example"` +- `"Initialize trigger.dev in my project"` +- `"Get all tasks in my project"` +- `"Trigger my foobar task with a sample payload"` +- `"Get the details of the latest run for my foobar task"` +- `"List all runs for my foobar task"` +- `"Deploy my project to staging"` +- `"Deploy my project to production"` + +## Next Steps + +- [Explore available MCP tools](/mcp-tools) diff --git a/docs/mcp-tools.mdx b/docs/mcp-tools.mdx new file mode 100644 index 0000000000..0163de97a4 --- /dev/null +++ b/docs/mcp-tools.mdx @@ -0,0 +1,527 @@ +--- +title: "MCP Tools" +sidebarTitle: "Tools" +description: "Learn about the tools available in the Trigger.dev MCP Server" +tag: "new" +--- + +The Trigger.dev MCP Server provides a comprehensive set of tools that enable AI assistants to interact with your Trigger.dev projects. These tools cover everything from project management to task execution and monitoring. + +## Documentation and Search Tools + +### search_docs + +Search across the Trigger.dev documentation to find relevant information, code examples, API references, and guides. + + + The search query to find information in the Trigger.dev documentation + + +**Usage Examples:** + +- "How do I create a scheduled task?" +- "webhook examples" +- "deployment configuration" +- "error handling patterns" + + +```json Example Usage +{ + "tool": "search_docs", + "arguments": { + "query": "webhook examples" + } +} +``` + + +## Project Management Tools + +### list_projects + +List all projects in your Trigger.dev account. 
+ +**No parameters required** + + + Array of project objects containing project details, IDs, and metadata + + + +```json Example Response +{ + "projects": [ + { + "id": "proj_abc123", + "name": "My App", + "slug": "my-app", + "organizationId": "org_xyz789" + } + ] +} +``` + + +### list_orgs + +List all organizations you have access to. + +**No parameters required** + + + Array of organization objects containing organization details and metadata + + +### create_project_in_org + +Create a new project in an organization. + + + The organization to create the project in, can either be the organization slug or the ID. Use the + `list_orgs` tool to get a list of organizations and ask the user to select one. + + + + The name of the project to create + + + +```json Example Usage +{ + "tool": "create_project_in_org", + "arguments": { + "orgParam": "my-org", + "name": "New Project" + } +} +``` + + +### initialize_project + +Initialize Trigger.dev in your project with automatic setup and configuration. + + + The organization to create the project in, can either be the organization slug or the ID. Use the + `list_orgs` tool to get a list of organizations and ask the user to select one. + + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The name of the project to create. If projectRef is not provided, we will use this name to create + a new project in the organization you select. + + + + The current working directory of the project + + +## Task Management Tools + +### get_tasks + +Get all tasks in a project. + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). 
If not provided, we will try to find the config file in the + current working directory. + + + + The environment to get tasks for. Options: `dev`, `staging`, `prod`, `preview` + + + + The branch to get tasks for, only used for preview environments + + + +```json Example Usage +{ + "tool": "get_tasks", + "arguments": { + "projectRef": "proj_abc123", + "environment": "dev" + } +} +``` + + +### trigger_task + +Trigger a task to run. + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). + + + + The environment to trigger the task in. Options: `dev`, `staging`, `prod`, `preview` + + + + The branch to trigger the task in, only used for preview environments + + + + The ID/slug of the task to trigger. Use the `get_tasks` tool to get a list of tasks and ask the + user to select one if it's not clear which one to use. + + + + The payload to trigger the task with, must be a valid JSON string + + + + Additional options for the task run + + + The name of the queue to trigger the task in, by default will use the queue configured in the + task + + + The delay before the task run is executed + + + The idempotency key to use for the task run + + + The machine preset to use for the task run. Options: `micro`, `small-1x`, `small-2x`, + `medium-1x`, `medium-2x`, `large-1x`, `large-2x` + + + The maximum number of attempts to retry the task run + + + The maximum duration in seconds of the task run + + + Tags to add to the task run. Must be less than 128 characters and cannot have more than 5 + + + The time to live of the task run. If the run doesn't start executing within this time, it will + be automatically cancelled. 
+ + + + + +```json Example Usage +{ + "tool": "trigger_task", + "arguments": { + "projectRef": "proj_abc123", + "taskId": "email-notification", + "payload": "{\"email\": \"user@example.com\", \"subject\": \"Hello World\"}", + "options": { + "tags": ["urgent"], + "maxAttempts": 3 + } + } +} +``` + + +## Run Monitoring Tools + +### get_run_details + +Get the details of a specific task run. + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). + + + + The environment to get the run details from. Options: `dev`, `staging`, `prod`, `preview` + + + + The branch to get the run details from, only used for preview environments + + + + The ID of the run to get the details of, starts with `run_` + + + + Enable debug mode to get more detailed information about the run, including the entire trace (all logs and spans for the run and any child run). Set this to true if prompted to debug a run. + + +### cancel_run + +Cancel a running task. + + + The ID of the run to cancel, starts with `run_` + + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). + + + + The environment to cancel the run in. Options: `dev`, `staging`, `prod`, `preview` + + + + The branch to cancel the run in, only used for preview environments + + + +```json Example Usage +{ + "tool": "cancel_run", + "arguments": { + "runId": "run_abc123", + "projectRef": "proj_abc123" + } +} +``` + + +### list_runs + +List all runs for a project with comprehensive filtering options. 
+ + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). + + + + The environment to list runs from. Options: `dev`, `staging`, `prod`, `preview` + + + + The branch to list runs from, only used for preview environments + + + + The cursor to use for pagination, starts with `run_` + + + + The number of runs to list in a single page. Up to 100 + + + + Filter for runs with this run status. Options: `PENDING_VERSION`, `QUEUED`, `DEQUEUED`, + `EXECUTING`, `WAITING`, `COMPLETED`, `CANCELED`, `FAILED`, `CRASHED`, `SYSTEM_FAILURE`, `DELAYED`, + `EXPIRED`, `TIMED_OUT` + + + + Filter for runs that match this task identifier + + + + Filter for runs that match this version, e.g. `20250808.3` + + + + Filter for runs that include this tag + + + + Filter for runs created after this ISO 8601 timestamp + + + + Filter for runs created before this ISO 8601 timestamp + + + + Filter for runs created in the last N time period. Examples: `7d`, `30d`, `365d` + + + + Filter for runs that match this machine preset. Options: `micro`, `small-1x`, `small-2x`, + `medium-1x`, `medium-2x`, `large-1x`, `large-2x` + + + +```json Example Usage +{ + "tool": "list_runs", + "arguments": { + "projectRef": "proj_abc123", + "status": "COMPLETED", + "limit": 10, + "period": "7d" + } +} +``` + + +## Deployment Tools + +### deploy + +Deploy a project to staging or production environments. + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). 
+ + + + The environment to deploy to. Options: `staging`, `prod`, `preview` + + + + The branch to deploy, only used for preview environments + + + + Skip promoting the deployment to the current deployment for the environment + + + + Skip syncing environment variables when using the syncEnvVars extension + + + + Skip checking for @trigger.dev package updates + + + +```json Example Usage +{ + "tool": "deploy", + "arguments": { + "projectRef": "proj_abc123", + "environment": "prod", + "skipUpdateCheck": true + } +} +``` + + +### list_deployments + +List deployments for a project with comprehensive filtering options. + + + The trigger.dev project ref, starts with `proj_`. We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). + + + + The environment to list deployments for. Options: `staging`, `prod`, `preview` + + + + The branch to list deployments from, only used for preview environments + + + + The deployment ID to start the search from, to get the next page + + + + The number of deployments to return, defaults to 20 (max 100) + + + + Filter deployments that are in this status. Options: `PENDING`, `BUILDING`, `DEPLOYING`, `DEPLOYED`, `FAILED`, `CANCELED`, `TIMED_OUT` + + + + The date to start the search from, in ISO 8601 format + + + + The date to end the search, in ISO 8601 format + + + + The period to search within. Examples: `1d`, `7d`, `3h` + + + +```json Example Usage +{ + "tool": "list_deployments", + "arguments": { + "projectRef": "proj_abc123", + "environment": "prod", + "status": "DEPLOYED", + "limit": 10 + } +} +``` + + +### list_preview_branches + +List all preview branches in the project. + + + The trigger.dev project ref, starts with `proj_`. 
We will attempt to automatically detect the + project ref if running inside a directory that includes a trigger.config.ts file. + + + + The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the + root dir (like in a monorepo setup). If not provided, we will try to find the config file in the + current working directory. + + + +```json Example Usage +{ + "tool": "list_preview_branches", + "arguments": { + "projectRef": "proj_abc123" + } +} +``` + + + + The deploy tool and list_preview_branches tool are not available when the MCP server is running with the `--dev-only` flag. + diff --git a/docs/migrating-from-v3.mdx b/docs/migrating-from-v3.mdx index cb1b01ce1e..726b0721d5 100644 --- a/docs/migrating-from-v3.mdx +++ b/docs/migrating-from-v3.mdx @@ -210,6 +210,17 @@ await myTask.trigger({ foo: "bar" }); // Will use the queue defined on the task await myTask2.trigger({ foo: "bar" }); // Will use the queue defined on the task ``` +If you're using `concurrencyKey` you can specify the `queue` and `concurrencyKey` like this: + +```ts +const handle = await generatePullRequest.trigger(data, { + queue: "paid-users", + concurrencyKey: data.userId, +}); +``` + +For each unique value of `concurrencyKey`, a new queue will be created using the `concurrencyLimit` from the queue. This allows you to have a queue per user. + ### Lifecycle hooks We've changed the function signatures of the lifecycle hooks to be more consistent and easier to use, by unifying all the parameters into a single object that can be destructured. diff --git a/docs/queue-concurrency.mdx b/docs/queue-concurrency.mdx index cb85e2c829..fc4829757d 100644 --- a/docs/queue-concurrency.mdx +++ b/docs/queue-concurrency.mdx @@ -3,7 +3,9 @@ title: "Concurrency & Queues" description: "Configure what you want to happen when there is more than one run at a time." --- -When you trigger a task, it isn't executed immediately. 
Instead, the task [run](/runs) is placed into a queue for execution. By default, each task gets its own queue with unbounded concurrencyβ€”meaning the task runs as soon as resources are available, subject only to the overall concurrency limits of your environment. If you need more control (for example, to limit concurrency or share limits across multiple tasks), you can define a custom queue as described later in this document. +When you trigger a task, it isn't executed immediately. Instead, the task [run](/runs) is placed into a queue for execution. + +By default, each task gets its own queue and the concurrency is only limited by your environment concurrency limit. If you need more control (for example, to limit concurrency or share limits across multiple tasks), you can define a custom queue as described later. Controlling concurrency is useful when you have a task that can't be run concurrently, or when you want to limit the number of runs to avoid overloading a resource. @@ -11,13 +13,14 @@ It's important to note that only actively executing runs count towards concurren ## Default concurrency -By default, all tasks have an unbounded concurrency limit, limited only by the overall concurrency limits of your environment. This means that each task could possibly "fill up" the entire -concurrency limit of your environment. - -Each individual queue has a maximum concurrency limit equal to your environment's base concurrency limit. If you don't explicitly set a queue's concurrency limit, it will default to your environment's base concurrency limit. +By default, all tasks have an unbounded concurrency limit, limited only by the overall concurrency limits of your environment. - Your environment has a base concurrency limit and a burstable limit (default burst factor of 2.0x the base limit). Individual queues are limited by the base concurrency limit, not the burstable limit. 
For example, if your base limit is 10, your environment can burst up to 20 concurrent runs, but any single queue can have at most 10 concurrent runs. If you're a paying customer you can request higher limits by [contacting us](https://www.trigger.dev/contact). + Your environment has a base concurrency limit and a burstable limit (default burst factor of 2.0x + the base limit). Individual queues are limited by the base concurrency limit, not the burstable + limit. For example, if your base limit is 10, your environment can burst up to 20 concurrent runs, + but any single queue can have at most 10 concurrent runs. If you're a paying customer you can + request higher burst limits by [contacting us](https://www.trigger.dev/contact). ## Setting task concurrency @@ -68,13 +71,18 @@ export const task2 = task({ In this example, `task1` and `task2` share the same queue, so only one of them can run at a time. -## Setting the concurrency when you trigger a run +## Setting the queue when you trigger a run -When you trigger a task you can override the concurrency limit. This is really useful if you sometimes have high priority runs. +When you trigger a task you can override the default queue. This is really useful if you sometimes have high priority runs. 
-The task: +The task and queue definition: ```ts /trigger/override-concurrency.ts +const paidQueue = queue({ + name: "paid-users", + concurrencyLimit: 10, +}); + export const generatePullRequest = task({ id: "generate-pull-request", queue: { @@ -87,7 +95,7 @@ export const generatePullRequest = task({ }); ``` -Triggering from your backend and overriding the concurrency: +Triggering from your backend and overriding the queue: ```ts app/api/push/route.ts import { generatePullRequest } from "~/trigger/override-concurrency"; @@ -96,19 +104,15 @@ export async function POST(request: Request) { const data = await request.json(); if (data.branch === "main") { - //trigger the task, with a different queue + //trigger the task, with the paid users queue const handle = await generatePullRequest.trigger(data, { - queue: { - //the "main-branch" queue will have a concurrency limit of 10 - //this triggered run will use that queue - name: "main-branch", // Make sure to change the queue name or the task concurrency limit will be updated - concurrencyLimit: 10, - }, + // Set the paid users queue + queue: "paid-users", }); return Response.json(handle); } else { - //triggered with the default (concurrency of 1) + //triggered with the default queue (concurrency of 1) const handle = await generatePullRequest.trigger(data); return Response.json(handle); } @@ -119,7 +123,7 @@ export async function POST(request: Request) { If you're building an application where you want to run tasks for your users, you might want a separate queue for each of your users (or orgs, projects, etc.). -You can do this by using `concurrencyKey`. It creates a separate queue for each value of the key. +You can do this by using `concurrencyKey`. It creates a copy of the queue for each unique value of the key. 
Your backend code: @@ -130,26 +134,20 @@ export async function POST(request: Request) { const data = await request.json(); if (data.isFreeUser) { - //free users can only have 1 PR generated at a time + //the "free-users" queue has a concurrency limit of 1 const handle = await generatePullRequest.trigger(data, { - queue: { - //every free user gets a queue with a concurrency limit of 1 - name: "free-users", - concurrencyLimit: 1, - }, + queue: "free-users", + //this creates a free-users queue for each user concurrencyKey: data.userId, }); //return a success response with the handle return Response.json(handle); } else { - //trigger the task, with a different queue + //the "paid-users" queue has a concurrency limit of 10 const handle = await generatePullRequest.trigger(data, { - queue: { - //every paid user gets a queue with a concurrency limit of 10 - name: "paid-users", - concurrencyLimit: 10, - }, + queue: "paid-users", + //this creates a paid-users queue for each user concurrencyKey: data.userId, }); @@ -161,7 +159,7 @@ export async function POST(request: Request) { ## Concurrency and subtasks -When you trigger a task that has subtasks, the subtasks will not inherit the concurrency settings of the parent task. Unless otherwise specified, subtasks will run on their own queue +When you trigger a task that has subtasks, the subtasks will not inherit the queue from the parent task. Unless otherwise specified, subtasks will run on their own queue ```ts /trigger/subtasks.ts export const parentTask = task({ @@ -188,22 +186,19 @@ With our [task checkpoint system](/how-it-works#the-checkpoint-resume-system), t Concurrency is only released when a run reaches a waitpoint and is checkpointed. When a run is checkpointed, it transitions to the `WAITING` state and releases its concurrency slot back to both the queue and the environment, allowing other runs to execute or resume. 
This means that: + - Only actively executing runs count towards concurrency limits - Runs in the `WAITING` state (checkpointed at waitpoints) do not consume concurrency slots - You can have more runs in the `WAITING` state than your queue's concurrency limit - When a waiting run resumes (e.g., when a subtask completes), it must re-acquire a concurrency slot For example, if you have a queue with a `concurrencyLimit` of 1: + - You can only have exactly 1 run executing at a time - You may have multiple runs in the `WAITING` state that belong to that queue - When the executing run reaches a waitpoint and checkpoints, it releases its slot - The next queued run can then begin execution - - We sometimes refer to the parent task as the "parent" and the subtask as the "child". Subtask and - child task are used interchangeably. We apologize for the confusion. - - ### Waiting for a subtask on a different queue When a parent task triggers and waits for a subtask on a different queue, the parent task will checkpoint and release its concurrency slot once it reaches the wait point. This prevents environment deadlocks where all concurrency slots would be occupied by waiting tasks. @@ -231,80 +226,3 @@ export const subtask = task({ ``` When the parent task reaches the `triggerAndWait` call, it checkpoints and transitions to the `WAITING` state, releasing its concurrency slot back to both its queue and the environment. Once the subtask completes, the parent task will resume and re-acquire a concurrency slot. - -### Waiting for a subtask on the same queue - -When a parent task and subtask share the same queue, the checkpointing behavior ensures that recursive task execution can proceed without deadlocks, up to the queue's concurrency limit. 
- -```ts /trigger/waiting-same-queue.ts -export const myQueue = queue({ - name: "my-queue", - concurrencyLimit: 1, -}); - -export const parentTask = task({ - id: "parent-task", - queue: myQueue, - run: async (payload) => { - //trigger a subtask and wait for it to complete - await subtask.triggerAndWait(payload); - }, -}); - -export const subtask = task({ - id: "subtask", - queue: myQueue, - run: async (payload) => { - //... - }, -}); -``` - -When the parent task checkpoints at the `triggerAndWait` call, it releases its concurrency slot back to the queue, allowing the subtask to execute. Once the subtask completes, the parent task will resume. - -However, you can only have recursive waits up to your queue's concurrency limit. If you exceed this limit, you will receive a `RECURSIVE_WAIT_DEADLOCK` error: - -```ts /trigger/deadlock.ts -export const myQueue = queue({ - name: "my-queue", - concurrencyLimit: 1, -}); - -export const parentTask = task({ - id: "parent-task", - queue: myQueue, - run: async (payload) => { - await subtask.triggerAndWait(payload); - }, -}); - -export const subtask = task({ - id: "subtask", - queue: myQueue, - run: async (payload) => { - await subsubtask.triggerAndWait(payload); // This will cause a deadlock - }, -}); - -export const subsubtask = task({ - id: "subsubtask", - queue: myQueue, - run: async (payload) => { - //... - }, -}); -``` - -This results in a `RECURSIVE_WAIT_DEADLOCK` error because the queue can only support one level of recursive waiting with a concurrency limit of 1: - -![Recursive task deadlock](/images/recursive-task-deadlock-min.png) - -### Mitigating recursive wait deadlocks - -To avoid recursive wait deadlocks when using shared queues: - -1. **Increase the queue's concurrency limit** to allow more levels of recursive waiting -2. **Use different queues** for parent and child tasks to eliminate the possibility of deadlock -3. 
**Design task hierarchies** to minimize deep recursive waiting patterns - -Remember that the number of recursive waits you can have on a shared queue is limited by that queue's concurrency limit. diff --git a/hosting/docker/.env.example b/hosting/docker/.env.example index 1f50e5e703..1fe7f6323f 100644 --- a/hosting/docker/.env.example +++ b/hosting/docker/.env.example @@ -72,6 +72,9 @@ RUN_REPLICATION_CLICKHOUSE_URL=http://default:password@clickhouse:8123 DOCKER_REGISTRY_URL=localhost:5000 DOCKER_REGISTRY_USERNAME=registry-user DOCKER_REGISTRY_PASSWORD=very-secure-indeed +# When using an external registry you will have to change this +# On Docker Hub it should generally be the same as your username +DOCKER_REGISTRY_NAMESPACE=trigger # Object store # - You need to log into the Minio dashboard and create a bucket called "packets" diff --git a/hosting/docker/webapp/docker-compose.yml b/hosting/docker/webapp/docker-compose.yml index 1935ad5edc..eb636c78b9 100644 --- a/hosting/docker/webapp/docker-compose.yml +++ b/hosting/docker/webapp/docker-compose.yml @@ -77,7 +77,6 @@ services: # DEFAULT_ORG_EXECUTION_CONCURRENCY_LIMIT: 100 # Internal OTEL configuration INTERNAL_OTEL_TRACE_LOGGING_ENABLED: ${INTERNAL_OTEL_TRACE_LOGGING_ENABLED:-0} - TRIGGER_CLI_TAG: ${TRIGGER_CLI_TAG:-v4-beta} postgres: image: postgres:${POSTGRES_IMAGE_TAG:-14} diff --git a/hosting/k8s/helm/Chart.yaml b/hosting/k8s/helm/Chart.yaml index fd9b857883..afbe254b8c 100644 --- a/hosting/k8s/helm/Chart.yaml +++ b/hosting/k8s/helm/Chart.yaml @@ -2,8 +2,8 @@ apiVersion: v2 name: trigger description: The official Trigger.dev Helm chart type: application -version: 4.0.0-beta.19 -appVersion: v4.0.0-v4-beta.26.1 +version: 4.0.0 +appVersion: v4.0.0 home: https://trigger.dev sources: - https://github.com/triggerdotdev/trigger.dev diff --git a/hosting/k8s/helm/README.md b/hosting/k8s/helm/README.md index 17b8bfde70..8997004a0f 100644 --- a/hosting/k8s/helm/README.md +++ b/hosting/k8s/helm/README.md @@ -52,7 +52,7 
@@ Dashboard: http://localhost:3040/ ```bash # The --push arg is required when testing locally -npx trigger.dev@v4-beta deploy --push +npx trigger.dev@latest deploy --push ``` ## ⚠️ Security Requirements @@ -537,7 +537,7 @@ kubectl port-forward svc/trigger.dev-webapp 3040:3030 --address 0.0.0.0 2. **Database connection**: Check PostgreSQL is ready before webapp starts 3. **Resource limits**: Increase limits for ClickHouse in constrained environments 4. **Config not applying**: Use the pod restart technique above to force config reload -5. **Image pull errors**: When testing locally, deploy with `npx trigger.dev@v4-beta deploy --push` +5. **Image pull errors**: When testing locally, deploy with `npx trigger.dev@latest deploy --push` ## Examples diff --git a/hosting/k8s/helm/templates/webapp.yaml b/hosting/k8s/helm/templates/webapp.yaml index 9a0794ce69..f9f59c363f 100644 --- a/hosting/k8s/helm/templates/webapp.yaml +++ b/hosting/k8s/helm/templates/webapp.yaml @@ -229,8 +229,6 @@ spec: value: {{ include "trigger-v4.s3.url" . | quote }} - name: GRACEFUL_SHUTDOWN_TIMEOUT value: {{ .Values.webapp.gracefulShutdownTimeout | quote }} - - name: TRIGGER_CLI_TAG - value: "v4-beta" {{- if .Values.webapp.bootstrap.enabled }} - name: TRIGGER_BOOTSTRAP_ENABLED value: "1" diff --git a/internal-packages/run-engine/src/engine/index.ts b/internal-packages/run-engine/src/engine/index.ts index f71c62dcd3..6ee122c418 100644 --- a/internal-packages/run-engine/src/engine/index.ts +++ b/internal-packages/run-engine/src/engine/index.ts @@ -565,6 +565,7 @@ export class RunEngine { workerId, runnerId, tx, + skipObserving, }: { consumerId: string; workerQueue: string; @@ -572,9 +573,12 @@ export class RunEngine { workerId?: string; runnerId?: string; tx?: PrismaClientOrTransaction; + skipObserving?: boolean; }): Promise { - // We only do this with "prod" worker queues because we don't want to observe dev (e.g. 
environment) worker queues - this.runQueue.registerObservableWorkerQueue(workerQueue); + if (!skipObserving) { + // We only do this with "prod" worker queues because we don't want to observe dev (e.g. environment) worker queues + this.runQueue.registerObservableWorkerQueue(workerQueue); + } const dequeuedMessage = await this.dequeueSystem.dequeueFromWorkerQueue({ consumerId, @@ -614,6 +618,7 @@ export class RunEngine { workerId, runnerId, tx, + skipObserving: true, }); } diff --git a/internal-packages/run-engine/src/engine/machinePresets.ts b/internal-packages/run-engine/src/engine/machinePresets.ts index 4c526942a7..a2edf5b1b1 100644 --- a/internal-packages/run-engine/src/engine/machinePresets.ts +++ b/internal-packages/run-engine/src/engine/machinePresets.ts @@ -26,7 +26,7 @@ export function getMachinePreset({ const parsedConfig = MachineConfig.safeParse(config); if (!parsedConfig.success) { - logger.error("Failed to parse machine config", { config }); + logger.info("Failed to parse machine config", { config }); return machinePresetFromName(machines, "small-1x"); } diff --git a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts index bec173d960..384384fd8c 100644 --- a/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts +++ b/internal-packages/run-engine/src/engine/systems/checkpointSystem.ts @@ -62,7 +62,7 @@ export class CheckpointSystem { snapshot.executionStatus === "QUEUED_EXECUTING"); if (!isValidSnapshot) { - this.$.logger.error("Tried to createCheckpoint on an invalid snapshot", { + this.$.logger.info("Tried to createCheckpoint on an invalid snapshot", { snapshot, snapshotId, }); diff --git a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts index 85683c5a17..3935802a5c 100644 --- a/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts +++ 
b/internal-packages/run-engine/src/engine/systems/dequeueSystem.ts @@ -8,7 +8,7 @@ import { PrismaClientOrTransaction } from "@trigger.dev/database"; import { getRunWithBackgroundWorkerTasks } from "../db/worker.js"; import { sendNotificationToWorker } from "../eventBus.js"; import { getMachinePreset } from "../machinePresets.js"; -import { isDequeueableExecutionStatus } from "../statuses.js"; +import { isDequeueableExecutionStatus, isExecuting } from "../statuses.js"; import { RunEngineOptions } from "../types.js"; import { ExecutionSnapshotSystem, getLatestExecutionSnapshot } from "./executionSnapshotSystem.js"; import { RunAttemptSystem } from "./runAttemptSystem.js"; @@ -132,9 +132,26 @@ export class DequeueSystem { }, tx: prisma, }); - this.$.logger.error( - `RunEngine.dequeueFromWorkerQueue(): Run is not in a valid state to be dequeued: ${runId}\n ${snapshot.id}:${snapshot.executionStatus}` - ); + + if (isExecuting(snapshot.executionStatus)) { + this.$.logger.error( + `RunEngine.dequeueFromWorkerQueue(): Run is not in a valid state to be dequeued`, + { + runId, + snapshotId: snapshot.id, + executionStatus: snapshot.executionStatus, + } + ); + } else { + this.$.logger.warn( + `RunEngine.dequeueFromWorkerQueue(): Run is in an expected not valid state to be dequeued`, + { + runId, + snapshotId: snapshot.id, + executionStatus: snapshot.executionStatus, + } + ); + } return; } diff --git a/packages/build/CHANGELOG.md b/packages/build/CHANGELOG.md index 11f59f7ccd..b11ee64f55 100644 --- a/packages/build/CHANGELOG.md +++ b/packages/build/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/build +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/build/package.json b/packages/build/package.json index 3a0c1e3a69..5f321bf630 100644 --- a/packages/build/package.json +++ b/packages/build/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/build", - "version": "4.0.0", + "version": "4.0.1", 
"description": "trigger.dev build extensions", "license": "MIT", "publishConfig": { @@ -77,7 +77,7 @@ "check-exports": "attw --pack ." }, "dependencies": { - "@trigger.dev/core": "workspace:4.0.0", + "@trigger.dev/core": "workspace:4.0.1", "pkg-types": "^1.1.3", "tinyglobby": "^0.2.2", "tsconfck": "3.1.3" diff --git a/packages/cli-v3/CHANGELOG.md b/packages/cli-v3/CHANGELOG.md index c198865faf..9d616c388a 100644 --- a/packages/cli-v3/CHANGELOG.md +++ b/packages/cli-v3/CHANGELOG.md @@ -1,5 +1,15 @@ # trigger.dev +## 4.0.1 + +### Patch Changes + +- feat: Add official MCP server, install MCP and rules CLI commands and wizards ([#2384](https://github.com/triggerdotdev/trigger.dev/pull/2384)) +- Updated dependencies: + - `@trigger.dev/build@4.0.1` + - `@trigger.dev/core@4.0.1` + - `@trigger.dev/schema-to-json@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/cli-v3/install-mcp.sh b/packages/cli-v3/install-mcp.sh new file mode 100755 index 0000000000..e2612a34e5 --- /dev/null +++ b/packages/cli-v3/install-mcp.sh @@ -0,0 +1,582 @@ +#!/bin/bash + +set -e # Exit on error + +# Default target +TARGET="all" + +# Parse command line arguments +show_help() { + echo "πŸš€ Trigger.dev MCP Server Installer" + echo "" + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Options:" + echo " -t, --target TARGET Install target: claude, claude-desktop, cursor, vscode, crush, windsurf, or all (default: all)" + echo " -h, --help Show this help message" + echo "" + echo "Targets:" + echo " claude Install for Claude Code (~/.claude.json)" + echo " claude-desktop Install for Claude Desktop (~/Library/Application Support/Claude/claude_desktop_config.json)" + echo " cursor Install for Cursor (~/.cursor/mcp.json)" + echo " vscode Install for VS Code (~/Library/Application Support/Code/User/mcp.json)" + echo " crush Install for Crush (~/.config/crush/crush.json)" + echo " windsurf Install for Windsurf (~/.codeium/windsurf/mcp_config.json)" + echo " all Install for all supported targets" + echo 
"" + echo "Examples:" + echo " $0 # Install for all targets" + echo " $0 -t claude # Install only for Claude Code" + echo " $0 -t claude-desktop # Install only for Claude Desktop" + echo " $0 -t cursor # Install only for Cursor" + echo " $0 -t vscode # Install only for VS Code" + echo " $0 -t crush # Install only for Crush" + echo " $0 -t windsurf # Install only for Windsurf" +} + +# Parse arguments +while [[ $# -gt 0 ]]; do + case $1 in + -t|--target) + TARGET="$2" + shift 2 + ;; + -h|--help) + show_help + exit 0 + ;; + *) + echo "❌ Unknown option: $1" + echo "Use -h or --help for usage information" + exit 1 + ;; + esac +done + +# Validate target +case $TARGET in + claude|claude-desktop|cursor|vscode|crush|windsurf|all) + ;; + *) + echo "❌ Invalid target: $TARGET" + echo "Valid targets are: claude, claude-desktop, cursor, vscode, crush, windsurf, all" + exit 1 + ;; +esac + +echo "πŸš€ Installing Trigger.dev MCP Server for target: $TARGET" + +# Get the absolute path to the node binary +NODE_PATH=$(which node) +if [ -z "$NODE_PATH" ]; then + echo "❌ Error: Node.js not found in PATH" + echo "Please ensure Node.js is installed and available in your PATH" + exit 1 +fi + +# Get the directory where this script is located +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Construct the path to the CLI index.js file +CLI_PATH="$SCRIPT_DIR/dist/esm/index.js" + +# Construct the path to the MCP log file +MCP_LOG_FILE="$SCRIPT_DIR/.mcp.log" + +# Make sure the MCP log file exists +touch "$MCP_LOG_FILE" + +# Check if the CLI file exists +if [ ! -f "$CLI_PATH" ]; then + echo "❌ Error: CLI file not found at $CLI_PATH" + echo "Make sure to build the CLI first with: pnpm run build" + exit 1 +fi + +# Ensure the CLI is executable +chmod +x "$CLI_PATH" + +echo "βœ… Found Node.js at: $NODE_PATH" +echo "βœ… Found CLI at: $CLI_PATH" + +# Function to install for Claude Code +install_claude() { + echo "" + echo "πŸ”§ Installing for Claude Code..." 
+ + local CLAUDE_CONFIG="$HOME/.claude.json" + echo "πŸ“ Claude configuration file: $CLAUDE_CONFIG" + + # Check if Claude config exists, create if it doesn't + if [ ! -f "$CLAUDE_CONFIG" ]; then + echo "πŸ“ Creating new Claude configuration file..." + echo '{"mcpServers": {}}' > "$CLAUDE_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating Claude configuration..." + + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$CLAUDE_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure mcpServers object exists + if (!config.mcpServers) { + config.mcpServers = {}; + } + + // Add/update trigger.dev entry + config.mcpServers['trigger'] = { + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to Claude Code'); + console.log(''); + console.log('πŸ“‹ Claude Code Configuration:'); + console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ Try typing @ in Claude Code and select \"triggerdev\" to get started.'); + + } catch (error) { + console.error('❌ Error updating Claude configuration:', error.message); + process.exit(1); + } + " +} + +# Function to install for Claude Desktop +install_claude_desktop() { + echo "" + echo "πŸ”§ Installing for Claude Desktop..." 
+ + local CLAUDE_DESKTOP_DIR="$HOME/Library/Application Support/Claude" + local CLAUDE_DESKTOP_CONFIG="$CLAUDE_DESKTOP_DIR/claude_desktop_config.json" + + echo "πŸ“ Claude Desktop configuration file: $CLAUDE_DESKTOP_CONFIG" + + # Create Claude Desktop directory if it doesn't exist + if [ ! -d "$CLAUDE_DESKTOP_DIR" ]; then + echo "πŸ“ Creating Claude Desktop configuration directory..." + mkdir -p "$CLAUDE_DESKTOP_DIR" + fi + + # Check if Claude Desktop config exists, create if it doesn't + if [ ! -f "$CLAUDE_DESKTOP_CONFIG" ]; then + echo "πŸ“ Creating new Claude Desktop configuration file..." + echo '{"mcpServers": {}}' > "$CLAUDE_DESKTOP_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating Claude Desktop configuration..." + + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$CLAUDE_DESKTOP_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure mcpServers object exists + if (!config.mcpServers) { + config.mcpServers = {}; + } + + // Add/update trigger.dev entry + config.mcpServers['trigger'] = { + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to Claude Desktop'); + console.log(''); + console.log('πŸ“‹ Claude Desktop Configuration:'); + console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ You can now use Trigger.dev 
MCP commands in Claude Desktop.'); + + } catch (error) { + console.error('❌ Error updating Claude Desktop configuration:', error.message); + process.exit(1); + } + " +} + +# Function to install for Cursor +install_cursor() { + echo "" + echo "πŸ”§ Installing for Cursor..." + + local CURSOR_DIR="$HOME/.cursor" + local CURSOR_CONFIG="$CURSOR_DIR/mcp.json" + + echo "πŸ“ Cursor configuration file: $CURSOR_CONFIG" + + # Create Cursor directory if it doesn't exist + if [ ! -d "$CURSOR_DIR" ]; then + echo "πŸ“ Creating Cursor configuration directory..." + mkdir -p "$CURSOR_DIR" + fi + + # Check if Cursor config exists, create if it doesn't + if [ ! -f "$CURSOR_CONFIG" ]; then + echo "πŸ“ Creating new Cursor configuration file..." + echo '{"mcpServers": {}}' > "$CURSOR_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating Cursor configuration..." + + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$CURSOR_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure mcpServers object exists + if (!config.mcpServers) { + config.mcpServers = {}; + } + + // Add/update trigger.dev entry + config.mcpServers['trigger'] = { + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to Cursor'); + console.log(''); + console.log('πŸ“‹ Cursor Configuration:'); + console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' 
β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ You can now use Trigger.dev MCP commands in Cursor.'); + + } catch (error) { + console.error('❌ Error updating Cursor configuration:', error.message); + process.exit(1); + } + " +} + +# Function to install for VS Code +install_vscode() { + echo "" + echo "πŸ”§ Installing for VS Code..." + + local VSCODE_DIR="$HOME/Library/Application Support/Code/User" + local VSCODE_CONFIG="$VSCODE_DIR/mcp.json" + + echo "πŸ“ VS Code configuration file: $VSCODE_CONFIG" + + # Create VS Code User directory if it doesn't exist + if [ ! -d "$VSCODE_DIR" ]; then + echo "πŸ“ Creating VS Code User configuration directory..." + mkdir -p "$VSCODE_DIR" + fi + + # Check if VS Code config exists, create if it doesn't + if [ ! -f "$VSCODE_CONFIG" ]; then + echo "πŸ“ Creating new VS Code configuration file..." + echo '{"servers": {}}' > "$VSCODE_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating VS Code configuration..." + + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$VSCODE_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure servers object exists + if (!config.servers) { + config.servers = {}; + } + + // Add/update trigger.dev entry + config.servers['trigger'] = { + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to VS Code'); + console.log(''); + console.log('πŸ“‹ VS Code Configuration:'); 
+ console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ You can now use Trigger.dev MCP commands in VS Code.'); + + } catch (error) { + console.error('❌ Error updating VS Code configuration:', error.message); + process.exit(1); + } + " +} + +# Function to install for Crush +install_crush() { + echo "" + echo "πŸ”§ Installing for Crush..." + + local CRUSH_DIR="$HOME/.config/crush" + local CRUSH_CONFIG="$CRUSH_DIR/crush.json" + + echo "πŸ“ Crush configuration file: $CRUSH_CONFIG" + + # Create Crush config directory if it doesn't exist + if [ ! -d "$CRUSH_DIR" ]; then + echo "πŸ“ Creating Crush configuration directory..." + mkdir -p "$CRUSH_DIR" + fi + + # Check if Crush config exists, create if it doesn't + if [ ! -f "$CRUSH_CONFIG" ]; then + echo "πŸ“ Creating new Crush configuration file..." + echo '{"$schema": "https://charm.land/crush.json", "mcp": {}}' > "$CRUSH_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating Crush configuration..." 
+ + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$CRUSH_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure schema and mcp object exists + if (!config['\$schema']) { + config['\$schema'] = 'https://charm.land/crush.json'; + } + if (!config.mcp) { + config.mcp = {}; + } + + // Add/update trigger.dev entry + config.mcp['trigger'] = { + type: 'stdio', + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to Crush'); + console.log(''); + console.log('πŸ“‹ Crush Configuration:'); + console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ You can now use Trigger.dev MCP commands in Crush.'); + + } catch (error) { + console.error('❌ Error updating Crush configuration:', error.message); + process.exit(1); + } + " +} + +# Function to install for Windsurf +install_windsurf() { + echo "" + echo "πŸ”§ Installing for Windsurf..." + + local WINDSURF_DIR="$HOME/.codeium/windsurf" + local WINDSURF_CONFIG="$WINDSURF_DIR/mcp_config.json" + + echo "πŸ“ Windsurf configuration file: $WINDSURF_CONFIG" + + # Create Windsurf config directory if it doesn't exist + if [ ! -d "$WINDSURF_DIR" ]; then + echo "πŸ“ Creating Windsurf configuration directory..." + mkdir -p "$WINDSURF_DIR" + fi + + # Check if Windsurf config exists, create if it doesn't + if [ ! 
-f "$WINDSURF_CONFIG" ]; then + echo "πŸ“ Creating new Windsurf configuration file..." + echo '{"mcpServers": {}}' > "$WINDSURF_CONFIG" + fi + + # Use Node.js to manipulate the JSON + echo "πŸ”§ Updating Windsurf configuration..." + + node -e " + const fs = require('fs'); + const path = require('path'); + + const configPath = '$WINDSURF_CONFIG'; + const nodePath = '$NODE_PATH'; + const cliPath = '$CLI_PATH'; + const logFile = '$MCP_LOG_FILE'; + + try { + // Read existing config + let config; + try { + const configContent = fs.readFileSync(configPath, 'utf8'); + config = JSON.parse(configContent); + } catch (error) { + console.log('πŸ“ Creating new configuration structure...'); + config = {}; + } + + // Ensure mcpServers object exists + if (!config.mcpServers) { + config.mcpServers = {}; + } + + // Add/update trigger.dev entry + config.mcpServers['trigger'] = { + command: nodePath, + args: [cliPath, 'mcp', '--log-file', logFile, '--api-url', 'http://localhost:3030'] + }; + + // Write back to file with proper formatting + fs.writeFileSync(configPath, JSON.stringify(config, null, 2)); + + console.log('βœ… Successfully installed Trigger.dev MCP server to Windsurf'); + console.log(''); + console.log('πŸ“‹ Windsurf Configuration:'); + console.log(' β€’ Config file:', configPath); + console.log(' β€’ Node.js path:', nodePath); + console.log(' β€’ CLI path:', cliPath); + console.log(''); + console.log('πŸ’‘ You can now use Trigger.dev MCP commands in Windsurf.'); + + } catch (error) { + console.error('❌ Error updating Windsurf configuration:', error.message); + process.exit(1); + } + " +} + +# Install based on target +case $TARGET in + claude) + install_claude + ;; + claude-desktop) + install_claude_desktop + ;; + cursor) + install_cursor + ;; + vscode) + install_vscode + ;; + crush) + install_crush + ;; + windsurf) + install_windsurf + ;; + all) + install_claude + install_claude_desktop + install_cursor + install_vscode + install_crush + install_windsurf + ;; +esac + 
+echo "" +echo "πŸŽ‰ Installation complete!" +echo "" +echo "πŸ” You can test the MCP server with:" +echo " pnpm run inspector" diff --git a/packages/cli-v3/package.json b/packages/cli-v3/package.json index 184e36ef07..77f2cbdce1 100644 --- a/packages/cli-v3/package.json +++ b/packages/cli-v3/package.json @@ -1,6 +1,6 @@ { "name": "trigger.dev", - "version": "4.0.0", + "version": "4.0.1", "description": "A Command-Line Interface for Trigger.dev (v3) projects", "type": "module", "license": "MIT", @@ -75,12 +75,14 @@ "dev": "tshy --watch", "test": "vitest", "test:e2e": "vitest --run -c ./e2e/vitest.config.ts", - "update-version": "tsx ../../scripts/updateVersion.ts" + "update-version": "tsx ../../scripts/updateVersion.ts", + "install-mcp": "./install-mcp.sh", + "inspector": "npx @modelcontextprotocol/inspector dist/esm/index.js mcp --log-file .mcp.log --api-url http://localhost:3030" }, "dependencies": { - "@clack/prompts": "^0.10.0", + "@clack/prompts": "0.11.0", "@depot/cli": "0.0.1-cli.2.80.0", - "@modelcontextprotocol/sdk": "^1.6.1", + "@modelcontextprotocol/sdk": "^1.17.0", "@opentelemetry/api": "1.9.0", "@opentelemetry/api-logs": "0.203.0", "@opentelemetry/exporter-trace-otlp-http": "0.203.0", @@ -89,9 +91,9 @@ "@opentelemetry/resources": "2.0.1", "@opentelemetry/sdk-trace-node": "2.0.1", "@opentelemetry/semantic-conventions": "1.36.0", - "@trigger.dev/build": "workspace:4.0.0", - "@trigger.dev/core": "workspace:4.0.0", - "@trigger.dev/schema-to-json": "workspace:4.0.0", + "@trigger.dev/build": "workspace:4.0.1", + "@trigger.dev/core": "workspace:4.0.1", + "@trigger.dev/schema-to-json": "workspace:4.0.1", "ansi-escapes": "^7.0.0", "braces": "^3.0.3", "c12": "^1.11.1", @@ -99,6 +101,7 @@ "chokidar": "^3.6.0", "cli-table3": "^0.6.3", "commander": "^9.4.1", + "confbox": "^0.2.2", "defu": "^6.1.4", "dotenv": "^16.4.5", "esbuild": "^0.23.0", @@ -129,6 +132,7 @@ "socket.io-client": "4.7.5", "source-map-support": "0.5.21", "std-env": "^3.7.0", + "strip-ansi": 
"^7.1.0", "supports-color": "^10.0.0", "tiny-invariant": "^1.2.0", "tinyexec": "^0.3.1", diff --git a/packages/cli-v3/src/apiClient.ts b/packages/cli-v3/src/apiClient.ts index 5056c1c47a..b5a9ed6a43 100644 --- a/packages/cli-v3/src/apiClient.ts +++ b/packages/cli-v3/src/apiClient.ts @@ -31,6 +31,12 @@ import { WorkersCreateRequestBody, WorkersCreateResponseBody, WorkersListResponseBody, + CreateProjectRequestBody, + GetOrgsResponseBody, + GetWorkerByTagResponse, + GetJWTRequestBody, + GetJWTResponse, + ApiBranchListResponseBody, } from "@trigger.dev/core/v3"; import { WorkloadDebugLogRequestBody, @@ -136,6 +142,75 @@ export class CliApiClient { }); } + async getOrgs() { + if (!this.accessToken) { + throw new Error("getOrgs: No access token"); + } + + return wrapZodFetch(GetOrgsResponseBody, `${this.apiURL}/api/v1/orgs`, { + headers: { + Authorization: `Bearer ${this.accessToken}`, + "Content-Type": "application/json", + }, + }); + } + + async createProject(orgParam: string, body: CreateProjectRequestBody) { + if (!this.accessToken) { + throw new Error("createProject: No access token"); + } + + return wrapZodFetch(GetProjectResponseBody, `${this.apiURL}/api/v1/orgs/${orgParam}/projects`, { + method: "POST", + headers: this.getHeaders(), + body: JSON.stringify(body), + }); + } + + async getWorkerByTag(projectRef: string, envName: string, tagName: string = "current") { + if (!this.accessToken) { + throw new Error("getWorkerByTag: No access token"); + } + + return wrapZodFetch( + GetWorkerByTagResponse, + `${this.apiURL}/api/v1/projects/${projectRef}/${envName}/workers/${tagName}`, + { + headers: this.getHeaders(), + } + ); + } + + async getJWT(projectRef: string, envName: string, body: GetJWTRequestBody) { + if (!this.accessToken) { + throw new Error("getJWT: No access token"); + } + + return wrapZodFetch( + GetJWTResponse, + `${this.apiURL}/api/v1/projects/${projectRef}/${envName}/jwt`, + { + method: "POST", + headers: this.getHeaders(), + body: JSON.stringify(body), 
+ } + ); + } + + async getDevStatus(projectRef: string) { + if (!this.accessToken) { + throw new Error("getDevStatus: No access token"); + } + + return wrapZodFetch( + z.object({ isConnected: z.boolean() }), + `${this.apiURL}/api/v1/projects/${projectRef}/dev-status`, + { + headers: this.getHeaders(), + } + ); + } + async createBackgroundWorker(projectRef: string, body: CreateBackgroundWorkerRequestBody) { if (!this.accessToken) { throw new Error("createBackgroundWorker: No access token"); @@ -204,6 +279,20 @@ export class CliApiClient { ); } + async listBranches(projectRef: string) { + if (!this.accessToken) { + throw new Error("listBranches: No access token"); + } + + return wrapZodFetch( + ApiBranchListResponseBody, + `${this.apiURL}/api/v1/projects/${projectRef}/branches`, + { + headers: this.getHeaders(), + } + ); + } + async getEnvironmentVariables(projectRef: string) { if (!this.accessToken) { throw new Error("getEnvironmentVariables: No access token"); diff --git a/packages/cli-v3/src/cli/common.ts b/packages/cli-v3/src/cli/common.ts index 3cf9f2aba1..f1508c47b9 100644 --- a/packages/cli-v3/src/cli/common.ts +++ b/packages/cli-v3/src/cli/common.ts @@ -68,7 +68,7 @@ export async function wrapCommandAction( if (e instanceof SkipLoggingError) { // do nothing } else if (e instanceof OutroCommandError) { - outro("Operation cancelled"); + outro(e.message ?? 
"Operation cancelled"); } else if (e instanceof SkipCommandError) { // do nothing } else if (e instanceof BundleError) { diff --git a/packages/cli-v3/src/cli/index.ts b/packages/cli-v3/src/cli/index.ts index 4a575831a5..bea6eacd04 100644 --- a/packages/cli-v3/src/cli/index.ts +++ b/packages/cli-v3/src/cli/index.ts @@ -1,21 +1,22 @@ import { Command } from "commander"; +import { configureAnalyzeCommand } from "../commands/analyze.js"; +import { configureDeployCommand } from "../commands/deploy.js"; import { configureDevCommand } from "../commands/dev.js"; import { configureInitCommand } from "../commands/init.js"; +import { configureListProfilesCommand } from "../commands/list-profiles.js"; import { configureLoginCommand } from "../commands/login.js"; import { configureLogoutCommand } from "../commands/logout.js"; +import { configurePreviewCommand } from "../commands/preview.js"; +import { configurePromoteCommand } from "../commands/promote.js"; +import { configureSwitchProfilesCommand } from "../commands/switch.js"; +import { configureUpdateCommand } from "../commands/update.js"; import { configureWhoamiCommand } from "../commands/whoami.js"; +import { configureMcpCommand } from "../commands/mcp.js"; import { COMMAND_NAME } from "../consts.js"; -import { configureListProfilesCommand } from "../commands/list-profiles.js"; -import { configureAnalyzeCommand } from "../commands/analyze.js"; -import { configureUpdateCommand } from "../commands/update.js"; import { VERSION } from "../version.js"; -import { configureDeployCommand } from "../commands/deploy.js"; import { installExitHandler } from "./common.js"; -import { configureWorkersCommand } from "../commands/workers/index.js"; -import { configureSwitchProfilesCommand } from "../commands/switch.js"; -import { configureTriggerTaskCommand } from "../commands/trigger.js"; -import { configurePromoteCommand } from "../commands/promote.js"; -import { configurePreviewCommand } from "../commands/preview.js"; +import { 
configureInstallMcpCommand } from "../commands/install-mcp.js"; +import { configureInstallRulesCommand } from "../commands/install-rules.js"; export const program = new Command(); @@ -36,7 +37,8 @@ configureSwitchProfilesCommand(program); configureUpdateCommand(program); configurePreviewCommand(program); configureAnalyzeCommand(program); -// configureWorkersCommand(program); -// configureTriggerTaskCommand(program); +configureMcpCommand(program); +configureInstallMcpCommand(program); +configureInstallRulesCommand(program); installExitHandler(); diff --git a/packages/cli-v3/src/commands/deploy.ts b/packages/cli-v3/src/commands/deploy.ts index b64e600014..87dbbc9787 100644 --- a/packages/cli-v3/src/commands/deploy.ts +++ b/packages/cli-v3/src/commands/deploy.ts @@ -68,7 +68,7 @@ export function configureDeployCommand(program: Command) { commonOptions( program .command("deploy") - .description("Deploy your Trigger.dev v3 project to the cloud.") + .description("Deploy your Trigger.dev project to the cloud.") .argument("[path]", "The path to the project", ".") .option( "-e, --env ", diff --git a/packages/cli-v3/src/commands/dev.ts b/packages/cli-v3/src/commands/dev.ts index d3041ba0f8..3253fdc573 100644 --- a/packages/cli-v3/src/commands/dev.ts +++ b/packages/cli-v3/src/commands/dev.ts @@ -13,6 +13,15 @@ import { runtimeChecks } from "../utilities/runtimeCheck.js"; import { getProjectClient, LoginResultOk } from "../utilities/session.js"; import { login } from "./login.js"; import { updateTriggerPackages } from "./update.js"; +import { + readConfigHasSeenMCPInstallPrompt, + writeConfigHasSeenMCPInstallPrompt, +} from "../utilities/configFiles.js"; +import { confirm, isCancel, log } from "@clack/prompts"; +import { installMcpServer } from "./install-mcp.js"; +import { tryCatch } from "@trigger.dev/core/utils"; +import { VERSION } from "@trigger.dev/core"; +import { initiateRulesInstallWizard } from "./install-rules.js"; const DevCommandOptions = 
CommonCommandOptions.extend({ debugOtel: z.boolean().default(false), @@ -26,6 +35,10 @@ const DevCommandOptions = CommonCommandOptions.extend({ mcpPort: z.coerce.number().optional().default(3333), analyze: z.boolean().default(false), disableWarnings: z.boolean().default(false), + skipMCPInstall: z.boolean().default(false), + skipRulesInstall: z.boolean().default(false), + rulesInstallManifestPath: z.string().optional(), + rulesInstallBranch: z.string().optional(), }); export type DevCommandOptions = z.infer; @@ -59,6 +72,30 @@ export function configureDevCommand(program: Command) { .addOption( new CommandOption("--analyze", "Analyze the build output and import timings").hideHelp() ) + .addOption( + new CommandOption( + "--skip-mcp-install", + "Skip the Trigger.dev MCP server install wizard" + ).hideHelp() + ) + .addOption( + new CommandOption( + "--skip-rules-install", + "Skip the Trigger.dev Agent rules install wizard" + ).hideHelp() + ) + .addOption( + new CommandOption( + "--rules-install-manifest-path ", + "The path to the rules install manifest" + ).hideHelp() + ) + .addOption( + new CommandOption( + "--rules-install-branch ", + "The branch to install the rules from" + ).hideHelp() + ) .addOption(new CommandOption("--disable-warnings", "Suppress warnings output").hideHelp()) ).action(async (options) => { wrapCommandAction("dev", DevCommandOptions, options, async (opts) => { @@ -70,6 +107,52 @@ export function configureDevCommand(program: Command) { export async function devCommand(options: DevCommandOptions) { runtimeChecks(); + // Only show these install prompts if the user is in a terminal (not in a Coding Agent) + if (process.stdout.isTTY) { + const skipMCPInstall = typeof options.skipMCPInstall === "boolean" && options.skipMCPInstall; + + if (!skipMCPInstall) { + const hasSeenMCPInstallPrompt = readConfigHasSeenMCPInstallPrompt(); + + if (!hasSeenMCPInstallPrompt) { + const installChoice = await confirm({ + message: "Would you like to install the 
Trigger.dev MCP server?", + initialValue: true, + }); + + writeConfigHasSeenMCPInstallPrompt(true); + + const skipInstall = isCancel(installChoice) || !installChoice; + + if (!skipInstall) { + log.step("Welcome to the Trigger.dev MCP server install wizard πŸ§™"); + + const [installError] = await tryCatch( + installMcpServer({ + yolo: false, + tag: VERSION as string, + logLevel: options.logLevel, + }) + ); + + if (installError) { + log.error(`Failed to install MCP server: ${installError.message}`); + } + } + } + } + + const skipRulesInstall = + typeof options.skipRulesInstall === "boolean" && options.skipRulesInstall; + + if (!skipRulesInstall) { + await initiateRulesInstallWizard({ + manifestPath: options.rulesInstallManifestPath, + branch: options.rulesInstallBranch, + }); + } + } + const authorization = await login({ embedded: true, silent: true, diff --git a/packages/cli-v3/src/commands/init.ts b/packages/cli-v3/src/commands/init.ts index fcae774961..9e7ca46697 100644 --- a/packages/cli-v3/src/commands/init.ts +++ b/packages/cli-v3/src/commands/init.ts @@ -1,13 +1,18 @@ import { intro, isCancel, log, outro, select, text } from "@clack/prompts"; import { context, trace } from "@opentelemetry/api"; -import { GetProjectResponseBody, flattenAttributes } from "@trigger.dev/core/v3"; +import { + GetProjectResponseBody, + LogLevel, + flattenAttributes, + tryCatch, +} from "@trigger.dev/core/v3"; import { recordSpanException } from "@trigger.dev/core/v3/workers"; import chalk from "chalk"; import { Command, Option as CommandOption } from "commander"; import { applyEdits, findNodeAtLocation, getNodeValue, modify, parseTree } from "jsonc-parser"; import { writeFile } from "node:fs/promises"; import { join, relative, resolve } from "node:path"; -import { addDependency, addDevDependency, detectPackageManager } from "nypm"; +import { addDependency, addDevDependency } from "nypm"; import { resolveTSConfig } from "pkg-types"; import { z } from "zod"; import { CliApiClient } 
from "../apiClient.js"; @@ -31,8 +36,13 @@ import { createFile, pathExists, readFile } from "../utilities/fileSystem.js"; import { printStandloneInitialBanner } from "../utilities/initialBanner.js"; import { logger } from "../utilities/logger.js"; import { spinner } from "../utilities/windows.js"; -import { login } from "./login.js"; import { VERSION } from "../version.js"; +import { login } from "./login.js"; +import { + readConfigHasSeenMCPInstallPrompt, + writeConfigHasSeenMCPInstallPrompt, +} from "../utilities/configFiles.js"; +import { installMcpServer } from "./install-mcp.js"; const cliVersion = VERSION as string; const cliTag = cliVersion.includes("v4-beta") ? "v4-beta" : "latest"; @@ -46,6 +56,7 @@ const InitCommandOptions = CommonCommandOptions.extend({ pkgArgs: z.string().optional(), gitRef: z.string().default("main"), javascript: z.boolean().default(false), + yes: z.boolean().default(false), }); type InitCommandOptions = z.infer; @@ -77,6 +88,7 @@ export function configureInitCommand(program: Command) { "--pkg-args ", "Additional arguments to pass to the package manager, accepts CSV for multiple args" ) + .option("-y, --yes", "Skip all prompts and use defaults (requires --project-ref)") ) .addOption( new CommandOption( @@ -101,6 +113,50 @@ export async function initCommand(dir: string, options: unknown) { async function _initCommand(dir: string, options: InitCommandOptions) { const span = trace.getSpan(context.active()); + // Validate --yes flag requirements + if (options.yes && !options.projectRef) { + throw new Error("--project-ref is required when using --yes flag"); + } + + const hasSeenMCPInstallPrompt = readConfigHasSeenMCPInstallPrompt(); + + if (!hasSeenMCPInstallPrompt) { + const installChoice = await select({ + message: "Choose how you want to initialize your project:", + options: [ + { + value: "mcp", + label: "Trigger.dev MCP", + hint: "Automatically install the Trigger.dev MCP server and then vibe your way to a new project.", + }, + { 
value: "cli", label: "CLI", hint: "Continue with the CLI" }, + ], + }); + + writeConfigHasSeenMCPInstallPrompt(true); + + const continueWithCLI = isCancel(installChoice) || installChoice === "cli"; + + if (!continueWithCLI) { + log.step("Welcome to the Trigger.dev MCP server install wizard πŸ§™"); + + const [installError] = await tryCatch( + installMcpServer({ + yolo: false, + tag: options.tag, + logLevel: options.logLevel, + }) + ); + + if (installError) { + outro(`Failed to install MCP server: ${installError.message}`); + return; + } + + return; + } + } + intro("Initializing project"); const cwd = resolve(process.cwd(), dir); @@ -167,7 +223,11 @@ async function _initCommand(dir: string, options: InitCommandOptions) { // Install @trigger.dev/sdk package if (!options.skipPackageInstall) { - await installPackages(dir, options); + await installPackages( + cwd, + options.tag, + new CLIInstallPackagesOutputter(options.logLevel, options.tag) + ); } else { log.info("Skipping package installation"); } @@ -193,7 +253,7 @@ async function _initCommand(dir: string, options: InitCommandOptions) { `${authorization.dashboardUrl}/projects/v3/${selectedProject.externalRef}` ); - log.success("Successfully initialized project for Trigger.dev v3 🫑"); + log.success("Successfully initialized your Trigger.dev project 🫑"); log.info("Next steps:"); log.info( ` 1. 
To start developing, run ${chalk.green( @@ -223,14 +283,44 @@ async function createTriggerDir( try { const defaultValue = join(dir, "src", "trigger"); - const location = await text({ - message: "Where would you like to create the Trigger.dev directory?", - defaultValue: defaultValue, - placeholder: defaultValue, - }); + let location: string; + let example: string; - if (isCancel(location)) { - throw new OutroCommandError(); + if (options.yes) { + // Use defaults when --yes flag is set + location = defaultValue; + example = "simple"; + } else { + const locationPrompt = await text({ + message: "Where would you like to create the Trigger.dev directory?", + defaultValue: defaultValue, + placeholder: defaultValue, + }); + + if (isCancel(locationPrompt)) { + throw new OutroCommandError(); + } + + location = locationPrompt; + + const exampleSelection = await select({ + message: `Choose an example to create in the ${location} directory`, + options: [ + { value: "simple", label: "Simple (Hello World)" }, + { value: "schedule", label: "Scheduled Task" }, + { + value: "none", + label: "None", + hint: "skip creating an example", + }, + ], + }); + + if (isCancel(exampleSelection)) { + throw new OutroCommandError(); + } + + example = exampleSelection as string; } // Ensure that the path is always relative by stripping leading '/' if present @@ -248,25 +338,6 @@ async function createTriggerDir( throw new Error(`Directory already exists at ${triggerDir}`); } - const exampleSelection = await select({ - message: `Choose an example to create in the ${location} directory`, - options: [ - { value: "simple", label: "Simple (Hello World)" }, - { value: "schedule", label: "Scheduled Task" }, - { - value: "none", - label: "None", - hint: "skip creating an example", - }, - ], - }); - - if (isCancel(exampleSelection)) { - throw new OutroCommandError(); - } - - const example = exampleSelection as string; - span.setAttributes({ "cli.example": example, }); @@ -424,54 +495,84 @@ async function 
addConfigFileToTsConfig(tsconfigPath: string, options: InitComman }); } -async function installPackages(dir: string, options: InitCommandOptions) { - return await tracer.startActiveSpan("installPackages", async (span) => { - const projectDir = resolve(process.cwd(), dir); +export interface InstallPackagesOutputter { + startSDK: () => void; + installedSDK: () => void; + startBuild: () => void; + installedBuild: () => void; + stoppedWithError: () => void; +} - const installSpinner = spinner(); - const packageManager = await detectPackageManager(projectDir); +class CLIInstallPackagesOutputter implements InstallPackagesOutputter { + private installSpinner: ReturnType; - try { - span.setAttributes({ - "cli.projectDir": projectDir, - "cli.packageManager": packageManager?.name, - "cli.tag": options.tag, - }); + constructor( + private readonly logLevel: LogLevel, + private readonly tag: string + ) { + this.installSpinner = spinner(); + } - installSpinner.start(`Adding @trigger.dev/sdk@${options.tag}`); + startSDK() { + this.installSpinner.start(`Adding @trigger.dev/sdk@${this.tag}`); + } - await addDependency(`@trigger.dev/sdk@${options.tag}`, { cwd: projectDir, silent: true }); + installedSDK() { + this.installSpinner.stop(`@trigger.dev/sdk@${this.tag} installed`); + } - installSpinner.stop(`@trigger.dev/sdk@${options.tag} installed`); + startBuild() { + this.installSpinner.start(`Adding @trigger.dev/build@${this.tag} to devDependencies`); + } - installSpinner.start(`Adding @trigger.dev/build@${options.tag} to devDependencies`); + installedBuild() { + this.installSpinner.stop(`@trigger.dev/build@${this.tag} installed`); + } - await addDevDependency(`@trigger.dev/build@${options.tag}`, { - cwd: projectDir, - silent: true, - }); + stoppedWithError() { + if (this.logLevel === "debug") { + this.installSpinner.stop(`Failed to install @trigger.dev/sdk@${this.tag}.`); + } else { + this.installSpinner.stop( + `Failed to install @trigger.dev/sdk@${this.tag}. 
Rerun command with --log-level debug for more details.` + ); + } + } +} - installSpinner.stop(`@trigger.dev/build@${options.tag} installed`); +class SilentInstallPackagesOutputter implements InstallPackagesOutputter { + startSDK() {} + installedSDK() {} + startBuild() {} + installedBuild() {} + stoppedWithError() {} +} - span.end(); - } catch (e) { - if (options.logLevel === "debug") { - installSpinner.stop(`Failed to install @trigger.dev/sdk@${options.tag}.`); - } else { - installSpinner.stop( - `Failed to install @trigger.dev/sdk@${options.tag}. Rerun command with --log-level debug for more details.` - ); - } +export async function installPackages( + projectDir: string, + tag: string, + outputter: InstallPackagesOutputter = new SilentInstallPackagesOutputter() +) { + try { + outputter.startSDK(); - if (!(e instanceof SkipCommandError)) { - recordSpanException(span, e); - } + await addDependency(`@trigger.dev/sdk@${tag}`, { cwd: projectDir, silent: true }); - span.end(); + outputter.installedSDK(); - throw e; - } - }); + outputter.startBuild(); + + await addDevDependency(`@trigger.dev/build@${tag}`, { + cwd: projectDir, + silent: true, + }); + + outputter.installedBuild(); + } catch (e) { + outputter.stoppedWithError(); + + throw e; + } } async function writeConfigFile( diff --git a/packages/cli-v3/src/commands/install-mcp.ts b/packages/cli-v3/src/commands/install-mcp.ts new file mode 100644 index 0000000000..c1d36374f7 --- /dev/null +++ b/packages/cli-v3/src/commands/install-mcp.ts @@ -0,0 +1,698 @@ +import { confirm, intro, isCancel, log, multiselect, select } from "@clack/prompts"; +import chalk from "chalk"; +import { Command } from "commander"; +import { extname } from "node:path"; +import { z } from "zod"; +import { OutroCommandError, wrapCommandAction } from "../cli/common.js"; +import { cliLink } from "../utilities/cliOutput.js"; +import { writeConfigHasSeenMCPInstallPrompt } from "../utilities/configFiles.js"; +import { + expandTilde, + safeReadJSONCFile, 
+ safeReadTomlFile, + writeJSONFile, + writeTomlFile, +} from "../utilities/fileSystem.js"; +import { printStandloneInitialBanner } from "../utilities/initialBanner.js"; +import { VERSION } from "../version.js"; +import { spinner } from "../utilities/windows.js"; + +const cliVersion = VERSION as string; +const cliTag = cliVersion.includes("v4-beta") ? "v4-beta" : "latest"; + +const clients = [ + "claude-code", + "cursor", + "vscode", + "zed", + "windsurf", + "gemini-cli", + "crush", + "cline", + "openai-codex", + "opencode", + "amp", + "ruler", +] as const; +const scopes = ["user", "project", "local"] as const; + +type ClientScopes = { + [key in (typeof clients)[number]]: { + [key in (typeof scopes)[number]]?: string; + }; +}; + +type ClientLabels = { + [key in (typeof clients)[number]]: string; +}; + +const clientScopes: ClientScopes = { + "claude-code": { + user: "~/.claude.json", + project: "./.mcp.json", + local: "~/.claude.json", + }, + cursor: { + user: "~/.cursor/mcp.json", + project: "./.cursor/mcp.json", + }, + vscode: { + user: "~/Library/Application Support/Code/User/mcp.json", + project: "./.vscode/mcp.json", + }, + zed: { + user: "~/.config/zed/settings.json", + }, + windsurf: { + user: "~/.codeium/windsurf/mcp_config.json", + }, + "gemini-cli": { + user: "~/.gemini/settings.json", + project: "./.gemini/settings.json", + }, + crush: { + user: "~/.config/crush/crush.json", + project: "./crush.json", + local: "./.crush.json", + }, + cline: { + user: "~/Library/Application Support/Code/User/globalStorage/saoudrizwan.claude-dev/settings/cline_mcp_settings.json", + }, + amp: { + user: "~/.config/amp/settings.json", + }, + "openai-codex": { + user: "~/.codex/config.toml", + }, + opencode: { + user: "~/.config/opencode/opencode.json", + project: "./opencode.json", + }, + ruler: { + project: "./.ruler/mcp.json", + }, +}; + +const clientLabels: ClientLabels = { + "claude-code": "Claude Code", + cursor: "Cursor", + vscode: "VSCode", + zed: "Zed", + windsurf: 
"Windsurf", + "gemini-cli": "Gemini CLI", + crush: "Charm Crush", + cline: "Cline", + "openai-codex": "OpenAI Codex CLI", + amp: "Sourcegraph AMP", + opencode: "opencode", + ruler: "Ruler", +}; + +type SupportedClients = (typeof clients)[number]; +type ResolvedClients = SupportedClients | "unsupported"; + +const InstallMcpCommandOptions = z.object({ + projectRef: z.string().optional(), + tag: z.string().default(cliVersion), + devOnly: z.boolean().optional(), + yolo: z.boolean().default(false), + scope: z.enum(scopes).optional(), + client: z.enum(clients).array().optional(), + logFile: z.string().optional(), + apiUrl: z.string().optional(), + logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).default("log"), +}); + +type InstallMcpCommandOptions = z.infer; + +export function configureInstallMcpCommand(program: Command) { + return program + .command("install-mcp") + .description("Install the Trigger.dev MCP server") + .option( + "-p, --project-ref ", + "Scope the mcp server to a specific Trigger.dev project by providing its project ref" + ) + .option( + "-t, --tag ", + "The version of the trigger.dev CLI package to use for the MCP server", + cliTag + ) + .option("--dev-only", "Restrict the MCP server to the dev environment only") + .option("--yolo", "Install the MCP server into all supported clients") + .option("--scope ", "Choose the scope of the MCP server, either user or project") + .option( + "--client ", + "Choose the client (or clients) to install the MCP server into. We currently support: " + + clients.join(", ") + ) + .option("--log-file ", "Configure the MCP server to write logs to a file") + .option( + "-a, --api-url ", + "Configure the MCP server to specify a custom Trigger.dev API URL" + ) + .option( + "-l, --log-level ", + "The CLI log level to use (debug, info, log, warn, error, none). 
This does not effect the log level of your trigger.dev tasks.", + "log" + ) + .action(async (options) => { + await printStandloneInitialBanner(true); + await installMcpCommand(options); + }); +} + +export async function installMcpCommand(options: unknown) { + return await wrapCommandAction( + "installMcpCommand", + InstallMcpCommandOptions, + options, + async (opts) => { + return await _installMcpCommand(opts); + } + ); +} + +async function _installMcpCommand(options: InstallMcpCommandOptions) { + intro("Welcome to the Trigger.dev MCP server install wizard πŸ§™"); + + await installMcpServer(options); +} + +type InstallMcpServerResults = Array; + +type InstallMcpServerResult = { + configPath: string; + clientName: (typeof clients)[number]; + scope: McpServerScope; +}; + +export async function installMcpServer( + options: InstallMcpCommandOptions +): Promise { + const opts = InstallMcpCommandOptions.parse(options); + + writeConfigHasSeenMCPInstallPrompt(true); + + const devOnly = await resolveDevOnly(opts); + + opts.devOnly = devOnly; + + const clientNames = await resolveClients(opts); + + if (clientNames.length === 1 && clientNames.includes("unsupported")) { + return handleUnsupportedClientOnly(opts); + } + + const results = []; + + for (const clientName of clientNames) { + const result = await installMcpServerForClient(clientName, opts); + + if (result) { + results.push(result); + } + } + + if (results.length > 0) { + log.step("Installed to:"); + for (const r of results) { + const scopeLabel = `${r.scope.scope}`; + log.message(` β€’ ${r.clientName} (${scopeLabel}) β†’ ${chalk.gray(r.configPath)}`); + } + } + + log.info("Next steps:"); + log.message(" 1. Restart your MCP client(s) to load the new configuration."); + log.message( + ' 2. In your client, look for a server named "trigger". It should connect automatically.' + ); + log.message(" 3. 
Get started with Trigger.dev"); + log.message( + ` Try asking your vibe-coding friend to ${chalk.green("Add trigger.dev to my project")}` + ); + + log.info("More examples:"); + log.message(` β€’ ${chalk.green('"Trigger the hello-world task"')}`); + log.message(` β€’ ${chalk.green('"Can you help me debug the prod run run_1234"')}`); + log.message(` β€’ ${chalk.green('"Deploy my trigger project to staging"')}`); + log.message(` β€’ ${chalk.green('"What trigger task handles uploading files to S3?"')}`); + log.message(` β€’ ${chalk.green('"How do I create a scheduled task in Trigger.dev?"')}`); + log.message(` β€’ ${chalk.green('"Search Trigger.dev docs for ffmpeg examples"')}`); + + log.info("Helpful links:"); + log.message(` β€’ ${cliLink("Trigger.dev docs", "https://trigger.dev/docs")}`); + log.message(` β€’ ${cliLink("MCP docs", "https://trigger.dev/docs/mcp")}`); + log.message( + ` β€’ Need help? ${cliLink( + "Join our Discord", + "https://trigger.dev/discord" + )} or email help@trigger.dev` + ); + + return results; +} + +function handleUnsupportedClientOnly(options: InstallMcpCommandOptions): InstallMcpServerResults { + log.info("Manual MCP server configuration"); + + const args = [`trigger.dev@${options.tag}`, "mcp"]; + + if (options.logFile) { + args.push("--log-file", options.logFile); + } + + if (options.apiUrl) { + args.push("--api-url", options.apiUrl); + } + + if (options.devOnly) { + args.push("--dev-only"); + } + + if (options.projectRef) { + args.push("--project-ref", options.projectRef); + } + + if (options.logLevel && options.logLevel !== "log") { + args.push("--log-level", options.logLevel); + } + + log.message( + "Since your client isn't directly supported yet, you'll need to configure it manually:" + ); + log.message(""); + log.message(`${chalk.yellow("Command:")} ${chalk.green("npx")}`); + log.message(`${chalk.yellow("Arguments:")} ${chalk.green(args.join(" "))}`); + log.message(""); + log.message("Add this MCP server configuration to your 
client's settings:"); + log.message(` β€’ ${chalk.cyan("Server name:")} trigger`); + log.message(` β€’ ${chalk.cyan("Command:")} npx`); + log.message(` β€’ ${chalk.cyan("Args:")} ${args.map((arg) => `"${arg}"`).join(", ")}`); + log.message(""); + log.message("Most MCP clients use a JSON configuration format like:"); + log.message( + chalk.dim(`{ + "mcpServers": { + "trigger": { + "command": "npx", + "args": [${args.map((arg) => `"${arg}"`).join(", ")}] + } + } +}`) + ); + + return []; +} + +async function installMcpServerForClient( + clientName: ResolvedClients, + options: InstallMcpCommandOptions +) { + if (clientName === "unsupported") { + // This should not happen as unsupported clients are handled separately + // but if it does, provide helpful output + log.message( + `${chalk.yellow("⚠")} Skipping unsupported client - see manual configuration above` + ); + return; + } + + const scope = await resolveScopeForClient(clientName, options); + + // clientSpinner.message(`Installing in ${scope.scope} scope at ${scope.location}`); + + const configPath = await performInstallForClient(clientName, scope, options); + + // clientSpinner.stop(`Successfully installed in ${clientName} (${configPath})`); + + return { configPath, clientName, scope }; +} + +type McpServerConfig = Record | boolean | undefined>; +type McpServerScope = { + scope: (typeof scopes)[number]; + location: string; +}; + +async function performInstallForClient( + clientName: (typeof clients)[number], + scope: McpServerScope, + options: InstallMcpCommandOptions +) { + const config = resolveMcpServerConfig(clientName, options); + const pathComponents = resolveMcpServerConfigJsonPath(clientName, scope); + + return await writeMcpServerConfig(scope.location, pathComponents, config); +} + +async function writeMcpServerConfig( + location: string, + pathComponents: string[], + config: McpServerConfig +) { + const fullPath = expandTilde(location); + + const extension = extname(fullPath); + + switch (extension) { + 
case ".json": { + let existingConfig = await safeReadJSONCFile(fullPath); + + if (!existingConfig) { + existingConfig = {}; + } + + const newConfig = applyConfigToExistingConfig(existingConfig, pathComponents, config); + + await writeJSONFile(fullPath, newConfig, true); + break; + } + case ".toml": { + let existingConfig = await safeReadTomlFile(fullPath); + + if (!existingConfig) { + existingConfig = {}; + } + + const newConfig = applyConfigToExistingConfig(existingConfig, pathComponents, config); + + await writeTomlFile(fullPath, newConfig); + break; + } + } + + return fullPath; +} + +function applyConfigToExistingConfig( + existingConfig: any, + pathComponents: string[], + config: McpServerConfig +) { + const clonedConfig = structuredClone(existingConfig); + + let currentValueAtPath = clonedConfig; + + for (let i = 0; i < pathComponents.length; i++) { + const currentPathSegment = pathComponents[i]; + + if (!currentPathSegment) { + break; + } + + if (i === pathComponents.length - 1) { + currentValueAtPath[currentPathSegment] = config; + break; + } else { + currentValueAtPath[currentPathSegment] = currentValueAtPath[currentPathSegment] || {}; + currentValueAtPath = currentValueAtPath[currentPathSegment]; + } + } + + return clonedConfig; +} + +function resolveMcpServerConfigJsonPath( + clientName: (typeof clients)[number], + scope: McpServerScope +) { + switch (clientName) { + case "cursor": { + return ["mcpServers", "trigger"]; + } + case "vscode": { + return ["servers", "trigger"]; + } + case "crush": { + return ["mcp", "trigger"]; + } + case "windsurf": { + return ["mcpServers", "trigger"]; + } + case "gemini-cli": { + return ["mcpServers", "trigger"]; + } + case "cline": { + return ["mcpServers", "trigger"]; + } + case "amp": { + return ["amp.mcpServers", "trigger"]; + } + case "zed": { + return ["context_servers", "trigger"]; + } + case "claude-code": { + if (scope.scope === "local") { + const projectPath = process.cwd(); + + return ["projects", projectPath, 
"mcpServers", "trigger"]; + } else { + return ["mcpServers", "trigger"]; + } + } + case "openai-codex": { + return ["mcp_servers", "trigger"]; + } + case "opencode": { + return ["mcp", "trigger"]; + } + case "ruler": { + return ["mcpServers", "trigger"]; + } + } +} + +function resolveMcpServerConfig( + clientName: (typeof clients)[number], + options: InstallMcpCommandOptions +): McpServerConfig { + const args = [`trigger.dev@${options.tag}`, "mcp"]; + + if (options.logFile) { + args.push("--log-file", options.logFile); + } + + if (options.apiUrl) { + args.push("--api-url", options.apiUrl); + } + + if (options.devOnly) { + args.push("--dev-only"); + } + + if (options.projectRef) { + args.push("--project-ref", options.projectRef); + } + + switch (clientName) { + case "claude-code": { + return { + command: "npx", + args, + }; + } + case "cursor": { + return { + command: "npx", + args, + }; + } + case "vscode": { + return { + command: "npx", + args, + }; + } + case "crush": { + return { + type: "stdio", + command: "npx", + args, + }; + } + case "windsurf": { + return { + command: "npx", + args, + }; + } + case "gemini-cli": { + return { + command: "npx", + args, + }; + } + case "cline": { + return { + command: "npx", + args, + }; + } + case "amp": { + return { + command: "npx", + args, + }; + } + case "openai-codex": { + return { + command: "npx", + args, + }; + } + case "zed": { + return { + source: "custom", + command: "npx", + args, + }; + } + case "opencode": { + return { + type: "local", + command: ["npx", ...args], + enabled: true, + }; + } + case "ruler": { + return { + type: "stdio", + command: "npx", + args, + }; + } + } +} + +async function resolveScopeForClient( + clientName: (typeof clients)[number], + options: InstallMcpCommandOptions +) { + if (options.scope) { + const location = clientScopes[clientName][options.scope]; + + if (!location) { + throw new OutroCommandError( + `The ${clientName} client does not support the ${ + options.scope + } scope, it 
only supports ${Object.keys(clientScopes[clientName]).join(", ")} scopes` + ); + } + + return { + scope: options.scope, + location, + }; + } + + const scopeOptions = resolveScopeOptionsForClient(clientName); + + if (scopeOptions.length === 1) { + return { + scope: scopeOptions[0]!.value.scope, + location: scopeOptions[0]!.value.location, + }; + } + + const selectedScope = await select({ + message: `Where should the MCP server for ${clientName} be installed?`, + options: scopeOptions, + }); + + if (isCancel(selectedScope)) { + throw new OutroCommandError("No scope selected"); + } + + return selectedScope; +} + +function resolveScopeOptionsForClient(clientName: (typeof clients)[number]): Array<{ + value: { location: string; scope: (typeof scopes)[number] }; + label: string; + hint: string; +}> { + const $clientScopes = clientScopes[clientName]; + + const options = Object.entries($clientScopes).map(([scope, location]) => ({ + value: { location, scope: scope as (typeof scopes)[number] }, + label: scope, + hint: scopeHint(scope as (typeof scopes)[number], location), + })); + + return options; +} + +function scopeHint(scope: (typeof scopes)[number], location: string) { + switch (scope) { + case "user": { + return `Install for your user account on your machine (${location})`; + } + case "project": { + return `Install in the current project shared with your team (${location})`; + } + case "local": { + return `Install in the current project, local to you only (${location})`; + } + } +} + +async function resolveClients(options: InstallMcpCommandOptions): Promise { + if (options.client) { + return options.client; + } + + if (options.yolo) { + return [...clients]; + } + + const selectOptions: Array<{ + value: string; + label: string; + hint?: string; + }> = clients.map((client) => ({ + value: client, + label: clientLabels[client], + })); + + selectOptions.push({ + value: "unsupported", + label: "Unsupported client", + hint: "We don't support this client yet, but you can still 
install the MCP server manually.", + }); + + const $selectOptions = selectOptions as Array<{ + value: ResolvedClients; + label: string; + hint?: string; + }>; + + const selectedClients = await multiselect({ + message: "Select one or more clients to install the MCP server into", + options: $selectOptions, + required: true, + }); + + if (isCancel(selectedClients)) { + throw new OutroCommandError("No clients selected"); + } + + return selectedClients; +} + +async function resolveDevOnly(options: InstallMcpCommandOptions) { + if (typeof options.devOnly === "boolean") { + return options.devOnly; + } + + const devOnly = await confirm({ + message: "Restrict the MCP server to the dev environment only?", + initialValue: false, + }); + + if (isCancel(devOnly)) { + return false; + } + + return devOnly; +} diff --git a/packages/cli-v3/src/commands/install-rules.ts b/packages/cli-v3/src/commands/install-rules.ts new file mode 100644 index 0000000000..284f2ad73a --- /dev/null +++ b/packages/cli-v3/src/commands/install-rules.ts @@ -0,0 +1,604 @@ +import { confirm, intro, isCancel, log, multiselect, outro } from "@clack/prompts"; +import { ResolvedConfig } from "@trigger.dev/core/v3/build"; +import chalk from "chalk"; +import { Command, Option as CommandOption } from "commander"; +import { join } from "node:path"; +import * as semver from "semver"; +import { z } from "zod"; +import { OutroCommandError, wrapCommandAction } from "../cli/common.js"; +import { loadConfig } from "../config.js"; +import { + GithubRulesManifestLoader, + loadRulesManifest, + LocalRulesManifestLoader, + ManifestVersion, + RulesManifest, + RulesManifestVersionOption, +} from "../rules/manifest.js"; +import { cliLink } from "../utilities/cliOutput.js"; +import { + readConfigHasSeenRulesInstallPrompt, + readConfigLastRulesInstallPromptVersion, + writeConfigHasSeenRulesInstallPrompt, + writeConfigLastRulesInstallPromptVersion, +} from "../utilities/configFiles.js"; +import { pathExists, readFile, safeWriteFile 
} from "../utilities/fileSystem.js"; +import { printStandloneInitialBanner } from "../utilities/initialBanner.js"; +import { logger } from "../utilities/logger.js"; + +const targets = [ + "claude-code", + "cursor", + "vscode", + "windsurf", + "gemini-cli", + "cline", + "agents.md", + "amp", + "kilo", + "ruler", +] as const; + +type TargetLabels = { + [key in (typeof targets)[number]]: string; +}; + +const targetLabels: TargetLabels = { + "claude-code": "Claude Code", + cursor: "Cursor", + vscode: "VSCode", + windsurf: "Windsurf", + "gemini-cli": "Gemini CLI", + cline: "Cline", + "agents.md": "AGENTS.md (OpenAI Codex CLI, Jules, OpenCode)", + amp: "Sourcegraph AMP", + kilo: "Kilo Code", + ruler: "Ruler", +}; + +type SupportedTargets = (typeof targets)[number]; +type ResolvedTargets = SupportedTargets | "unsupported"; + +const InstallRulesCommandOptions = z.object({ + target: z.enum(targets).array().optional(), + manifestPath: z.string().optional(), + branch: z.string().optional(), + logLevel: z.enum(["debug", "info", "log", "warn", "error", "none"]).optional(), + forceWizard: z.boolean().optional(), +}); + +type InstallRulesCommandOptions = z.infer; + +export function configureInstallRulesCommand(program: Command) { + return program + .command("install-rules") + .description("Install the Trigger.dev Agent rules files") + .option( + "--target ", + "Choose the target (or targets) to install the Trigger.dev rules into. We currently support: " + + targets.join(", ") + ) + .option( + "-l, --log-level ", + "The CLI log level to use (debug, info, log, warn, error, none). This does not effect the log level of your trigger.dev tasks.", + "log" + ) + .addOption( + new CommandOption( + "--manifest-path ", + "The path to the rules manifest file. This is useful if you want to install the rules from a local file." 
+ ).hideHelp() + ) + .addOption( + new CommandOption( + "--branch ", + "The branch to install the rules from, the default is main" + ).hideHelp() + ) + .addOption( + new CommandOption( + "--force-wizard", + "Force the rules install wizard to run even if the rules have already been installed." + ).hideHelp() + ) + .action(async (options) => { + await printStandloneInitialBanner(true); + await installRulesCommand(options); + }); +} + +export async function installRulesCommand(options: unknown) { + return await wrapCommandAction( + "installRulesCommand", + InstallRulesCommandOptions, + options, + async (opts) => { + if (opts.logLevel) { + logger.loggerLevel = opts.logLevel; + } + + return await _installRulesCommand(opts); + } + ); +} + +async function _installRulesCommand(options: InstallRulesCommandOptions) { + if (options.forceWizard) { + await initiateRulesInstallWizard(options); + return; + } + + intro("Welcome to the Trigger.dev Agent rules install wizard "); + + const manifestLoader = options.manifestPath + ? new LocalRulesManifestLoader(options.manifestPath) + : new GithubRulesManifestLoader(options.branch ?? "main"); + + const manifest = await loadRulesManifest(manifestLoader); + + writeConfigLastRulesInstallPromptVersion(manifest.currentVersion); + writeConfigHasSeenRulesInstallPrompt(true); + + await installRules(manifest, options); + + outro("You're all set! "); +} + +type InstallRulesResults = Array; + +type InstallRulesResult = { + configPath: string; + targetName: (typeof targets)[number]; +}; + +export type InstallRulesWizardOptions = { + target?: Array<(typeof targets)[number]>; + manifestPath?: string; + branch?: string; +}; + +export async function initiateRulesInstallWizard(options: InstallRulesWizardOptions) { + const manifestLoader = options.manifestPath + ? new LocalRulesManifestLoader(options.manifestPath) + : new GithubRulesManifestLoader(options.branch ?? 
"main"); + + const manifest = await loadRulesManifest(manifestLoader); + + const hasSeenRulesInstallPrompt = readConfigHasSeenRulesInstallPrompt(); + + if (!hasSeenRulesInstallPrompt) { + writeConfigHasSeenRulesInstallPrompt(true); + writeConfigLastRulesInstallPromptVersion(manifest.currentVersion); + + const installChoice = await confirm({ + message: "Would you like to install the Trigger.dev code agent rules?", + initialValue: true, + }); + + const skipInstall = isCancel(installChoice) || !installChoice; + + if (skipInstall) { + return; + } + + await installRules(manifest, options); + return; + } + + const lastRulesInstallPromptVersion = readConfigLastRulesInstallPromptVersion(); + + if (!lastRulesInstallPromptVersion) { + writeConfigHasSeenRulesInstallPrompt(true); + writeConfigLastRulesInstallPromptVersion(manifest.currentVersion); + + const installChoice = await confirm({ + message: `A new version of the trigger.dev agent rules is available (${manifest.currentVersion}). Do you want to install it?`, + initialValue: true, + }); + + const skipInstall = isCancel(installChoice) || !installChoice; + + if (skipInstall) { + return; + } + + await installRules(manifest, options); + return; + } + + if (semver.gt(manifest.currentVersion, lastRulesInstallPromptVersion)) { + writeConfigHasSeenRulesInstallPrompt(true); + writeConfigLastRulesInstallPromptVersion(manifest.currentVersion); + + const confirmed = await confirm({ + message: `A new version of the trigger.dev agent rules is available (${lastRulesInstallPromptVersion} β†’ ${chalk.greenBright( + manifest.currentVersion + )}). 
Do you want to install it?`, + initialValue: true, + }); + + if (isCancel(confirmed) || !confirmed) { + return; + } + + await installRules(manifest, options); + } + + return; +} + +async function installRules(manifest: RulesManifest, opts: InstallRulesWizardOptions) { + const config = await loadConfig({ + cwd: process.cwd(), + }); + + const currentVersion = await manifest.getCurrentVersion(); + + const targetNames = await resolveTargets(opts); + + if (targetNames.length === 1 && targetNames.includes("unsupported")) { + handleUnsupportedTargetOnly(opts); + return; + } + + const results = []; + + for (const targetName of targetNames) { + const result = await installRulesForTarget(targetName, currentVersion, config, opts); + + if (result) { + results.push(result); + } + } + + if (results.length > 0) { + log.step("Installed the following rules files:"); + + for (const r of results) { + const installationsByLocation = r.installations.reduce( + (acc, i) => { + if (!acc[i.location]) { + acc[i.location] = []; + } + + acc[i.location]!.push(i.option); + + return acc; + }, + {} as Record + ); + + const locationOutput = Object.entries(installationsByLocation).map( + ([location]) => `${chalk.greenBright(location)}` + ); + + for (const message of locationOutput) { + log.info(message); + } + } + + log.info( + `${cliLink("Learn how to use our rules", "https://trigger.dev/docs/agents/rules/overview")}` + ); + } +} + +function handleUnsupportedTargetOnly(options: InstallRulesCommandOptions): InstallRulesResults { + log.info( + `${cliLink("Install the rules manually", "https://trigger.dev/docs/agents/rules/overview")}` + ); + + return []; +} + +async function installRulesForTarget( + targetName: ResolvedTargets, + currentVersion: ManifestVersion, + config: ResolvedConfig, + options: InstallRulesCommandOptions +) { + if (targetName === "unsupported") { + // This should not happen as unsupported targets are handled separately + // but if it does, provide helpful output + log.message( + 
`${chalk.yellow("⚠")} Skipping unsupported target - see manual configuration above` + ); + return; + } + + const result = await performInstallForTarget(targetName, currentVersion, config, options); + + return result; +} + +async function performInstallForTarget( + targetName: (typeof targets)[number], + currentVersion: ManifestVersion, + config: ResolvedConfig, + cmdOptions: InstallRulesCommandOptions +) { + const options = await resolveOptionsForTarget(targetName, currentVersion, cmdOptions); + + const installations = await performInstallOptionsForTarget(targetName, options, config); + + return { + targetName, + installations, + }; +} + +async function performInstallOptionsForTarget( + targetName: (typeof targets)[number], + options: Array, + config: ResolvedConfig +) { + const results = []; + + for (const option of options) { + const result = await performInstallOptionForTarget(targetName, option, config); + results.push(result); + } + + return results; +} + +async function performInstallOptionForTarget( + targetName: (typeof targets)[number], + option: RulesManifestVersionOption, + config: ResolvedConfig +) { + switch (option.installStrategy) { + case "default": { + return performInstallDefaultOptionForTarget(targetName, option, config); + } + case "claude-code-subagent": { + return performInstallClaudeCodeSubagentOptionForTarget(option); + } + default: { + throw new Error(`Unknown install strategy: ${option.installStrategy}`); + } + } +} + +async function performInstallDefaultOptionForTarget( + targetName: (typeof targets)[number], + option: RulesManifestVersionOption, + config: ResolvedConfig +) { + // Get the path to the rules file + const rulesFilePath = resolveRulesFilePathForTargetOption(targetName, option); + const rulesFileContents = await resolveRulesFileContentsForTarget(targetName, option, config); + const mergeStrategy = await resolveRulesFileMergeStrategyForTarget(targetName); + + // Try and read the existing rules file + const rulesFileAbsolutePath 
= join(process.cwd(), rulesFilePath); + await writeToFile(rulesFileAbsolutePath, rulesFileContents, mergeStrategy, option.name); + + return { option, location: rulesFilePath }; +} + +async function writeToFile( + path: string, + contents: string, + mergeStrategy: "overwrite" | "replace" = "overwrite", + sectionName: string +) { + const exists = await pathExists(path); + + if (exists) { + switch (mergeStrategy) { + case "overwrite": { + await safeWriteFile(path, contents); + break; + } + case "replace": { + const existingContents = await readFile(path); + + const pattern = new RegExp( + `.*?`, + "gs" + ); + + // If the section name is not found, just append the new content + if (!pattern.test(existingContents)) { + await safeWriteFile(path, existingContents + "\n\n" + contents); + break; + } + + const updatedContent = existingContents.replace(pattern, contents); + + await safeWriteFile(path, updatedContent); + break; + } + default: { + throw new Error(`Unknown merge strategy: ${mergeStrategy}`); + } + } + } else { + await safeWriteFile(path, contents); + } +} + +async function performInstallClaudeCodeSubagentOptionForTarget(option: RulesManifestVersionOption) { + const rulesFilePath = ".claude/agents/trigger-dev-task-writer.md"; + const rulesFileContents = option.contents; + + await writeToFile(rulesFilePath, rulesFileContents, "overwrite", option.name); + + return { option, location: rulesFilePath }; +} + +function resolveRulesFilePathForTargetOption( + targetName: (typeof targets)[number], + option: RulesManifestVersionOption +): string { + if (option.installStrategy === "claude-code-subagent") { + return ".claude/agents/trigger-dev-task-writer.md"; + } + + switch (targetName) { + case "claude-code": { + return "CLAUDE.md"; + } + case "cursor": { + return `.cursor/rules/trigger.${option.name}.mdc`; + } + case "vscode": { + return `.github/instructions/trigger-${option.name}.instructions.md`; + } + case "windsurf": { + return 
`.windsurf/rules/trigger-${option.name}.md`; + } + case "gemini-cli": { + return `GEMINI.md`; + } + case "cline": { + return `.clinerules/trigger-${option.name}.md`; + } + case "agents.md": { + return "AGENTS.md"; + } + case "amp": { + return "AGENT.md"; + } + case "kilo": { + return `.kilocode/rules/trigger-${option.name}.md`; + } + case "ruler": { + return `.ruler/trigger-${option.name}.md`; + } + default: { + throw new Error(`Unknown target: ${targetName}`); + } + } +} + +async function resolveRulesFileMergeStrategyForTarget(targetName: (typeof targets)[number]) { + switch (targetName) { + case "amp": + case "agents.md": + case "gemini-cli": + case "claude-code": { + return "replace"; + } + default: { + return "overwrite"; + } + } +} + +async function resolveRulesFileContentsForTarget( + targetName: (typeof targets)[number], + option: RulesManifestVersionOption, + config: ResolvedConfig +) { + switch (targetName) { + case "cursor": { + return $output( + frontmatter({ + description: option.label, + globs: option.applyTo ?? "**/trigger/**/*.ts", + alwaysApply: false, + }), + option.contents + ); + } + case "vscode": { + return $output( + frontmatter({ + applyTo: option.applyTo ?? "**/trigger/**/*.ts", + }), + option.contents + ); + } + case "windsurf": { + return $output( + frontmatter({ + trigger: "glob", + globs: option.applyTo ?? 
"**/trigger/**/*.ts", + }), + option.contents + ); + } + default: { + return $output( + ``, + option.contents, + `` + ); + } + } +} + +function frontmatter(data: Record) { + return $output("---", ...Object.entries(data).map(([key, value]) => `${key}: ${value}`), "---"); +} + +function $output(...strings: string[]) { + return strings.map((s) => s).join("\n"); +} + +async function resolveOptionsForTarget( + targetName: (typeof targets)[number], + currentVersion: ManifestVersion, + cmdOptions: InstallRulesCommandOptions +) { + const possibleOptions = currentVersion.options.filter( + (option) => !option.client || option.client === targetName + ); + + const selectedOptions = await multiselect({ + message: `Choose the rules you want to install for ${targetLabels[targetName]}`, + options: possibleOptions.map((option) => ({ + value: option, + label: option.title, + hint: `${option.label} [~${option.tokens} tokens]`, + })), + required: true, + }); + + if (isCancel(selectedOptions)) { + throw new OutroCommandError("No options selected"); + } + + return selectedOptions; +} + +async function resolveTargets(options: InstallRulesCommandOptions): Promise { + if (options.target) { + return options.target; + } + + const selectOptions: Array<{ + value: string; + label: string; + hint?: string; + }> = targets.map((target) => ({ + value: target, + label: targetLabels[target], + })); + + selectOptions.push({ + value: "unsupported", + label: "Unsupported target", + hint: "We don't support this target yet, but you can still install the rules manually.", + }); + + const $selectOptions = selectOptions as Array<{ + value: ResolvedTargets; + label: string; + hint?: string; + }>; + + const selectedTargets = await multiselect({ + message: "Select one or more targets to install the rules into", + options: $selectOptions, + required: true, + }); + + if (isCancel(selectedTargets)) { + throw new OutroCommandError("No targets selected"); + } + + return selectedTargets; +} diff --git 
a/packages/cli-v3/src/commands/login.ts b/packages/cli-v3/src/commands/login.ts index 953a0c796f..da8b080580 100644 --- a/packages/cli-v3/src/commands/login.ts +++ b/packages/cli-v3/src/commands/login.ts @@ -346,7 +346,7 @@ export async function login(options?: LoginOptions): Promise { }); } -async function getPersonalAccessToken(apiClient: CliApiClient, authorizationCode: string) { +export async function getPersonalAccessToken(apiClient: CliApiClient, authorizationCode: string) { return await tracer.startActiveSpan("getPersonalAccessToken", async (span) => { try { const token = await apiClient.getPersonalAccessToken(authorizationCode); diff --git a/packages/cli-v3/src/commands/mcp.ts b/packages/cli-v3/src/commands/mcp.ts new file mode 100644 index 0000000000..8604a455da --- /dev/null +++ b/packages/cli-v3/src/commands/mcp.ts @@ -0,0 +1,119 @@ +import { intro, outro } from "@clack/prompts"; +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import { VERSION } from "@trigger.dev/core"; +import { tryCatch } from "@trigger.dev/core/utils"; +import { Command, Option as CommandOption } from "commander"; +import { z } from "zod"; +import { CommonCommandOptions, commonOptions, wrapCommandAction } from "../cli/common.js"; +import { CLOUD_API_URL } from "../consts.js"; +import { McpContext } from "../mcp/context.js"; +import { FileLogger } from "../mcp/logger.js"; +import { registerTools } from "../mcp/tools.js"; +import { printStandloneInitialBanner } from "../utilities/initialBanner.js"; +import { logger } from "../utilities/logger.js"; +import { installMcpServer } from "./install-mcp.js"; +import { serverMetadata } from "../mcp/config.js"; +import { initiateRulesInstallWizard } from "./install-rules.js"; + +const McpCommandOptions = CommonCommandOptions.extend({ + projectRef: z.string().optional(), + logFile: z.string().optional(), + devOnly: z.boolean().default(false), 
+ rulesInstallManifestPath: z.string().optional(), + rulesInstallBranch: z.string().optional(), +}); + +export type McpCommandOptions = z.infer; + +export function configureMcpCommand(program: Command) { + return commonOptions( + program + .command("mcp") + .description("Run the MCP server") + .option("-p, --project-ref ", "The project ref to use") + .option( + "--dev-only", + "Only run the MCP server for the dev environment. Attempts to access other environments will fail." + ) + .option("--log-file ", "The file to log to") + .addOption( + new CommandOption( + "--rules-install-manifest-path ", + "The path to the rules install manifest" + ).hideHelp() + ) + .addOption( + new CommandOption( + "--rules-install-branch ", + "The branch to install the rules from" + ).hideHelp() + ) + ).action(async (options) => { + wrapCommandAction("mcp", McpCommandOptions, options, async (opts) => { + await mcpCommand(opts); + }); + }); +} + +export async function mcpCommand(options: McpCommandOptions) { + if (process.stdout.isTTY) { + await printStandloneInitialBanner(true); + + intro("Welcome to the Trigger.dev MCP server install wizard πŸ§™"); + + const [installError] = await tryCatch( + installMcpServer({ + yolo: false, + tag: VERSION as string, + logLevel: "log", + }) + ); + + if (installError) { + outro(`Failed to install MCP server: ${installError.message}`); + return; + } + + await initiateRulesInstallWizard({ + manifestPath: options.rulesInstallManifestPath, + branch: options.rulesInstallBranch, + }); + + return; + } + + logger.loggerLevel = "none"; + + const server = new McpServer( + { + name: serverMetadata.name, + version: serverMetadata.version, + }, + { + instructions: serverMetadata.instructions, + } + ); + + server.server.oninitialized = async () => { + fileLogger?.log("initialized mcp command", { options, argv: process.argv }); + }; + + // Start receiving messages on stdin and sending messages on stdout + const transport = new StdioServerTransport(); + + const 
fileLogger: FileLogger | undefined = options.logFile + ? new FileLogger(options.logFile, server) + : undefined; + + const context = new McpContext(server, { + projectRef: options.projectRef, + fileLogger, + apiUrl: options.apiUrl ?? CLOUD_API_URL, + profile: options.profile, + }); + + registerTools(context); + + await server.connect(transport); +} diff --git a/packages/cli-v3/src/commands/update.ts b/packages/cli-v3/src/commands/update.ts index 92fd2ec8f8..f67e9bf7db 100644 --- a/packages/cli-v3/src/commands/update.ts +++ b/packages/cli-v3/src/commands/update.ts @@ -1,7 +1,7 @@ import { confirm, intro, isCancel, log, outro } from "@clack/prompts"; import { Command } from "commander"; import { detectPackageManager, installDependencies } from "nypm"; -import { basename, dirname, resolve } from "path"; +import { basename, dirname, join, resolve } from "path"; import { PackageJson, readPackageJSON, type ResolveOptions, resolvePackageJSON } from "pkg-types"; import { z } from "zod"; import { CommonCommandOptions, OutroCommandError, wrapCommandAction } from "../cli/common.js"; @@ -319,7 +319,7 @@ async function getTriggerDependencies( continue; } - const $version = await tryResolveTriggerPackageVersion(name, packageJsonPath); + const $version = await tryResolveTriggerPackageVersion(name, dirname(packageJsonPath)); deps.push({ type, name, version: $version ?? 
version }); } @@ -328,13 +328,13 @@ async function getTriggerDependencies( return deps; } -async function tryResolveTriggerPackageVersion( +export async function tryResolveTriggerPackageVersion( name: string, - packageJsonPath: string + basedir?: string ): Promise { try { const resolvedPath = nodeResolve.sync(name, { - basedir: dirname(packageJsonPath), + basedir, }); logger.debug(`Resolved ${name} package version path`, { name, resolvedPath }); @@ -342,11 +342,11 @@ async function tryResolveTriggerPackageVersion( const { packageJson } = await getPackageJson(dirname(resolvedPath), { test: (filePath) => { // We need to skip any type-marker files - if (filePath.includes("dist/commonjs")) { + if (filePath.includes(join("dist", "commonjs"))) { return false; } - if (filePath.includes("dist/esm")) { + if (filePath.includes(join("dist", "esm"))) { return false; } diff --git a/packages/cli-v3/src/dev/devOutput.ts b/packages/cli-v3/src/dev/devOutput.ts index f53b6f0e2e..6365eee2ed 100644 --- a/packages/cli-v3/src/dev/devOutput.ts +++ b/packages/cli-v3/src/dev/devOutput.ts @@ -41,11 +41,11 @@ export function startDevOutput(options: DevOutputOptions) { const baseUrl = `${dashboardUrl}/projects/v3/${config.project}`; const rebuildStarted = (...[target]: EventBusEventArgs<"rebuildStarted">) => { - logger.log(chalkGrey("β—‹ Rebuilding background worker…")); + logger.log(chalkGrey("β—‹ Rebuilding local worker…")); }; const buildStarted = (...[target]: EventBusEventArgs<"buildStarted">) => { - logger.log(chalkGrey("β—‹ Building background worker…")); + logger.log(chalkGrey("β—‹ Building local worker…")); }; const buildFailed = (...[target, error]: EventBusEventArgs<"buildFailed">) => { @@ -89,7 +89,7 @@ export function startDevOutput(options: DevOutputOptions) { const runsLink = chalkLink(cliLink("View runs", runsUrl)); const runtime = chalkGrey(`[${worker.build.runtime}]`); - const workerStarted = chalkGrey("Background worker ready"); + const workerStarted = chalkGrey("Local 
worker ready"); const workerVersion = chalkWorker(worker.serverWorker!.version); logParts.push(workerStarted, runtime, arrow, workerVersion); diff --git a/packages/cli-v3/src/mcp/auth.ts b/packages/cli-v3/src/mcp/auth.ts new file mode 100644 index 0000000000..5079fc8b66 --- /dev/null +++ b/packages/cli-v3/src/mcp/auth.ts @@ -0,0 +1,211 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { env } from "std-env"; +import { CliApiClient } from "../apiClient.js"; +import { CLOUD_API_URL } from "../consts.js"; +import { readAuthConfigProfile, writeAuthConfigProfile } from "../utilities/configFiles.js"; +import { + isPersonalAccessToken, + NotPersonalAccessTokenError, +} from "../utilities/isPersonalAccessToken.js"; +import { LoginResult, LoginResultOk } from "../utilities/session.js"; +import { getPersonalAccessToken } from "../commands/login.js"; +import open from "open"; +import pRetry from "p-retry"; +import { McpContext } from "./context.js"; +import { ApiClient } from "@trigger.dev/core/v3"; + +export type McpAuthOptions = { + server: McpServer; + context: McpContext; + defaultApiUrl?: string; + profile?: string; +}; + +export async function mcpAuth(options: McpAuthOptions): Promise { + const opts = { + defaultApiUrl: CLOUD_API_URL, + ...options, + }; + + const accessTokenFromEnv = env.TRIGGER_ACCESS_TOKEN; + + if (accessTokenFromEnv) { + if (!isPersonalAccessToken(accessTokenFromEnv)) { + throw new NotPersonalAccessTokenError( + "Your TRIGGER_ACCESS_TOKEN is not a Personal Access Token, they start with 'tr_pat_'. You can generate one here: https://cloud.trigger.dev/account/tokens" + ); + } + + const auth = { + accessToken: accessTokenFromEnv, + apiUrl: env.TRIGGER_API_URL ?? opts.defaultApiUrl ?? 
CLOUD_API_URL, + }; + + const apiClient = new CliApiClient(auth.apiUrl, auth.accessToken); + const userData = await apiClient.whoAmI(); + + if (!userData.success) { + throw new Error(userData.error); + } + + return { + ok: true as const, + profile: options?.profile ?? "default", + userId: userData.data.userId, + email: userData.data.email, + dashboardUrl: userData.data.dashboardUrl, + auth: { + accessToken: auth.accessToken, + apiUrl: auth.apiUrl, + }, + }; + } + + const authConfig = readAuthConfigProfile(options?.profile); + + if (authConfig && authConfig.accessToken) { + const apiClient = new CliApiClient( + authConfig.apiUrl ?? opts.defaultApiUrl, + authConfig.accessToken + ); + const userData = await apiClient.whoAmI(); + + if (!userData.success) { + throw new Error(userData.error); + } + + return { + ok: true as const, + profile: options?.profile ?? "default", + userId: userData.data.userId, + email: userData.data.email, + dashboardUrl: userData.data.dashboardUrl, + auth: { + accessToken: authConfig.accessToken, + apiUrl: authConfig.apiUrl ?? opts.defaultApiUrl, + }, + }; + } + + const apiClient = new CliApiClient(authConfig?.apiUrl ?? 
opts.defaultApiUrl); + + //generate authorization code + const authorizationCodeResult = await createAuthorizationCode(apiClient); + + const url = new URL(authorizationCodeResult.url); + + url.searchParams.set("source", "mcp"); + + const clientName = options.server.server.getClientVersion()?.name; + + if (clientName) { + url.searchParams.set("clientName", clientName); + } + // Only elicitInput if the client has the elicitation capability + + // Elicit the user to visit the authorization code URL + const allowLogin = await askForLoginPermission(opts.server, url.toString()); + + if (!allowLogin) { + return { + ok: false as const, + error: "User did not allow login", + }; + } + + // Open the authorization code URL in the browser + await open(url.toString()); + + // Poll for the personal access token + const indexResult = await pRetry( + () => getPersonalAccessToken(apiClient, authorizationCodeResult.authorizationCode), + { + //this means we're polling, same distance between each attempt + factor: 1, + retries: 60, + minTimeout: 1000, + } + ); + + writeAuthConfigProfile( + { accessToken: indexResult.token, apiUrl: opts.defaultApiUrl }, + options?.profile + ); + + const client = new CliApiClient(opts.defaultApiUrl, indexResult.token); + const userData = await client.whoAmI(); + + if (!userData.success) { + throw new Error(userData.error); + } + + return { + ok: true as const, + profile: options?.profile ?? 
"default", + userId: userData.data.userId, + email: userData.data.email, + dashboardUrl: userData.data.dashboardUrl, + auth: { + accessToken: indexResult.token, + apiUrl: opts.defaultApiUrl, + }, + }; +} + +async function createAuthorizationCode(apiClient: CliApiClient) { + const authorizationCodeResult = await apiClient.createAuthorizationCode(); + + if (!authorizationCodeResult.success) { + throw new Error(`Failed to create authorization code\n${authorizationCodeResult.error}`); + } + + return authorizationCodeResult.data; +} + +async function askForLoginPermission(server: McpServer, authorizationCodeUrl: string) { + const capabilities = server.server.getClientCapabilities(); + + if (typeof capabilities?.elicitation !== "object") { + return true; + } + + const result = await server.server.elicitInput({ + message: `You are not currently logged in. Would you like to login now? We'll automatically open the authorization code URL (${authorizationCodeUrl}) in your browser.`, + requestedSchema: { + type: "object", + properties: { + allowLogin: { + type: "boolean", + default: false, + title: "Allow Login", + description: "Whether to allow the user to login", + }, + }, + required: ["allowLogin"], + }, + }); + + return result.action === "accept" && result.content?.allowLogin; +} + +export async function createApiClientWithPublicJWT( + auth: LoginResultOk, + projectRef: string, + envName: string, + scopes: string[], + previewBranch?: string +) { + const cliApiClient = new CliApiClient(auth.auth.apiUrl, auth.auth.accessToken, previewBranch); + + const jwt = await cliApiClient.getJWT(projectRef, envName, { + claims: { + scopes, + }, + }); + + if (!jwt.success) { + return; + } + + return new ApiClient(auth.auth.apiUrl, jwt.data.token); +} diff --git a/packages/cli-v3/src/mcp/capabilities.ts b/packages/cli-v3/src/mcp/capabilities.ts new file mode 100644 index 0000000000..8c4e42581c --- /dev/null +++ b/packages/cli-v3/src/mcp/capabilities.ts @@ -0,0 +1,31 @@ +import { McpServer 
} from "@modelcontextprotocol/sdk/server/mcp.js"; + +export function hasRootsCapability(server: McpServer) { + const capabilities = server.server.getClientCapabilities(); + + if (!capabilities) { + return false; + } + + return "roots" in capabilities && typeof capabilities.roots === "object"; +} + +export function hasSamplingCapability(server: McpServer) { + const capabilities = server.server.getClientCapabilities(); + + if (!capabilities) { + return false; + } + + return "sampling" in capabilities && typeof capabilities.sampling === "object"; +} + +export function hasElicitationCapability(server: McpServer) { + const capabilities = server.server.getClientCapabilities(); + + if (!capabilities) { + return false; + } + + return "elicitation" in capabilities && typeof capabilities.elicitation === "object"; +} diff --git a/packages/cli-v3/src/mcp/config.ts b/packages/cli-v3/src/mcp/config.ts new file mode 100644 index 0000000000..bfa16437ec --- /dev/null +++ b/packages/cli-v3/src/mcp/config.ts @@ -0,0 +1,97 @@ +import { VERSION } from "../version.js"; + +export const serverMetadata = { + name: "trigger", + version: VERSION, + instructions: `Trigger.dev MCP server to automate your Trigger.dev projects and answer questions about Trigger.dev by searching the docs. +If you need help setting up Trigger.dev in your project please refer to https://trigger.dev/docs/manual-setup. +If the user asks for help with adding Trigger.dev to their project, please refer to https://trigger.dev/docs/manual-setup. + `, +}; + +export const toolsMetadata = { + search_docs: { + name: "search_docs", + title: "Search Docs", + description: + "Search across the Trigger.dev documentation to find relevant information, code examples, API references, and guides. Use this tool when you need to answer questions about Trigger.dev, find specific documentation, understand how features work, or locate implementation details. 
The search returns contextual content with titles and direct links to the documentation pages", + }, + list_projects: { + name: "list_projects", + title: "List Projects", + description: + "List all projects for the current user, useful for when searching for a project and for looking up a projectRef", + }, + list_orgs: { + name: "list_orgs", + title: "List Organizations", + description: + "List all organizations for the current user. Useful when looking up an org slug or ID.", + }, + create_project_in_org: { + name: "create_project_in_org", + title: "Create Project in Organization", + description: + "Create a new project in an organization. Only do this if the user wants to add Trigger.dev to an existing project. If there is already a trigger.config.ts file present, then you should not create a new project.", + }, + initialize_project: { + name: "initialize_project", + title: "Initialize Project", + description: + "Initialize Trigger.dev in your project. This will create a new project in the organization you select and add Trigger.dev to your project.", + }, + get_tasks: { + name: "get_tasks", + title: "Get Tasks", + description: + "Get all tasks in the project. Useful when searching for a task and for looking up a task identifier/slug", + }, + get_current_worker: { + name: "get_current_worker", + title: "Get Current Worker", + description: + "Get the current worker for the project. Useful when searching for a task and for looking up a task identifier/slug and payload schema, or looking for the latest version in a specific environment.", + }, + trigger_task: { + name: "trigger_task", + title: "Trigger Task", + description: + "Trigger a task in the project. Use the get_tasks tool to get a list of tasks and ask the user to select one if it's not clear which one to use.", + }, + get_run_details: { + name: "get_run_details", + title: "Get Run Details", + description: + "Get the details of a run. The run ID is the ID of the run that was triggered. 
It starts with run_", + }, + cancel_run: { + name: "cancel_run", + title: "Cancel Run", + description: + "Cancel a run. The run ID is the ID of the run that was triggered. It starts with run_", + }, + list_runs: { + name: "list_runs", + title: "List Runs", + description: + "List all runs for a project. Use this tool when you need to search for a run or list all runs for a project.", + }, + deploy: { + name: "deploy", + title: "Deploy", + description: + "Deploy a project. Use this tool when you need to deploy a project. This will trigger a deployment for the project. This is a long running operation and including a progress token will allow you to display the progress to the user.", + }, + list_deploys: { + name: "list_deploys", + title: "List Deploys", + description: + "List all deploys for a project. Use this tool when you need to search for a deploy or list all deploys for a project.", + }, + list_preview_branches: { + name: "list_preview_branches", + title: "List Preview Branches", + description: + "List all preview branches for a project. 
Use this tool when you need to search for a preview branch or list all preview branches for a project.", + }, +}; diff --git a/packages/cli-v3/src/mcp/context.ts b/packages/cli-v3/src/mcp/context.ts new file mode 100644 index 0000000000..75f6abd2a3 --- /dev/null +++ b/packages/cli-v3/src/mcp/context.ts @@ -0,0 +1,187 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { tryCatch } from "@trigger.dev/core/utils"; +import { ApiClient } from "@trigger.dev/core/v3"; +import path from "node:path"; +import { CliApiClient } from "../apiClient.js"; +import { loadConfig } from "../config.js"; +import { mcpAuth } from "./auth.js"; +import { + hasElicitationCapability, + hasRootsCapability, + hasSamplingCapability, +} from "./capabilities.js"; +import { FileLogger } from "./logger.js"; +import { fileURLToPath } from "node:url"; + +export type McpContextOptions = { + projectRef?: string; + fileLogger?: FileLogger; + apiUrl?: string; + profile?: string; + devOnly?: boolean; +}; + +export class McpContext { + public readonly server: McpServer; + public readonly options: McpContextOptions; + + constructor(server: McpServer, options: McpContextOptions) { + this.server = server; + this.options = options; + } + + get logger() { + return this.options.fileLogger; + } + + public async getAuth() { + const auth = await mcpAuth({ + server: this.server, + defaultApiUrl: this.options.apiUrl, + profile: this.options.profile, + context: this, + }); + + if (!auth.ok) { + throw new Error(auth.error); + } + + return auth; + } + + public async getCliApiClient(branch?: string) { + const auth = await this.getAuth(); + + return new CliApiClient(auth.auth.apiUrl, auth.auth.accessToken, branch); + } + + public async getApiClient(options: { + projectRef: string; + environment: string; + scopes: string[]; + branch?: string; + }) { + const cliApiClient = await this.getCliApiClient(options.branch); + + const jwt = await cliApiClient.getJWT(options.projectRef, 
options.environment, { + claims: { + scopes: options.scopes, + }, + }); + + if (!jwt.success) { + throw new Error( + `Could not get the authentication token for the project ${options.projectRef} in the ${options.environment} environment. Please try again.` + ); + } + + return new ApiClient(cliApiClient.apiURL, jwt.data.token); + } + + public async getCwd() { + if (!this.hasRootsCapability) { + return undefined; + } + + const response = await this.server.server.listRoots(); + + if (response.roots.length >= 1) { + return response.roots[0]?.uri ? fileURLToPath(response.roots[0].uri) : undefined; + } + + return undefined; + } + + public async getProjectRef(options: { projectRef?: string; cwd?: string }) { + if (options.projectRef) { + return options.projectRef; + } + + const projectDir = await this.getProjectDir({ cwd: options.cwd }); + + if (!projectDir.ok) { + throw new Error(projectDir.error); + } + + const [_, config] = await tryCatch(loadConfig({ cwd: projectDir.cwd })); + + if ( + config?.configFile && + typeof config.project === "string" && + config.project.startsWith("proj_") + ) { + return config.project; + } + + throw new Error("No project ref found. Please provide a projectRef."); + } + + public async getProjectDir({ cwd }: { cwd?: string }) { + // If cwd is a path to the actual trigger.config.ts file, then we should set the cwd to the directory of the file + let $cwd = cwd ? (path.extname(cwd) !== "" ? 
path.dirname(cwd) : cwd) : undefined; + + function isRelativePath(filePath: string) { + return !path.isAbsolute(filePath); + } + + if (!cwd) { + if (!this.hasRootsCapability) { + return { + ok: false, + error: + "The current MCP server does not support the roots capability, so please call the tool again with a projectRef or an absolute path as cwd parameter", + }; + } + + $cwd = await this.getCwd(); + } else if (isRelativePath(cwd)) { + if (!this.hasRootsCapability) { + return { + ok: false, + error: + "The current MCP server does not support the roots capability, so please call the tool again with a projectRef or an absolute path as cwd parameter", + }; + } + + const resolvedCwd = await this.getCwd(); + + if (!resolvedCwd) { + return { + ok: false, + error: "No current working directory found. Please provide a projectRef or a cwd.", + }; + } + + $cwd = path.resolve(resolvedCwd, cwd); + } + + if (!$cwd) { + return { + ok: false, + error: "No current working directory found. Please provide a projectRef or a cwd.", + }; + } + + return { + ok: true, + cwd: $cwd, + }; + } + + public async getDashboardUrl(path: string) { + const auth = await this.getAuth(); + return `${auth.dashboardUrl}${path}`; + } + + public get hasRootsCapability() { + return hasRootsCapability(this.server); + } + + public get hasSamplingCapability() { + return hasSamplingCapability(this.server); + } + + public get hasElicitationCapability() { + return hasElicitationCapability(this.server); + } +} diff --git a/packages/cli-v3/src/mcp/formatters.ts b/packages/cli-v3/src/mcp/formatters.ts new file mode 100644 index 0000000000..12b3cf05dd --- /dev/null +++ b/packages/cli-v3/src/mcp/formatters.ts @@ -0,0 +1,400 @@ +import { + ListRunResponseItem, + RetrieveRunResponse, + RetrieveRunTraceResponseBody, +} from "@trigger.dev/core/v3/schemas"; +import type { CursorPageResponse } from "@trigger.dev/core/v3/zodfetch"; + +const DEFAULT_MAX_TRACE_LINES = 500; + +export function formatRun(run: 
RetrieveRunResponse): string { + const lines: string[] = []; + + // Header with basic info + lines.push(`Run ${run.id}`); + lines.push(`Task: ${run.taskIdentifier}`); + lines.push(`Status: ${formatStatus(run.status)}`); + + // Timing information + const timing = formatTiming(run); + if (timing) { + lines.push(`Timing: ${timing}`); + } + + // Duration and cost + if (run.durationMs > 0) { + lines.push(`Duration: ${formatDuration(run.durationMs)}`); + } + + if (run.costInCents > 0) { + lines.push(`Cost: $${(run.costInCents / 100).toFixed(4)}`); + } + + // Attempt count + if (run.attemptCount > 1) { + lines.push(`Attempts: ${run.attemptCount}`); + } + + // Version and trigger info + if (run.version) { + lines.push(`Version: ${run.version}`); + } + + // Tags + if (run.tags && run.tags.length > 0) { + lines.push(`Tags: ${run.tags.join(", ")}`); + } + + // Error information + if (run.error) { + lines.push(`Error: ${run.error.name || "Error"}: ${run.error.message}`); + if (run.error.stackTrace) { + lines.push(`Stack: ${run.error.stackTrace.split("\n")[0]}`); // First line only + } + } + + // Related runs + const relatedInfo = formatRelatedRuns(run.relatedRuns); + if (relatedInfo) { + lines.push(relatedInfo); + } + + // Schedule info + if (run.schedule) { + lines.push(`Schedule: ${run.schedule.generator.expression} (${run.schedule.id})`); + } + + // Batch info + if (run.batchId) { + lines.push(`Batch: ${run.batchId}`); + } + + // Test flag + if (run.isTest) { + lines.push(`Test run`); + } + + // TTL info + if (run.ttl) { + lines.push(`TTL: ${run.ttl}`); + } + + // Payload and Output data + if (run.payload) { + lines.push(`Payload: ${JSON.stringify(run.payload, null, 2)}`); + } else if (run.payloadPresignedUrl) { + lines.push(`Payload: (large payload available via presigned URL: ${run.payloadPresignedUrl})`); + } + + if (run.output) { + lines.push(`Output: ${JSON.stringify(run.output, null, 2)}`); + } else if (run.outputPresignedUrl) { + lines.push(`Output: (large output 
available via presigned URL: ${run.outputPresignedUrl})`); + } + + // Metadata + if (run.metadata && Object.keys(run.metadata).length > 0) { + lines.push(`Metadata: ${Object.keys(run.metadata).length} fields`); + } + + return lines.join("\n"); +} + +function formatStatus(status: string): string { + return status.toLowerCase().replace(/_/g, " "); +} + +function formatTiming(run: RetrieveRunResponse): string | null { + const parts: string[] = []; + + parts.push(`created ${formatDateTime(run.createdAt)}`); + + if (run.startedAt) { + parts.push(`started ${formatDateTime(run.startedAt)}`); + } + + if (run.finishedAt) { + parts.push(`finished ${formatDateTime(run.finishedAt)}`); + } else if (run.delayedUntil) { + parts.push(`delayed until ${formatDateTime(run.delayedUntil)}`); + } + + return parts.length > 0 ? parts.join(", ") : null; +} + +function formatDateTime(date: Date | undefined): string { + if (!date) return "unknown"; + + try { + return date + .toISOString() + .replace("T", " ") + .replace(/\.\d{3}Z$/, " UTC"); + } catch { + return "unknown"; + } +} + +function formatDuration(durationMs: number): string { + if (durationMs < 1000) return `${durationMs}ms`; + if (durationMs < 60000) return `${(durationMs / 1000).toFixed(1)}s`; + if (durationMs < 3600000) return `${(durationMs / 60000).toFixed(1)}m`; + return `${(durationMs / 3600000).toFixed(1)}h`; +} + +function formatRelatedRuns(relatedRuns: RetrieveRunResponse["relatedRuns"]): string | null { + const parts: string[] = []; + + if (relatedRuns.parent) { + parts.push(`parent: ${relatedRuns.parent.id} (${relatedRuns.parent.status.toLowerCase()})`); + } + + if (relatedRuns.root && relatedRuns.root.id !== relatedRuns.parent?.id) { + parts.push(`root: ${relatedRuns.root.id} (${relatedRuns.root.status.toLowerCase()})`); + } + + if (relatedRuns.children && relatedRuns.children.length > 0) { + const childStatuses = relatedRuns.children.reduce( + (acc, child) => { + acc[child.status.toLowerCase()] = 
(acc[child.status.toLowerCase()] || 0) + 1; + return acc; + }, + {} as Record + ); + + const statusSummary = Object.entries(childStatuses) + .map(([status, count]) => `${count} ${status}`) + .join(", "); + + parts.push(`children: ${relatedRuns.children.length} runs (${statusSummary})`); + } + + return parts.length > 0 ? `Related: ${parts.join("; ")}` : null; +} + +export function formatRunTrace( + trace: RetrieveRunTraceResponseBody["trace"], + maxTraceLines: number = DEFAULT_MAX_TRACE_LINES +): string { + const lines: string[] = []; + + lines.push(`Trace ID: ${trace.traceId}`); + lines.push(""); + + // Format the root span and its children recursively + const reachedMaxLines = formatSpan(trace.rootSpan, lines, 0, maxTraceLines); + + if (reachedMaxLines) { + lines.push(`(truncated logs to ${maxTraceLines} lines)`); + } + + return lines.join("\n"); +} + +function formatSpan( + span: RetrieveRunTraceResponseBody["trace"]["rootSpan"], + lines: string[], + depth: number, + maxLines: number +): boolean { + if (lines.length >= maxLines) { + return true; + } + + const indent = " ".repeat(depth); + const prefix = depth === 0 ? 
"└─" : "β”œβ”€"; + + // Format span header + const statusIndicator = getStatusIndicator(span.data); + const duration = formatDuration(span.data.duration); + const startTime = formatDateTime(span.data.startTime); + + lines.push(`${indent}${prefix} ${span.message} ${statusIndicator}`); + lines.push(`${indent} Duration: ${duration}`); + lines.push(`${indent} Started: ${startTime}`); + + if (span.data.taskSlug) { + lines.push(`${indent} Task: ${span.data.taskSlug}`); + } + + if (span.data.taskPath) { + lines.push(`${indent} Path: ${span.data.taskPath}`); + } + + if (span.data.queueName) { + lines.push(`${indent} Queue: ${span.data.queueName}`); + } + + if (span.data.machinePreset) { + lines.push(`${indent} Machine: ${span.data.machinePreset}`); + } + + if (span.data.workerVersion) { + lines.push(`${indent} Worker: ${span.data.workerVersion}`); + } + + // Show properties if they exist + if (span.data.properties && Object.keys(span.data.properties).length > 0) { + lines.push( + `${indent} Properties: ${JSON.stringify(span.data.properties, null, 2).replace( + /\n/g, + "\n" + indent + " " + )}` + ); + } + + // Show output if it exists + if (span.data.output) { + lines.push( + `${indent} Output: ${JSON.stringify(span.data.output, null, 2).replace( + /\n/g, + "\n" + indent + " " + )}` + ); + } + + // Show events if they exist and are meaningful + if (span.data.events && span.data.events.length > 0) { + lines.push(`${indent} Events: ${span.data.events.length} events`); + // Optionally show first few events for context + const maxEvents = 3; + for (let i = 0; i < Math.min(span.data.events.length, maxEvents); i++) { + const event = span.data.events[i]; + if (typeof event === "object" && event !== null) { + const eventStr = JSON.stringify(event, null, 2).replace(/\n/g, "\n" + indent + " "); + lines.push(`${indent} [${i + 1}] ${eventStr}`); + } + } + if (span.data.events.length > maxEvents) { + lines.push(`${indent} ... 
and ${span.data.events.length - maxEvents} more events`); + } + } + + // Add spacing between spans + if (span.children && span.children.length > 0) { + lines.push(""); + } + + // Recursively format children + if (span.children) { + const reachedMaxLines = span.children.some((child, index) => { + const reachedMaxLines = formatSpan(child, lines, depth + 1, maxLines); + // Add spacing between sibling spans (except for the last one) + if (index < span.children.length - 1 && !reachedMaxLines) { + lines.push(""); + } + + return reachedMaxLines; + }); + + return reachedMaxLines; + } + + return false; +} + +function getStatusIndicator( + spanData: RetrieveRunTraceResponseBody["trace"]["rootSpan"]["data"] +): string { + if (spanData.isCancelled) return "[CANCELLED]"; + if (spanData.isError) return "[ERROR]"; + if (spanData.isPartial) return "[PARTIAL]"; + return "[COMPLETED]"; +} + +export function formatRunList(runsPage: CursorPageResponse): string { + const lines: string[] = []; + + // Header with count info + const totalRuns = runsPage.data.length; + lines.push(`Found ${totalRuns} run${totalRuns === 1 ? "" : "s"}`); + lines.push(""); + + if (totalRuns === 0) { + lines.push("No runs found."); + return lines.join("\n"); + } + + // Format each run in a compact table-like format + runsPage.data.forEach((run, index) => { + lines.push(`${index + 1}. 
${formatRunSummary(run)}`); + }); + + // Pagination info + lines.push(""); + const paginationInfo = []; + if (runsPage.pagination.previous) { + paginationInfo.push("← Previous page available"); + } + if (runsPage.pagination.next) { + paginationInfo.push("Next page available →"); + } + + if (paginationInfo.length > 0) { + lines.push(`Pagination: ${paginationInfo.join(" | ")}`); + if (runsPage.pagination.next) { + lines.push(`Next cursor: ${runsPage.pagination.next}`); + } + if (runsPage.pagination.previous) { + lines.push(`Previous cursor: ${runsPage.pagination.previous}`); + } + } + + return lines.join("\n"); +} + +function formatRunSummary(run: ListRunResponseItem): string { + const parts: string[] = []; + + // Basic info: ID, task, status + parts.push(`${run.id}`); + parts.push(`${run.taskIdentifier}`); + parts.push(`${formatStatus(run.status)}`); + + // Environment + parts.push(`env:${run.env.name}`); + + // Timing - show the most relevant time + let timeInfo = ""; + if (run.finishedAt) { + timeInfo = `finished ${formatDateTime(run.finishedAt)}`; + } else if (run.startedAt) { + timeInfo = `started ${formatDateTime(run.startedAt)}`; + } else if (run.delayedUntil) { + timeInfo = `delayed until ${formatDateTime(run.delayedUntil)}`; + } else { + timeInfo = `created ${formatDateTime(run.createdAt)}`; + } + parts.push(timeInfo); + + // Duration if available + if (run.durationMs > 0) { + parts.push(`took ${formatDuration(run.durationMs)}`); + } + + // Cost if significant + if (run.costInCents > 0) { + parts.push(`$${(run.costInCents / 100).toFixed(4)}`); + } + + // Tags if present + if (run.tags && run.tags.length > 0) { + const tagStr = + run.tags.length > 2 + ? 
`${run.tags.slice(0, 2).join(", ")}+${run.tags.length - 2}` + : run.tags.join(", "); + parts.push(`tags:[${tagStr}]`); + } + + // Test flag + if (run.isTest) { + parts.push("[TEST]"); + } + + // Version if available + if (run.version) { + parts.push(`v${run.version}`); + } + + return parts.join(" | "); +} diff --git a/packages/cli-v3/src/mcp/logger.ts b/packages/cli-v3/src/mcp/logger.ts new file mode 100644 index 0000000000..b30576a331 --- /dev/null +++ b/packages/cli-v3/src/mcp/logger.ts @@ -0,0 +1,47 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { appendFileSync } from "node:fs"; +import util from "node:util"; + +export class FileLogger { + private filePath: string; + private server: McpServer; + + constructor(filePath: string, server: McpServer) { + this.filePath = filePath; + this.server = server; + } + + log(message: string, ...args: unknown[]) { + const logMessage = `[${new Date().toISOString()}][${this.formatServerInfo()}] ${message} - ${util.inspect( + args, + { + depth: null, + colors: false, + } + )}\n`; + appendFileSync(this.filePath, logMessage); + } + + private formatServerInfo() { + return `${this.formatClientName()} ${this.formatClientVersion()} ${this.formatClientCapabilities()}`; + } + + private formatClientName() { + const clientName = this.server.server.getClientVersion()?.name; + return `client=${clientName ?? "unknown"}`; + } + + private formatClientVersion() { + const clientVersion = this.server.server.getClientVersion(); + + return `version=${clientVersion?.version ?? "unknown"}`; + } + + private formatClientCapabilities() { + const clientCapabilities = this.server.server.getClientCapabilities(); + + const keys = Object.keys(clientCapabilities ?? 
{}); + + return `capabilities=${keys.join(",")}`; + } +} diff --git a/packages/cli-v3/src/mcp/mintlifyClient.ts b/packages/cli-v3/src/mcp/mintlifyClient.ts new file mode 100644 index 0000000000..16fe41b411 --- /dev/null +++ b/packages/cli-v3/src/mcp/mintlifyClient.ts @@ -0,0 +1,74 @@ +export async function performSearch(query: string, signal: AbortSignal) { + const body = callToolBody("Search", { query }); + + const response = await fetch("https://trigger.dev/docs/mcp", { + method: "POST", + headers: { + "Content-Type": "application/json", + Accept: "application/json, text/event-stream", + "MCP-Protocol-Version": "2025-06-18", + }, + signal, + body: JSON.stringify(body), + }); + + const data = await parseResponse(response); + return data; +} + +async function parseResponse(response: Response) { + if (response.headers.get("content-type")?.includes("text/event-stream")) { + return parseSSEResponse(response); + } else { + return parseJSONResponse(response); + } +} + +async function parseJSONResponse(response: Response) { + const data = await response.json(); + return data; +} + +// Get the first data: event and return the parsed JSON of the event +async function parseSSEResponse(response: Response) { + const reader = response.body?.getReader(); + const decoder = new TextDecoder(); + + if (!reader) { + throw new Error("No reader found"); + } + + let buffer = ""; + + while (true) { + const { value, done } = await reader.read(); + if (done) throw new Error("SSE stream closed before data arrived"); + + buffer += decoder.decode(value, { stream: true }); + const events = buffer.split("\n\n"); // SSE delimiter + buffer = events.pop()!; // keep incomplete + + for (const evt of events) { + for (const line of evt.split("\n")) { + if (line.startsWith("data:")) { + const json = line.slice(5).trim(); + return JSON.parse(json); // ✅ got it + } + } + } + } + + throw new Error("No data: event found"); +} + +function callToolBody(tool: string, args: Record) { + return { + jsonrpc: 
"2.0", + id: 1, + method: "tools/call", + params: { + name: tool, + arguments: args, + }, + }; +} diff --git a/packages/cli-v3/src/mcp/schemas.ts b/packages/cli-v3/src/mcp/schemas.ts new file mode 100644 index 0000000000..b98faca0da --- /dev/null +++ b/packages/cli-v3/src/mcp/schemas.ts @@ -0,0 +1,203 @@ +import { + ApiDeploymentListParams, + MachinePresetName, + RunStatus, +} from "@trigger.dev/core/v3/schemas"; +import { z } from "zod"; + +export const ProjectRefSchema = z + .string() + .describe( + "The trigger.dev project ref, starts with proj_. We will attempt to automatically detect the project ref if running inside a directory that includes a trigger.config.ts file, or if you pass the --project-ref option to the MCP server." + ) + .optional(); + +export const CreateProjectInOrgInput = z.object({ + orgParam: z + .string() + .describe( + "The organization to create the project in, can either be the organization slug or the ID. Use the list_orgs tool to get a list of organizations and ask the user to select one." + ), + name: z.string().describe("The name of the project to create."), +}); + +export type CreateProjectInOrgInput = z.output; + +export const InitializeProjectInput = z.object({ + orgParam: z + .string() + .describe( + "The organization to create the project in, can either be the organization slug or the ID. Use the list_orgs tool to get a list of organizations and ask the user to select one." + ), + projectRef: ProjectRefSchema, + projectName: z + .string() + .describe( + "The name of the project to create. If projectRef is not provided, we will use this name to create a new project in the organization you select." + ), + cwd: z.string().describe("The current working directory of the project").optional(), +}); + +export type InitializeProjectInput = z.output; + +export const CommonProjectsInput = z.object({ + projectRef: ProjectRefSchema, + configPath: z + .string() + .describe( + "The path to the trigger.config.ts file. 
Only used when the trigger.config.ts file is not at the root dir (like in a monorepo setup). If not provided, we will try to find the config file in the current working directory" + ) + .optional(), + environment: z + .enum(["dev", "staging", "prod", "preview"]) + .describe("The environment to get tasks for") + .default("dev"), + branch: z + .string() + .describe("The branch to get tasks for, only used for preview environments") + .optional(), +}); + +export type CommonProjectsInput = z.output; + +export const TriggerTaskInput = CommonProjectsInput.extend({ + taskId: z + .string() + .describe( + "The ID/slug of the task to trigger. Use the get_tasks tool to get a list of tasks and ask the user to select one if it's not clear which one to use." + ), + payload: z + .any() + .describe( + "The payload to trigger the task with. Should match the task's payload schema. Not a JSON string, but the actual payload object" + ), + options: z + .object({ + queue: z + .object({ + name: z + .string() + .describe( + "The name of the queue to trigger the task in, by default will use the queue configured in the task" + ), + }) + .optional(), + delay: z + .string() + .or(z.coerce.date()) + .describe("The delay before the task run is executed") + .optional(), + idempotencyKey: z.string().describe("The idempotency key to use for the task run").optional(), + machine: MachinePresetName.describe("The machine preset to use for the task run").optional(), + maxAttempts: z + .number() + .int() + .describe("The maximum number of attempts to retry the task run") + .optional(), + maxDuration: z + .number() + .describe("The maximum duration in seconds of the task run") + .optional(), + tags: z + .array(z.string()) + .describe( + "Tags to add to the task run. Must be less than 128 characters and cannot have more than 5" + ) + .optional(), + ttl: z + .string() + .or(z.number().nonnegative().int()) + .describe( + "The time to live of the task run. 
If the run doesn't start executing within this time, it will be automatically cancelled." + ) + .default("10m"), + }) + .optional(), +}); + +export type TriggerTaskInput = z.output; + +export const CommonRunsInput = CommonProjectsInput.extend({ + runId: z.string().describe("The ID of the run to get the details of, starts with run_"), +}); + +export type CommonRunsInput = z.output; + +export const GetRunDetailsInput = CommonRunsInput.extend({ + maxTraceLines: z + .number() + .int() + .describe("The maximum number of lines to show in the trace. Defaults to 500") + .optional(), +}); + +export type GetRunDetailsInput = z.output; + +export const ListRunsInput = CommonProjectsInput.extend({ + cursor: z.string().describe("The cursor to use for pagination, starts with run_").optional(), + limit: z + .number() + .int() + .describe("The number of runs to list in a single page. Up to 100") + .optional(), + status: RunStatus.describe("Filter for runs with this run status").optional(), + taskIdentifier: z.string().describe("Filter for runs that match this task identifier").optional(), + version: z + .string() + .describe("Filter for runs that match this version, e.g. 20250808.3") + .optional(), + tag: z.string().describe("Filter for runs that include this tag").optional(), + from: z.string().describe("Filter for runs created after this ISO 8601 timestamp").optional(), + to: z.string().describe("Filter for runs created before this ISO 8601 timestamp").optional(), + period: z + .string() + .describe("Filter for runs created in the last N time period. e.g. 
7d, 30d, 365d") + .optional(), + machine: MachinePresetName.describe("Filter for runs that match this machine preset").optional(), +}); + +export type ListRunsInput = z.output; + +export const CommonDeployInput = CommonProjectsInput.omit({ + environment: true, +}).extend({ + environment: z + .enum(["staging", "prod", "preview"]) + .describe("The environment to trigger the task in") + .default("prod"), +}); + +export type CommonDeployInput = z.output; + +export const DeployInput = CommonDeployInput.extend({ + skipPromotion: z + .boolean() + .describe("Skip promoting the deployment to the current deployment for the environment") + .optional(), + skipSyncEnvVars: z + .boolean() + .describe("Skip syncing environment variables when using the syncEnvVars extension") + .optional(), + skipUpdateCheck: z + .boolean() + .describe("Skip checking for @trigger.dev package updates") + .optional(), +}); + +export type DeployInput = z.output; + +export const ListDeploysInput = CommonDeployInput.extend(ApiDeploymentListParams); + +export type ListDeploysInput = z.output; + +export const ListPreviewBranchesInput = z.object({ + projectRef: ProjectRefSchema, + configPath: z + .string() + .describe( + "The path to the trigger.config.ts file. Only used when the trigger.config.ts file is not at the root dir (like in a monorepo setup). 
If not provided, we will try to find the config file in the current working directory" + ) + .optional(), +}); + +export type ListPreviewBranchesInput = z.output; diff --git a/packages/cli-v3/src/mcp/tools.ts b/packages/cli-v3/src/mcp/tools.ts new file mode 100644 index 0000000000..8bcb8280e0 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools.ts @@ -0,0 +1,49 @@ +import { McpContext } from "./context.js"; +import { deployTool, listDeploysTool } from "./tools/deploys.js"; +import { searchDocsTool } from "./tools/docs.js"; +import { + createProjectInOrgTool, + initializeProjectTool, + listOrgsTool, + listProjectsTool, +} from "./tools/orgs.js"; +import { listPreviewBranchesTool } from "./tools/previewBranches.js"; +import { cancelRunTool, getRunDetailsTool, listRunsTool } from "./tools/runs.js"; +import { getCurrentWorker, triggerTaskTool } from "./tools/tasks.js"; +import { respondWithError } from "./utils.js"; + +export function registerTools(context: McpContext) { + const tools = [ + searchDocsTool, + listOrgsTool, + listProjectsTool, + createProjectInOrgTool, + initializeProjectTool, + getCurrentWorker, + triggerTaskTool, + listRunsTool, + getRunDetailsTool, + cancelRunTool, + deployTool, + listDeploysTool, + listPreviewBranchesTool, + ]; + + for (const tool of tools) { + context.server.registerTool( + tool.name, + { + annotations: { title: tool.title }, + description: tool.description, + inputSchema: tool.inputSchema, + }, + async (input, extra) => { + try { + return tool.handler(input, { ...extra, ctx: context }); + } catch (error) { + return respondWithError(error); + } + } + ); + } +} diff --git a/packages/cli-v3/src/mcp/tools/deploys.ts b/packages/cli-v3/src/mcp/tools/deploys.ts new file mode 100644 index 0000000000..ab09659a54 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/deploys.ts @@ -0,0 +1,225 @@ +import { DeployInput, ListDeploysInput } from "../schemas.js"; +import { toolsMetadata } from "../config.js"; +import { ToolMeta } from "../types.js"; 
+import { respondWithError, toolHandler } from "../utils.js"; +import { McpContext } from "../context.js"; +import { x } from "tinyexec"; +import { getPackageJson, tryResolveTriggerPackageVersion } from "../../commands/update.js"; +import { VERSION } from "../../version.js"; +import { resolveSync as esmResolve } from "mlly"; +import { fileURLToPath } from "node:url"; +import stripAnsi from "strip-ansi"; + +export const deployTool = { + name: toolsMetadata.deploy.name, + title: toolsMetadata.deploy.title, + description: toolsMetadata.deploy.description, + inputSchema: DeployInput.shape, + handler: toolHandler(DeployInput.shape, async (input, { ctx, createProgressTracker, _meta }) => { + ctx.logger?.log("calling deploy", { input }); + + if (ctx.options.devOnly) { + return respondWithError( + `This MCP server is only available for the dev environment. The deploy command is not allowed with the --dev-only flag.` + ); + } + + const cwd = await ctx.getProjectDir({ cwd: input.configPath }); + + if (!cwd.ok) { + return respondWithError(cwd.error); + } + + const auth = await ctx.getAuth(); + + const args = ["deploy", "--env", input.environment, "--api-url", auth.auth.apiUrl]; + + if (input.environment === "preview" && input.branch) { + args.push("--branch", input.branch); + } + + if (ctx.options.profile) { + args.push("--profile", ctx.options.profile); + } + + if (input.skipPromotion) { + args.push("--skip-promotion"); + } + + if (input.skipSyncEnvVars) { + args.push("--skip-sync-env-vars"); + } + + if (input.skipUpdateCheck) { + args.push("--skip-update-check"); + } + + const [nodePath, cliPath] = await resolveCLIExec(ctx, cwd.cwd); + + ctx.logger?.log("deploy process args", { + nodePath, + cliPath, + args, + meta: _meta, + }); + + const progressTracker = createProgressTracker(100); + await progressTracker.updateProgress( + 5, + `Starting deploy to ${input.environment}${input.branch ? 
` on branch ${input.branch}` : ""}` + ); + + const deployProcess = x(nodePath, [cliPath, ...args], { + nodeOptions: { + cwd: cwd.cwd, + env: { + TRIGGER_MCP_SERVER: "1", + }, + }, + }); + + const logs = []; + + for await (const line of deployProcess) { + const lineWithoutAnsi = stripAnsi(line); + + const buildingVersion = lineWithoutAnsi.match(/Building version (\d+\.\d+)/); + + if (buildingVersion) { + await progressTracker.incrementProgress(1, `Building version ${buildingVersion[1]}`); + } else { + await progressTracker.incrementProgress(1); + } + + logs.push(stripAnsi(line)); + } + + await progressTracker.complete("Deploy complete"); + + ctx.logger?.log("deploy deployProcess", { + logs, + }); + + if (deployProcess.exitCode !== 0) { + return respondWithError(logs.join("\n")); + } + + return { + content: [{ type: "text", text: logs.join("\n") }], + }; + }), +}; + +export const listDeploysTool = { + name: toolsMetadata.list_deploys.name, + title: toolsMetadata.list_deploys.title, + description: toolsMetadata.list_deploys.description, + inputSchema: ListDeploysInput.shape, + handler: toolHandler(ListDeploysInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling list_deploys", { input }); + + if (ctx.options.devOnly) { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. 
Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const apiClient = await ctx.getApiClient({ + projectRef, + environment: input.environment, + scopes: ["read:deployments"], + branch: input.branch, + }); + + const result = await apiClient.listDeployments(input); + + return { + content: [{ type: "text", text: JSON.stringify(result, null, 2) }], + }; + }), +}; + +async function resolveCLIExec(context: McpContext, cwd?: string): Promise<[string, string]> { + // Lets first try to get the version of the CLI package + const installedCLI = await tryResolveTriggerCLIPath(context, cwd); + + if (installedCLI) { + context.logger?.log("resolve_cli_exec installedCLI", { installedCLI }); + + return [process.argv[0] ?? "node", installedCLI.path]; + } + + const sdkVersion = await tryResolveTriggerPackageVersion("@trigger.dev/sdk", cwd); + + if (!sdkVersion) { + context.logger?.log("resolve_cli_exec no sdk version found", { cwd }); + + return [process.argv[0] ?? "npx", process.argv[1] ?? 
"trigger.dev@latest"]; + } + + if (sdkVersion === VERSION) { + context.logger?.log("resolve_cli_exec sdk version is the same as the current version", { + sdkVersion, + }); + + if (typeof process.argv[0] === "string" && typeof process.argv[1] === "string") { + return [process.argv[0], process.argv[1]]; + } + + return ["npx", "trigger.dev@latest"]; + } + + return ["npx", `trigger.dev@${sdkVersion}`]; +} + +async function tryResolveTriggerCLIPath( + context: McpContext, + basedir?: string +): Promise< + | { + path: string; + version: string; + } + | undefined +> { + try { + const resolvedPathFileURI = esmResolve("trigger.dev", { + url: basedir, + }); + + const resolvedPath = fileURLToPath(resolvedPathFileURI); + + context.logger?.log("resolve_cli_exec resolvedPathFileURI", { resolvedPathFileURI }); + + const { packageJson } = await getPackageJson(resolvedPath, { + test: (filePath) => { + // We need to skip any type-marker files + if (filePath.includes("dist/commonjs")) { + return false; + } + + if (filePath.includes("dist/esm")) { + return false; + } + + return true; + }, + }); + + if (packageJson.version) { + context.logger?.log("resolve_cli_exec packageJson", { packageJson }); + + return { path: resolvedPath, version: packageJson.version }; + } + + return; + } catch (error) { + context.logger?.log("resolve_cli_exec error", { error }); + return undefined; + } +} diff --git a/packages/cli-v3/src/mcp/tools/docs.ts b/packages/cli-v3/src/mcp/tools/docs.ts new file mode 100644 index 0000000000..7b5a9d27e8 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/docs.ts @@ -0,0 +1,20 @@ +import { z } from "zod"; +import { toolsMetadata } from "../config.js"; +import { toolHandler } from "../utils.js"; +import { performSearch } from "../mintlifyClient.js"; + +export const searchDocsTool = { + name: toolsMetadata.search_docs.name, + title: toolsMetadata.search_docs.title, + description: toolsMetadata.search_docs.description, + inputSchema: { + query: z.string(), + }, + handler: 
toolHandler({ query: z.string() }, async (input, { ctx, signal }) => { + ctx.logger?.log("calling search_docs", { input }); + + const results = await performSearch(input.query, signal); + + return results.result; + }), +}; diff --git a/packages/cli-v3/src/mcp/tools/orgs.ts b/packages/cli-v3/src/mcp/tools/orgs.ts new file mode 100644 index 0000000000..9f8244b586 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/orgs.ts @@ -0,0 +1,243 @@ +import { CallToolResult } from "@modelcontextprotocol/sdk/types.js"; +import { GetProjectsResponseBody } from "@trigger.dev/core/v3"; +import { toolsMetadata } from "../config.js"; +import { CreateProjectInOrgInput, InitializeProjectInput } from "../schemas.js"; +import { ToolMeta } from "../types.js"; +import { respondWithError, toolHandler } from "../utils.js"; +import { loadConfig } from "../../config.js"; +import { tryCatch } from "@trigger.dev/core/utils"; + +export const listOrgsTool = { + name: toolsMetadata.list_orgs.name, + title: toolsMetadata.list_orgs.title, + description: toolsMetadata.list_orgs.description, + inputSchema: {}, + handler: async (input: unknown, { ctx }: ToolMeta): Promise => { + ctx.logger?.log("calling list_orgs", { input }); + + const cliApiClient = await ctx.getCliApiClient(); + + const orgs = await cliApiClient.getOrgs(); + + if (!orgs.success) { + return respondWithError(orgs.error); + } + + ctx.logger?.log("list_orgs", { orgs: orgs.data }); + + const contents = orgs.data.map((org) => { + return `- ${org.title} (id=${org.id}) (slug=${org.slug}) (createdAt=${org.createdAt})`; + }); + + return { + content: [{ type: "text", text: contents.join("\n") }], + }; + }, +}; + +export const listProjectsTool = { + name: toolsMetadata.list_projects.name, + title: toolsMetadata.list_projects.title, + description: toolsMetadata.list_projects.description, + inputSchema: {}, + handler: async (input: unknown, { ctx }: ToolMeta): Promise => { + ctx.logger?.log("calling list_projects", { input }); + + const 
cliApiClient = await ctx.getCliApiClient(); + + const projects = await cliApiClient.getProjects(); + + if (!projects.success) { + return respondWithError(projects.error); + } + + ctx.logger?.log("list_projects", { projects: projects.data }); + + const groupedByOrg = projects.data.reduce( + (acc, project) => { + if (!project.organization) { + return acc; + } + + acc[project.organization.id] = acc[project.organization.id] || { + organization: project.organization, + projects: [], + }; + acc[project.organization.id]!.projects.push(project); + + return acc; + }, + {} as Record< + string, + { + organization: GetProjectsResponseBody[number]["organization"]; + projects: GetProjectsResponseBody[number][]; + } + > + ); + + const contents = Object.values(groupedByOrg) + .map((org) => { + const parts = [ + `## Organization ${org.organization.title} (id=${org.organization.id}) (slug=${org.organization.slug}) projects:`, + ]; + + for (const project of org.projects) { + parts.push( + `- ${project.name} (projectRef=${project.externalRef}) (slug=${project.slug}) (createdAt=${project.createdAt})` + ); + } + + return parts.join("\n"); + }) + .join("\n"); + + return { + content: [ + { + type: "text", + text: contents, + }, + ], + }; + }, +}; + +export const createProjectInOrgTool = { + name: toolsMetadata.create_project_in_org.name, + title: toolsMetadata.create_project_in_org.title, + description: toolsMetadata.create_project_in_org.description, + inputSchema: CreateProjectInOrgInput.shape, + handler: toolHandler(CreateProjectInOrgInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling create_project_in_org", { input }); + + const cliApiClient = await ctx.getCliApiClient(); + + const project = await cliApiClient.createProject(input.orgParam, { + name: input.name, + }); + + if (!project.success) { + return respondWithError(project.error); + } + + ctx.logger?.log("create_project_in_org", { project: project.data }); + + const contents = [ + `Project created successfully: 
${project.data.name} (projectRef=${project.data.externalRef}) (slug=${project.data.slug}) (createdAt=${project.data.createdAt})`, + ]; + + return { + content: [{ type: "text", text: contents.join("\n") }], + }; + }), +}; + +export const initializeProjectTool = { + name: toolsMetadata.initialize_project.name, + title: toolsMetadata.initialize_project.title, + description: toolsMetadata.initialize_project.description, + inputSchema: InitializeProjectInput.shape, + handler: toolHandler(InitializeProjectInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling initialize_project", { input }); + + let projectRef: string | undefined = input.projectRef; + + if (!projectRef) { + const cwd = input.cwd ?? (await ctx.getCwd()); + + if (!cwd) { + return respondWithError( + "No current working directory found. Please provide a projectRef or a cwd." + ); + } + + // Try to load the config file + const [_, config] = await tryCatch(loadConfig({ cwd })); + + if (config?.configFile) { + if (typeof config.project === "string" && config.project.startsWith("proj_")) { + ctx.logger?.log("initialize_project existing project", { + config, + projectRef: config.project, + }); + + return { + content: [ + { + type: "text", + text: `We found an existing trigger.config.ts file in the current working directory. Skipping initialization.`, + }, + ], + }; + } else { + return respondWithError( + "Could not find the project ref in the config file. Please provide a projectRef." 
+ ); + } + } + + const cliApiClient = await ctx.getCliApiClient(); + + const project = await cliApiClient.createProject(input.orgParam, { + name: input.projectName, + }); + + if (!project.success) { + return respondWithError( + `Failed to create project ${input.projectName} in organization ${input.orgParam}: ${project.error}` + ); + } + + ctx.logger?.log("initialize_project new project", { + project: project.data, + }); + + projectRef = project.data.externalRef; + } + + const cliApiClient = await ctx.getCliApiClient(); + + const projectEnv = await cliApiClient.getProjectEnv({ + projectRef: projectRef, + env: "dev", + }); + + const manualSetupGuide = await getManualSetupGuide( + projectRef, + projectEnv.success ? projectEnv.data.apiKey : undefined, + projectEnv.success ? projectEnv.data.apiUrl : undefined + ); + + return { + content: [ + { + type: "text", + text: manualSetupGuide, + }, + ], + }; + }), +}; + +async function getManualSetupGuide(projectRef: string, apiKey?: string, apiUrl?: string) { + const response = await fetch("https://trigger.dev/docs/manual-setup.md"); + let text = await response.text(); + + text = text.replace("", projectRef); + + text = text.replace("tr_dev_xxxxxxxxxx", apiKey ?? "tr_dev_xxxxxxxxxx"); + text = text.replace( + "https://your-trigger-instance.com", + apiUrl ?? "https://your-trigger-instance.com" + ); + + return ` +Use the following manual setup guide to initialize Trigger.dev in your project. Make sure to use the correct project ref: ${projectRef}, and the following environment variables: + +TRIGGER_PROJECT_REF=${projectRef} +TRIGGER_SECRET_KEY=${apiKey ?? "tr_dev_xxxxxxxxxx"} +${apiUrl ? 
`TRIGGER_API_URL=${apiUrl}` : ""} + +${text}`; +} diff --git a/packages/cli-v3/src/mcp/tools/previewBranches.ts b/packages/cli-v3/src/mcp/tools/previewBranches.ts new file mode 100644 index 0000000000..abadda29ab --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/previewBranches.ts @@ -0,0 +1,35 @@ +import { ListPreviewBranchesInput } from "../schemas.js"; +import { toolsMetadata } from "../config.js"; +import { ToolMeta } from "../types.js"; +import { respondWithError, toolHandler } from "../utils.js"; + +export const listPreviewBranchesTool = { + name: toolsMetadata.list_preview_branches.name, + title: toolsMetadata.list_preview_branches.title, + description: toolsMetadata.list_preview_branches.description, + inputSchema: ListPreviewBranchesInput.shape, + handler: toolHandler(ListPreviewBranchesInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling list_preview_branches", { input }); + + if (ctx.options.devOnly) { + return respondWithError(`This MCP server is only available for the dev environment. 
`); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const cliApiClient = await ctx.getCliApiClient(); + + const branches = await cliApiClient.listBranches(projectRef); + + if (!branches.success) { + return respondWithError(branches.error); + } + + return { + content: [{ type: "text", text: JSON.stringify(branches.data, null, 2) }], + }; + }), +}; diff --git a/packages/cli-v3/src/mcp/tools/runs.ts b/packages/cli-v3/src/mcp/tools/runs.ts new file mode 100644 index 0000000000..8a13603a23 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/runs.ts @@ -0,0 +1,151 @@ +import { toolsMetadata } from "../config.js"; +import { formatRun, formatRunList, formatRunTrace } from "../formatters.js"; +import { CommonRunsInput, GetRunDetailsInput, ListRunsInput } from "../schemas.js"; +import { respondWithError, toolHandler } from "../utils.js"; + +export const getRunDetailsTool = { + name: toolsMetadata.get_run_details.name, + title: toolsMetadata.get_run_details.title, + description: toolsMetadata.get_run_details.description, + inputSchema: GetRunDetailsInput.shape, + handler: toolHandler(GetRunDetailsInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling get_run_details", { input }); + + if (ctx.options.devOnly && input.environment !== "dev") { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. 
Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const apiClient = await ctx.getApiClient({ + projectRef, + environment: input.environment, + scopes: [`read:runs:${input.runId}`], + branch: input.branch, + }); + + const [runResult, traceResult] = await Promise.all([ + apiClient.retrieveRun(input.runId), + apiClient.retrieveRunTrace(input.runId), + ]); + + const formattedRun = formatRun(runResult); + const formattedTrace = formatRunTrace(traceResult.trace, input.maxTraceLines); + + const runUrl = await ctx.getDashboardUrl(`/projects/v3/${projectRef}/runs/${runResult.id}`); + + const content = [ + "## Run Details", + formattedRun, + "", + "## Run Trace", + formattedTrace, + "", + `[View in dashboard](${runUrl})`, + ]; + + return { + content: [ + { + type: "text", + text: content.join("\n"), + }, + ], + }; + }), +}; + +export const cancelRunTool = { + name: toolsMetadata.cancel_run.name, + title: toolsMetadata.cancel_run.title, + description: toolsMetadata.cancel_run.description, + inputSchema: CommonRunsInput.shape, + handler: toolHandler(CommonRunsInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling cancel_run", { input }); + + if (ctx.options.devOnly && input.environment !== "dev") { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. 
Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const apiClient = await ctx.getApiClient({ + projectRef, + environment: input.environment, + scopes: [`write:runs:${input.runId}`, `read:runs:${input.runId}`], + branch: input.branch, + }); + + await apiClient.cancelRun(input.runId); + + const retrieveResult = await apiClient.retrieveRun(input.runId); + + const runUrl = await ctx.getDashboardUrl( + `/projects/v3/${projectRef}/runs/${retrieveResult.id}` + ); + + return { + content: [{ type: "text", text: JSON.stringify({ ...retrieveResult, runUrl }, null, 2) }], + }; + }), +}; + +export const listRunsTool = { + name: toolsMetadata.list_runs.name, + title: toolsMetadata.list_runs.title, + description: toolsMetadata.list_runs.description, + inputSchema: ListRunsInput.shape, + handler: toolHandler(ListRunsInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling list_runs", { input }); + + if (ctx.options.devOnly && input.environment !== "dev") { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const apiClient = await ctx.getApiClient({ + projectRef, + environment: input.environment, + scopes: ["read:runs"], + branch: input.branch, + }); + + const $from = typeof input.from === "string" ? new Date(input.from) : undefined; + const $to = typeof input.to === "string" ? 
new Date(input.to) : undefined; + + const result = await apiClient.listRuns({ + after: input.cursor, + limit: input.limit, + status: input.status, + taskIdentifier: input.taskIdentifier, + version: input.version, + tag: input.tag, + from: $from, + to: $to, + period: input.period, + machine: input.machine, + }); + + const formattedRuns = formatRunList(result); + + return { + content: [{ type: "text", text: formattedRuns }], + }; + }), +}; diff --git a/packages/cli-v3/src/mcp/tools/tasks.ts b/packages/cli-v3/src/mcp/tools/tasks.ts new file mode 100644 index 0000000000..15e8d40295 --- /dev/null +++ b/packages/cli-v3/src/mcp/tools/tasks.ts @@ -0,0 +1,158 @@ +import { toolsMetadata } from "../config.js"; +import { CommonProjectsInput, TriggerTaskInput } from "../schemas.js"; +import { ToolMeta } from "../types.js"; +import { respondWithError, toolHandler } from "../utils.js"; + +export const getCurrentWorker = { + name: toolsMetadata.get_current_worker.name, + title: toolsMetadata.get_current_worker.title, + description: toolsMetadata.get_current_worker.description, + inputSchema: CommonProjectsInput.shape, + handler: toolHandler(CommonProjectsInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling get_current_worker", { input }); + + if (ctx.options.devOnly && input.environment !== "dev") { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. 
Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const cliApiClient = await ctx.getCliApiClient(input.branch); + + const workerResult = await cliApiClient.getWorkerByTag( + projectRef, + input.environment, + "current" + ); + + if (!workerResult.success) { + return respondWithError(workerResult.error); + } + + const { worker, urls } = workerResult.data; + + const contents = [ + `Current worker for ${input.environment} is ${worker.version} using ${worker.sdkVersion} of the SDK.`, + ]; + + if (worker.tasks.length > 0) { + contents.push(`The worker has ${worker.tasks.length} tasks registered:`); + + for (const task of worker.tasks) { + if (task.payloadSchema) { + contents.push( + `- ${task.slug} in ${task.filePath} (payload schema: ${JSON.stringify( + task.payloadSchema + )})` + ); + } else { + contents.push(`- ${task.slug} in ${task.filePath}`); + } + } + } else { + contents.push(`The worker has no tasks registered.`); + } + + contents.push(`\n`); + contents.push(`URLs:`); + contents.push(`- Runs: ${urls.runs}`); + contents.push(`\n`); + contents.push( + `You can use the list_runs tool with the version ${worker.version} to get the list of runs for this worker.` + ); + + if ( + typeof worker.sdkVersion === "string" && + typeof worker.cliVersion === "string" && + worker.sdkVersion !== worker.cliVersion + ) { + contents.push( + `WARNING: The SDK version (${worker.sdkVersion}) is different from the CLI version (${worker.cliVersion}). This might cause issues with the task execution. 
Make sure to pin the CLI and the SDK versions to ${worker.sdkVersion}.` + ); + } + + return { + content: [{ type: "text", text: contents.join("\n") }], + }; + }), +}; + +export const triggerTaskTool = { + name: toolsMetadata.trigger_task.name, + title: toolsMetadata.trigger_task.title, + description: toolsMetadata.trigger_task.description, + inputSchema: TriggerTaskInput.shape, + handler: toolHandler(TriggerTaskInput.shape, async (input, { ctx }) => { + ctx.logger?.log("calling trigger_task", { input }); + + if (ctx.options.devOnly && input.environment !== "dev") { + return respondWithError( + `This MCP server is only available for the dev environment. You tried to access the ${input.environment} environment. Remove the --dev-only flag to access other environments.` + ); + } + + const projectRef = await ctx.getProjectRef({ + projectRef: input.projectRef, + cwd: input.configPath, + }); + + const apiClient = await ctx.getApiClient({ + projectRef, + environment: input.environment, + scopes: ["write:tasks"], + branch: input.branch, + }); + + ctx.logger?.log("triggering task", { input }); + + let payload = input.payload; + + if (typeof payload === "string") { + try { + payload = JSON.parse(payload); + } catch { + ctx.logger?.log("payload is not a valid JSON string, using as is", { payload }); + } + } + + const result = await apiClient.triggerTask(input.taskId, { + payload, + options: input.options, + }); + + const taskRunUrl = await ctx.getDashboardUrl(`/projects/v3/${projectRef}/runs/${result.id}`); + + const contents = [ + `Task ${input.taskId} triggered and run with ID created: ${result.id}.`, + `View the run in the dashboard: ${taskRunUrl}`, + `You can also use the get_run_details tool to get the details of the run.`, + ]; + + if (input.environment === "dev") { + const cliApiClient = await ctx.getCliApiClient(input.branch); + const devStatus = await cliApiClient.getDevStatus(projectRef); + const isConnected = devStatus.success ? 
devStatus.data.isConnected : false; + const connectionMessage = isConnected + ? undefined + : "The dev CLI is not connected to this project, because it is not currently running. Make sure to run the dev command to execute triggered tasks."; + + if (connectionMessage) { + contents.push(connectionMessage); + } + } + + return { + content: [ + { + type: "text", + text: contents.join("\n"), + }, + ], + }; + }), +}; diff --git a/packages/cli-v3/src/mcp/types.ts b/packages/cli-v3/src/mcp/types.ts new file mode 100644 index 0000000000..697a2d0dfc --- /dev/null +++ b/packages/cli-v3/src/mcp/types.ts @@ -0,0 +1,7 @@ +import { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import { ServerNotification, ServerRequest } from "@modelcontextprotocol/sdk/types.js"; +import { McpContext } from "./context.js"; + +export type ToolMeta = RequestHandlerExtra & { + ctx: McpContext; +}; diff --git a/packages/cli-v3/src/mcp/utils.ts b/packages/cli-v3/src/mcp/utils.ts new file mode 100644 index 0000000000..b783365e67 --- /dev/null +++ b/packages/cli-v3/src/mcp/utils.ts @@ -0,0 +1,134 @@ +import type { CallToolResult, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import { ToolMeta } from "./types.js"; + +export function respondWithError(error: unknown): CallToolResult { + return { + isError: true, + content: [ + { + type: "text", + text: JSON.stringify({ error: enumerateError(error) }), + }, + ], + }; +} + +function enumerateError(error: unknown) { + if (!error) { + return error; + } + + if (typeof error !== "object") { + return error; + } + + const newError: Record = {}; + + const errorProps = ["name", "message"] as const; + + for (const prop of errorProps) { + if (prop in error) { + newError[prop] = (error as Record)[prop]; + } + } + + return newError; +} + +export type ToolHandlerMeta = ToolMeta & { + createProgressTracker: (total: number) => ProgressTracker; +}; + +export function toolHandler( + shape: TInputShape, 
+ handler: ( + input: z.output>, + meta: ToolHandlerMeta + ) => Promise +) { + return async (input: unknown, extra: ToolMeta) => { + const parsedInput = z.object(shape).safeParse(input); + + if (!parsedInput.success) { + return respondWithError(parsedInput.error); + } + + function createProgressTracker(total: number) { + return new ProgressTracker(total, extra.sendNotification, extra._meta?.progressToken); + } + + return handler(parsedInput.data, { ...extra, createProgressTracker }); + }; +} + +class ProgressTracker { + private progress: number = 0; + private progressToken: string | number | undefined; + private total: number; + private message: string; + private sendNotification: (notification: ServerNotification) => Promise; + + constructor( + total: number, + sendNotification: (notification: ServerNotification) => Promise, + progressToken?: string | number + ) { + this.message = ""; + this.progressToken = progressToken; + this.progress = 0; + this.total = total; + this.sendNotification = sendNotification; + } + + async updateProgress(progress: number, message?: string) { + this.progress = progress; + + if (message) { + this.message = message; + } + + await this.#sendNotification(progress, this.message); + } + + async incrementProgress(increment: number, message?: string) { + this.progress += increment; + + // make sure the progress is never greater than the total + this.progress = Math.min(this.progress, this.total); + + if (message) { + this.message = message; + } + + await this.#sendNotification(this.progress, this.message); + } + + async complete(message?: string) { + this.progress = this.total; + if (message) { + this.message = message; + } + await this.#sendNotification(this.progress, this.message); + } + + getProgress() { + return this.progress; + } + + async #sendNotification(progress: number, message: string) { + if (!this.progressToken) { + return; + } + + await this.sendNotification({ + method: "notifications/progress", + params: { + progress, + total: 
this.total, + message: this.message, + progressToken: this.progressToken, + }, + }); + } +} diff --git a/packages/cli-v3/src/rules/install.ts b/packages/cli-v3/src/rules/install.ts new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/packages/cli-v3/src/rules/install.ts @@ -0,0 +1 @@ + diff --git a/packages/cli-v3/src/rules/manifest.ts b/packages/cli-v3/src/rules/manifest.ts new file mode 100644 index 0000000000..f3bf73ba95 --- /dev/null +++ b/packages/cli-v3/src/rules/manifest.ts @@ -0,0 +1,162 @@ +import { readFile } from "fs/promises"; +import { dirname, join } from "path"; +import { z } from "zod"; +import { RulesFileInstallStrategy } from "./types.js"; + +const RulesManifestDataSchema = z.object({ + name: z.string(), + description: z.string(), + currentVersion: z.string(), + versions: z.record( + z.string(), + z.object({ + options: z.array( + z.object({ + name: z.string(), + title: z.string(), + label: z.string(), + path: z.string(), + tokens: z.number(), + client: z.string().optional(), + installStrategy: z.string().optional(), + applyTo: z.string().optional(), + }) + ), + }) + ), +}); + +type RulesManifestData = z.infer; + +export type RulesManifestVersionOption = { + name: string; + title: string; + label: string; + contents: string; + tokens: number; + client: string | undefined; + installStrategy: RulesFileInstallStrategy; + applyTo: string | undefined; +}; + +export type ManifestVersion = { + version: string; + options: Array; +}; + +export class RulesManifest { + constructor( + private readonly manifest: RulesManifestData, + private readonly loader: RulesManifestLoader + ) {} + + get name() { + return this.manifest.name; + } + + get description() { + return this.manifest.description; + } + + get currentVersion() { + return this.manifest.currentVersion; + } + + async getCurrentVersion(): Promise { + const version = this.versions[this.manifest.currentVersion]; + + if (!version) { + throw new Error(`Version ${this.manifest.currentVersion} 
not found in manifest`); + } + + const options = await Promise.all( + version.options.map(async (option) => { + const contents = await this.loader.loadRulesFile(option.path); + + // Omit path + const { path, installStrategy, ...rest } = option; + + const $installStrategy = RulesFileInstallStrategy.safeParse(installStrategy ?? "default"); + + // Skip variants with invalid install strategies + if (!$installStrategy.success) { + return; + } + + return { ...rest, contents, installStrategy: $installStrategy.data }; + }) + ); + + return { + version: this.manifest.currentVersion, + options: options.filter(Boolean) as Array, + }; + } + + get versions() { + return this.manifest.versions; + } +} + +export async function loadRulesManifest(loader: RulesManifestLoader): Promise { + const content = await loader.loadManifestContent(); + + return new RulesManifest(RulesManifestDataSchema.parse(JSON.parse(content)), loader); +} + +export interface RulesManifestLoader { + loadManifestContent(): Promise; + loadRulesFile(relativePath: string): Promise; +} + +export class GithubRulesManifestLoader implements RulesManifestLoader { + constructor(private readonly branch: string = "main") {} + + async loadManifestContent(): Promise { + const response = await fetch( + `https://raw.githubusercontent.com/triggerdotdev/trigger.dev/refs/heads/${this.branch}/rules/manifest.json` + ); + + if (!response.ok) { + throw new Error(`Failed to load rules manifest: ${response.status} ${response.statusText}`); + } + + return response.text(); + } + + async loadRulesFile(relativePath: string): Promise { + const response = await fetch( + `https://raw.githubusercontent.com/triggerdotdev/trigger.dev/refs/heads/${this.branch}/rules/${relativePath}` + ); + + if (!response.ok) { + throw new Error( + `Failed to load rules file: ${relativePath} - ${response.status} ${response.statusText}` + ); + } + + return response.text(); + } +} + +export class LocalRulesManifestLoader implements RulesManifestLoader { + 
constructor(private readonly path: string) {} + + async loadManifestContent(): Promise { + try { + return await readFile(this.path, "utf8"); + } catch (error) { + throw new Error(`Failed to load rules manifest: ${this.path} - ${error}`); + } + } + + async loadRulesFile(relativePath: string): Promise { + const path = join(dirname(this.path), relativePath); + + try { + return await readFile(path, "utf8"); + } catch (error) { + throw new Error(`Failed to load rules file: ${relativePath} - ${error}`); + } + } +} diff --git a/packages/cli-v3/src/rules/types.ts b/packages/cli-v3/src/rules/types.ts new file mode 100644 index 0000000000..70682c251a --- /dev/null +++ b/packages/cli-v3/src/rules/types.ts @@ -0,0 +1,4 @@ +import { z } from "zod"; + +export const RulesFileInstallStrategy = z.enum(["default", "claude-code-subagent"]); +export type RulesFileInstallStrategy = z.infer; diff --git a/packages/cli-v3/src/utilities/configFiles.ts b/packages/cli-v3/src/utilities/configFiles.ts index 6e1c9052e7..dfbdbf5df6 100644 --- a/packages/cli-v3/src/utilities/configFiles.ts +++ b/packages/cli-v3/src/utilities/configFiles.ts @@ -29,6 +29,13 @@ const CliConfigFile = z.object({ version: z.literal(2), currentProfile: z.string().default(DEFFAULT_PROFILE), profiles: z.record(CliConfigProfileSettings), + settings: z + .object({ + hasSeenMCPInstallPrompt: z.boolean().optional(), + hasSeenRulesInstallPrompt: z.boolean().optional(), + lastRulesInstallPromptVersion: z.string().optional(), + }) + .optional(), }); type CliConfigFile = z.infer; @@ -50,6 +57,10 @@ function getBlankConfig(): CliConfigFile { version: 2, currentProfile: DEFFAULT_PROFILE, profiles: {}, + settings: { + hasSeenMCPInstallPrompt: false, + hasSeenRulesInstallPrompt: false, + }, }; } @@ -93,6 +104,52 @@ export function readAuthConfigProfile( } } +export function readConfigHasSeenMCPInstallPrompt(): boolean { + const config = getConfig(); + return typeof config.settings?.hasSeenMCPInstallPrompt === "boolean" + ? 
config.settings.hasSeenMCPInstallPrompt + : false; +} + +export function writeConfigHasSeenMCPInstallPrompt(hasSeenMCPInstallPrompt: boolean) { + const config = getConfig(); + config.settings = { + ...config.settings, + hasSeenMCPInstallPrompt, + }; + writeAuthConfigFile(config); +} + +export function readConfigHasSeenRulesInstallPrompt(): boolean { + const config = getConfig(); + return typeof config.settings?.hasSeenRulesInstallPrompt === "boolean" + ? config.settings.hasSeenRulesInstallPrompt + : false; +} + +export function writeConfigHasSeenRulesInstallPrompt(hasSeenRulesInstallPrompt: boolean) { + const config = getConfig(); + config.settings = { + ...config.settings, + hasSeenRulesInstallPrompt, + }; + writeAuthConfigFile(config); +} + +export function readConfigLastRulesInstallPromptVersion(): string | undefined { + const config = getConfig(); + return config.settings?.lastRulesInstallPromptVersion; +} + +export function writeConfigLastRulesInstallPromptVersion(version: string) { + const config = getConfig(); + config.settings = { + ...config.settings, + lastRulesInstallPromptVersion: version, + }; + writeAuthConfigFile(config); +} + export function deleteAuthConfigProfile(profile: string = DEFFAULT_PROFILE) { const config = getConfig(); diff --git a/packages/cli-v3/src/utilities/fileSystem.ts b/packages/cli-v3/src/utilities/fileSystem.ts index b3957122fb..2de037582c 100644 --- a/packages/cli-v3/src/utilities/fileSystem.ts +++ b/packages/cli-v3/src/utilities/fileSystem.ts @@ -1,8 +1,9 @@ import fsSync from "fs"; import fsModule, { writeFile } from "fs/promises"; import fs from "node:fs"; -import { tmpdir } from "node:os"; +import { homedir, tmpdir } from "node:os"; import pathModule from "node:path"; +import { parseJSONC, stringifyJSONC, parseTOML, stringifyTOML } from "confbox"; // Creates a file at the given path, if the directory doesn't exist it will be created export async function createFile( @@ -50,6 +51,22 @@ export async function readFile(path: 
string) { return await fsModule.readFile(path, "utf8"); } +export function expandTilde(filePath: string) { + if (typeof filePath !== "string") { + throw new TypeError("Path must be a string"); + } + + if (filePath === "~") { + return homedir(); + } + + if (filePath.startsWith("~/")) { + return pathModule.resolve(homedir(), filePath.slice(2)); + } + + return pathModule.resolve(filePath); +} + export async function readJSONFile(path: string) { const fileContents = await fsModule.readFile(path, "utf8"); @@ -71,7 +88,13 @@ export async function safeReadJSONFile(path: string) { } export async function writeJSONFile(path: string, json: any, pretty = false) { - await writeFile(path, JSON.stringify(json, undefined, pretty ? 2 : undefined), "utf8"); + await safeWriteFile(path, JSON.stringify(json, undefined, pretty ? 2 : undefined)); +} + +// Will create the directory if it doesn't exist +export async function safeWriteFile(path: string, contents: string) { + await fsModule.mkdir(pathModule.dirname(path), { recursive: true }); + await fsModule.writeFile(path, contents); } export function readJSONFileSync(path: string) { @@ -98,3 +121,31 @@ export async function createTempDir(): Promise { return directory; } + +export async function safeReadTomlFile(path: string) { + const fileExists = await pathExists(path); + + if (!fileExists) return; + + const fileContents = await readFile(path); + + return parseTOML(fileContents.replace(/\r\n/g, "\n")); +} + +export async function writeTomlFile(path: string, toml: any) { + await safeWriteFile(path, stringifyTOML(toml)); +} + +export async function safeReadJSONCFile(path: string) { + const fileExists = await pathExists(path); + + if (!fileExists) return; + + const fileContents = await readFile(path); + + return parseJSONC(fileContents.replace(/\r\n/g, "\n")); +} + +export async function writeJSONCFile(path: string, json: any) { + await safeWriteFile(path, stringifyJSONC(json)); +} diff --git a/packages/core/CHANGELOG.md 
b/packages/core/CHANGELOG.md index fdbbe8cdf0..cb143236b9 100644 --- a/packages/core/CHANGELOG.md +++ b/packages/core/CHANGELOG.md @@ -1,5 +1,7 @@ # internal-platform +## 4.0.1 + ## 4.0.0 ### Major Changes diff --git a/packages/core/package.json b/packages/core/package.json index f6f511d68e..6e13a57663 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/core", - "version": "4.0.0", + "version": "4.0.1", "description": "Core code used across the Trigger.dev SDK and platform", "license": "MIT", "publishConfig": { @@ -187,7 +187,6 @@ "execa": "^8.0.1", "humanize-duration": "^3.27.3", "jose": "^5.4.0", - "lodash.get": "^4.4.2", "nanoid": "3.3.8", "prom-client": "^15.1.0", "socket.io": "4.7.4", diff --git a/packages/core/src/logger.ts b/packages/core/src/logger.ts index 1e7a811bcb..3d5620a9d2 100644 --- a/packages/core/src/logger.ts +++ b/packages/core/src/logger.ts @@ -71,7 +71,9 @@ export class Logger { this.#structuredLog(console.error, message, "error", ...args); - if (Logger.onError) { + const ignoreError = args.some((arg) => arg?.ignoreError); + + if (Logger.onError && !ignoreError) { Logger.onError(message, ...args); } } diff --git a/packages/core/src/v3/apiClient/index.ts b/packages/core/src/v3/apiClient/index.ts index 4eab7d0089..fdd4bfc5e5 100644 --- a/packages/core/src/v3/apiClient/index.ts +++ b/packages/core/src/v3/apiClient/index.ts @@ -3,6 +3,9 @@ import { VERSION } from "../../version.js"; import { generateJWT } from "../jwt.js"; import { AddTagsRequestBody, + ApiDeploymentListOptions, + ApiDeploymentListResponseItem, + ApiDeploymentListSearchParams, BatchTaskRunExecutionResult, BatchTriggerTaskV3RequestBody, BatchTriggerTaskV3Response, @@ -27,6 +30,7 @@ import { RetrieveBatchV2Response, RetrieveQueueParam, RetrieveRunResponse, + RetrieveRunTraceResponseBody, ScheduleObject, TaskRunExecutionResult, TriggerTaskRequestBody, @@ -339,6 +343,18 @@ export class ApiClient { ); } + 
retrieveRunTrace(runId: string, requestOptions?: ZodFetchOptions) { + return zodfetch( + RetrieveRunTraceResponseBody, + `${this.baseUrl}/api/v1/runs/${runId}/trace`, + { + method: "GET", + headers: this.#getHeaders(false), + }, + mergeRequestOptions(this.defaultRequestOptions, requestOptions) + ); + } + listRuns( query?: ListRunsQueryParams, requestOptions?: ZodFetchOptions @@ -960,6 +976,41 @@ export class ApiClient { ); } + listDeployments(options?: ApiDeploymentListOptions, requestOptions?: ZodFetchOptions) { + const searchParams = new URLSearchParams(); + + if (options?.status) { + searchParams.append("status", options.status); + } + + if (options?.period) { + searchParams.append("period", options.period); + } + + if (options?.from) { + searchParams.append("from", options.from); + } + + if (options?.to) { + searchParams.append("to", options.to); + } + + return zodfetchCursorPage( + ApiDeploymentListResponseItem, + `${this.baseUrl}/api/v1/deployments`, + { + query: searchParams, + after: options?.cursor, + limit: options?.limit, + }, + { + method: "GET", + headers: this.#getHeaders(false), + }, + mergeRequestOptions(this.defaultRequestOptions, requestOptions) + ); + } + async fetchStream( runId: string, streamKey: string, diff --git a/packages/core/src/v3/config.ts b/packages/core/src/v3/config.ts index b5caba64b9..71c4cd6521 100644 --- a/packages/core/src/v3/config.ts +++ b/packages/core/src/v3/config.ts @@ -316,7 +316,7 @@ export type TriggerConfig = { tsconfigPath?: string; /** - * CA Cert file to be added to NODE_EXTRA_CA_CERT environment variable in, useful in use with self signed cert in the trigger.dev environment. + * CA Cert file to be added to NODE_EXTRA_CA_CERT environment variable, useful in use with self signed cert in the trigger.dev environment. * * @example "./certs/ca.crt" * Note: must start with "./" and be relative to the project root. 
diff --git a/packages/core/src/v3/isomorphic/dates.ts b/packages/core/src/v3/isomorphic/dates.ts new file mode 100644 index 0000000000..53a0542d7e --- /dev/null +++ b/packages/core/src/v3/isomorphic/dates.ts @@ -0,0 +1,35 @@ +/** + * Attempts to parse a string into a valid Date. + * + * Supported formats: + * - ISO and RFC date strings (e.g. "2025-08-18", "2025-08-18T12:34:56Z") + * - Natural language dates supported by JS Date (e.g. "August 18, 2025") + * - Epoch seconds (10-digit numeric string, e.g. "1629302400") + * - Epoch milliseconds (13-digit numeric string, e.g. "1629302400000") + * + * @param input The string to parse. + * @returns A valid Date object, or undefined if parsing fails. + */ +export function parseDate(input: string): Date | undefined { + if (typeof input !== "string") return undefined; + + // Handle pure numeric strings as epoch values + if (/^\d+$/.test(input)) { + const num = Number(input); + + if (input.length === 10) { + // Epoch seconds + return new Date(num * 1000); + } else if (input.length === 13) { + // Epoch milliseconds + return new Date(num); + } else { + // Unsupported numeric length + return undefined; + } + } + + // Handle general date strings + const date = new Date(input); + return isNaN(date.getTime()) ? 
undefined : date; +} diff --git a/packages/core/src/v3/isomorphic/index.ts b/packages/core/src/v3/isomorphic/index.ts index 8e15c36d2a..d220acd515 100644 --- a/packages/core/src/v3/isomorphic/index.ts +++ b/packages/core/src/v3/isomorphic/index.ts @@ -4,3 +4,4 @@ export * from "./maxDuration.js"; export * from "./queueName.js"; export * from "./consts.js"; export * from "./traceContext.js"; +export * from "./dates.js"; diff --git a/packages/core/src/v3/schemas/api.ts b/packages/core/src/v3/schemas/api.ts index 7fde77c41c..da9f776568 100644 --- a/packages/core/src/v3/schemas/api.ts +++ b/packages/core/src/v3/schemas/api.ts @@ -29,7 +29,11 @@ export type WhoAmIResponse = z.infer; export const GetProjectResponseBody = z.object({ id: z.string(), - externalRef: z.string(), + externalRef: z + .string() + .describe( + "The external reference for the project, also known as the project ref, a unique identifier starting with proj_" + ), name: z.string(), slug: z.string(), createdAt: z.coerce.date(), @@ -47,6 +51,27 @@ export const GetProjectsResponseBody = z.array(GetProjectResponseBody); export type GetProjectsResponseBody = z.infer; +export const GetOrgsResponseBody = z.array( + z.object({ + id: z.string(), + title: z.string(), + slug: z.string(), + createdAt: z.coerce.date(), + }) +); + +export type GetOrgsResponseBody = z.infer; + +export const CreateProjectRequestBody = z.object({ + name: z + .string() + .trim() + .min(1, "Name is required") + .max(255, "Name must be less than 255 characters"), +}); + +export type CreateProjectRequestBody = z.infer; + export const GetProjectEnvResponse = z.object({ apiKey: z.string(), name: z.string(), @@ -56,6 +81,49 @@ export const GetProjectEnvResponse = z.object({ export type GetProjectEnvResponse = z.infer; +// Zod schema for the response body type +export const GetWorkerTaskResponse = z.object({ + id: z.string(), + slug: z.string(), + filePath: z.string(), + triggerSource: z.string(), + createdAt: z.coerce.date(), + payloadSchema: 
z.any().nullish(), +}); + +export const GetWorkerByTagResponse = z.object({ + worker: z.object({ + id: z.string(), + version: z.string(), + engine: z.string().nullish(), + sdkVersion: z.string().nullish(), + cliVersion: z.string().nullish(), + tasks: z.array(GetWorkerTaskResponse), + }), + urls: z.object({ + runs: z.string(), + }), +}); + +export type GetWorkerByTagResponse = z.infer; + +export const GetJWTRequestBody = z.object({ + claims: z + .object({ + scopes: z.array(z.string()).default([]), + }) + .optional(), + expirationTime: z.union([z.number(), z.string()]).optional(), +}); + +export type GetJWTRequestBody = z.infer; + +export const GetJWTResponse = z.object({ + token: z.string(), +}); + +export type GetJWTResponse = z.infer; + export const CreateBackgroundWorkerRequestBody = z.object({ localOnly: z.boolean(), metadata: BackgroundWorkerMetadata, @@ -1078,3 +1146,123 @@ export function timeoutError(timeout: Date) { message: `Waitpoint timed out at ${timeout.toISOString()}`, }; } + +const ApiDeploymentCommonShape = { + from: z.string().describe("The date to start the search from, in ISO 8601 format").optional(), + to: z.string().describe("The date to end the search, in ISO 8601 format").optional(), + period: z.string().describe("The period to search within (e.g. 
1d, 7d, 3h, etc.)").optional(), + status: z + .enum(["PENDING", "BUILDING", "DEPLOYING", "DEPLOYED", "FAILED", "CANCELED", "TIMED_OUT"]) + .describe("Filter deployments that are in this status") + .optional(), +}; + +const ApiDeploymentListPaginationCursor = z + .string() + .describe("The deployment ID to start the search from, to get the next page") + .optional(); + +const ApiDeploymentListPaginationLimit = z.coerce + .number() + .describe("The number of deployments to return, defaults to 20 (max 100)") + .min(1, "Limit must be at least 1") + .max(100, "Limit must be less than 100") + .optional(); + +export const ApiDeploymentListParams = { + ...ApiDeploymentCommonShape, + cursor: ApiDeploymentListPaginationCursor, + limit: ApiDeploymentListPaginationLimit, +}; + +export const ApiDeploymentListOptions = z.object(ApiDeploymentListParams); + +export type ApiDeploymentListOptions = z.infer; + +export const ApiDeploymentListSearchParams = z.object({ + ...ApiDeploymentCommonShape, + "page[after]": ApiDeploymentListPaginationCursor, + "page[size]": ApiDeploymentListPaginationLimit, +}); + +export type ApiDeploymentListSearchParams = z.infer; + +export const ApiDeploymentListResponseItem = z.object({ + id: z.string(), + createdAt: z.coerce.date(), + shortCode: z.string(), + version: z.string(), + runtime: z.string(), + runtimeVersion: z.string(), + status: z.enum([ + "PENDING", + "BUILDING", + "DEPLOYING", + "DEPLOYED", + "FAILED", + "CANCELED", + "TIMED_OUT", + ]), + deployedAt: z.coerce.date().optional(), + git: z.record(z.any()).optional(), + error: DeploymentErrorData.optional(), +}); + +export type ApiDeploymentListResponseItem = z.infer; + +export const ApiBranchListResponseBody = z.object({ + branches: z.array( + z.object({ + id: z.string(), + name: z.string(), + createdAt: z.coerce.date(), + updatedAt: z.coerce.date(), + git: z.record(z.any()).optional(), + isPaused: z.boolean(), + }) + ), +}); + +export type ApiBranchListResponseBody = z.infer; + +export const 
RetrieveRunTraceSpanSchema = z.object({ + id: z.string(), + parentId: z.string().optional(), + message: z.string(), + data: z.object({ + runId: z.string(), + taskSlug: z.string().optional(), + taskPath: z.string().optional(), + events: z.array(z.any()).optional(), + startTime: z.coerce.date(), + duration: z.number(), + isError: z.boolean(), + isPartial: z.boolean(), + isCancelled: z.boolean(), + level: z.string(), + environmentType: z.string(), + workerVersion: z.string().optional(), + queueName: z.string().optional(), + machinePreset: z.string().optional(), + properties: z.record(z.any()).optional(), + output: z.unknown().optional(), + }), +}); + +export type RetrieveRunTraceSpan = z.infer & { + children: Array; +}; + +export const RetrieveRunTraceSpan: z.ZodType = + RetrieveRunTraceSpanSchema.extend({ + children: z.lazy(() => RetrieveRunTraceSpan.array()), + }); + +export const RetrieveRunTraceResponseBody = z.object({ + trace: z.object({ + traceId: z.string(), + rootSpan: RetrieveRunTraceSpan, + }), +}); + +export type RetrieveRunTraceResponseBody = z.infer; diff --git a/packages/core/src/v3/utils/ioSerialization.ts b/packages/core/src/v3/utils/ioSerialization.ts index b3049187e8..103260b85c 100644 --- a/packages/core/src/v3/utils/ioSerialization.ts +++ b/packages/core/src/v3/utils/ioSerialization.ts @@ -12,7 +12,7 @@ import { SemanticInternalAttributes } from "../semanticInternalAttributes.js"; import { TriggerTracer } from "../tracer.js"; import { zodfetch } from "../zodfetch.js"; import { flattenAttributes } from "./flattenAttributes.js"; -import get from "lodash.get"; +import { JSONHeroPath } from "@jsonhero/path"; export type IOPacket = { data?: string | undefined; @@ -536,7 +536,7 @@ export async function replaceSuperJsonPayload(original: string, newPayload: stri .map(([key]) => key); const overridenUndefinedKeys = originalUndefinedKeys.filter( - (key) => get(newPayloadObject, key) !== undefined + (key) => getKeyFromObject(newPayloadObject, key) !== 
undefined ); overridenUndefinedKeys.forEach((key) => { @@ -551,3 +551,9 @@ export async function replaceSuperJsonPayload(original: string, newPayload: stri return superjson.deserialize(newSuperJson); } + +function getKeyFromObject(object: unknown, key: string) { + const jsonHeroPath = new JSONHeroPath(key); + + return jsonHeroPath.first(object); +} diff --git a/packages/python/CHANGELOG.md b/packages/python/CHANGELOG.md index ea8132ebe5..4d74e4788b 100644 --- a/packages/python/CHANGELOG.md +++ b/packages/python/CHANGELOG.md @@ -1,5 +1,14 @@ # @trigger.dev/python +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/build@4.0.1` + - `@trigger.dev/core@4.0.1` + - `@trigger.dev/sdk@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/python/package.json b/packages/python/package.json index 8252b150ef..b79a0de1b1 100644 --- a/packages/python/package.json +++ b/packages/python/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/python", - "version": "4.0.0", + "version": "4.0.1", "description": "Python runtime and build extension for Trigger.dev", "license": "MIT", "publishConfig": { @@ -45,7 +45,7 @@ "check-exports": "attw --pack ." 
}, "dependencies": { - "@trigger.dev/core": "workspace:4.0.0", + "@trigger.dev/core": "workspace:4.0.1", "tinyexec": "^0.3.2" }, "devDependencies": { @@ -56,12 +56,12 @@ "tsx": "4.17.0", "esbuild": "^0.23.0", "@arethetypeswrong/cli": "^0.15.4", - "@trigger.dev/build": "workspace:4.0.0", - "@trigger.dev/sdk": "workspace:4.0.0" + "@trigger.dev/build": "workspace:4.0.1", + "@trigger.dev/sdk": "workspace:4.0.1" }, "peerDependencies": { - "@trigger.dev/sdk": "workspace:^4.0.0", - "@trigger.dev/build": "workspace:^4.0.0" + "@trigger.dev/sdk": "workspace:^4.0.1", + "@trigger.dev/build": "workspace:^4.0.1" }, "engines": { "node": ">=18.20.0" diff --git a/packages/react-hooks/CHANGELOG.md b/packages/react-hooks/CHANGELOG.md index bf0e028dfb..682e9da8c0 100644 --- a/packages/react-hooks/CHANGELOG.md +++ b/packages/react-hooks/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/react-hooks +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/react-hooks/package.json b/packages/react-hooks/package.json index ae7a3d7508..6adee1476c 100644 --- a/packages/react-hooks/package.json +++ b/packages/react-hooks/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/react-hooks", - "version": "4.0.0", + "version": "4.0.1", "description": "trigger.dev react hooks", "license": "MIT", "publishConfig": { @@ -37,7 +37,7 @@ "check-exports": "attw --pack ." 
}, "dependencies": { - "@trigger.dev/core": "workspace:^4.0.0", + "@trigger.dev/core": "workspace:^4.0.1", "swr": "^2.2.5" }, "devDependencies": { diff --git a/packages/redis-worker/CHANGELOG.md b/packages/redis-worker/CHANGELOG.md index ae77455cbe..235b1f4c8f 100644 --- a/packages/redis-worker/CHANGELOG.md +++ b/packages/redis-worker/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/redis-worker +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/redis-worker/package.json b/packages/redis-worker/package.json index d3e395a3f3..e616cc9d9c 100644 --- a/packages/redis-worker/package.json +++ b/packages/redis-worker/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/redis-worker", - "version": "4.0.0", + "version": "4.0.1", "description": "Redis worker for trigger.dev", "license": "MIT", "publishConfig": { @@ -23,7 +23,7 @@ "test": "vitest --sequence.concurrent=false --no-file-parallelism" }, "dependencies": { - "@trigger.dev/core": "workspace:4.0.0", + "@trigger.dev/core": "workspace:4.0.1", "lodash.omit": "^4.5.0", "nanoid": "^5.0.7", "p-limit": "^6.2.0", diff --git a/packages/redis-worker/src/worker.ts b/packages/redis-worker/src/worker.ts index a5e77d3a35..92880d1f07 100644 --- a/packages/redis-worker/src/worker.ts +++ b/packages/redis-worker/src/worker.ts @@ -36,6 +36,8 @@ export type WorkerCatalog = { retry?: RetryOptions; cron?: string; jitterInMs?: number; + /** Defaults to true. If false, errors will not be logged. 
*/ + logErrors?: boolean; }; }; @@ -541,12 +543,12 @@ class Worker { const catalogItem = this.options.catalog[job as any]; const handler = this.jobs[job as any]; if (!handler) { - this.logger.error(`No handler found for job type: ${job}`); + this.logger.error(`Worker no handler found for job type: ${job}`); return; } if (!catalogItem) { - this.logger.error(`No catalog item found for job type: ${job}`); + this.logger.error(`Worker no catalog item found for job type: ${job}`); return; } @@ -590,7 +592,10 @@ class Worker { } ).catch(async (error) => { const errorMessage = error instanceof Error ? error.message : String(error); - this.logger.error(`Error processing item:`, { + + const shouldLogError = catalogItem.logErrors ?? true; + + const logAttributes = { name: this.options.name, id, job, @@ -598,7 +603,14 @@ class Worker { visibilityTimeoutMs, error, errorMessage, - }); + }; + + if (shouldLogError) { + this.logger.error(`Worker error processing item`, logAttributes); + } else { + this.logger.info(`Worker failed to process item`, logAttributes); + } + // Attempt requeue logic. try { const newAttempt = attempt + 1; @@ -609,15 +621,17 @@ class Worker { const retryDelay = calculateNextRetryDelay(retrySettings, newAttempt); if (!retryDelay) { - this.logger.error(`Item ${id} reached max attempts. Moving to DLQ.`, { - name: this.options.name, - id, - job, - item, - visibilityTimeoutMs, - attempt: newAttempt, - errorMessage, - }); + if (shouldLogError) { + this.logger.error(`Worker item reached max attempts. Moving to DLQ.`, { + ...logAttributes, + attempt: newAttempt, + }); + } else { + this.logger.info(`Worker item reached max attempts. 
Moving to DLQ.`, { + ...logAttributes, + attempt: newAttempt, + }); + } await this.queue.moveToDeadLetterQueue(id, errorMessage); @@ -629,7 +643,7 @@ class Worker { } const retryDate = new Date(Date.now() + retryDelay); - this.logger.info(`Requeuing failed item ${id} with delay`, { + this.logger.info(`Worker requeuing failed item with delay`, { name: this.options.name, id, job, @@ -649,7 +663,7 @@ class Worker { }); } catch (requeueError) { this.logger.error( - `Failed to requeue item ${id}. It will be retried after the visibility timeout.`, + `Worker failed to requeue item. It will be retried after the visibility timeout.`, { name: this.options.name, id, diff --git a/packages/rsc/CHANGELOG.md b/packages/rsc/CHANGELOG.md index b5c57f862a..aa9e4f7684 100644 --- a/packages/rsc/CHANGELOG.md +++ b/packages/rsc/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/rsc +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/rsc/package.json b/packages/rsc/package.json index 310e2438cc..71064e8929 100644 --- a/packages/rsc/package.json +++ b/packages/rsc/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/rsc", - "version": "4.0.0", + "version": "4.0.1", "description": "trigger.dev rsc", "license": "MIT", "publishConfig": { @@ -37,14 +37,14 @@ "check-exports": "attw --pack ." 
}, "dependencies": { - "@trigger.dev/core": "workspace:^4.0.0", + "@trigger.dev/core": "workspace:^4.0.1", "mlly": "^1.7.1", "react": "19.0.0-rc.1", "react-dom": "19.0.0-rc.1" }, "devDependencies": { "@arethetypeswrong/cli": "^0.15.4", - "@trigger.dev/build": "workspace:^4.0.0", + "@trigger.dev/build": "workspace:^4.0.1", "@types/node": "^20.14.14", "@types/react": "*", "@types/react-dom": "*", diff --git a/packages/schema-to-json/CHANGELOG.md b/packages/schema-to-json/CHANGELOG.md index 96fba56fb2..8aed2bc5eb 100644 --- a/packages/schema-to-json/CHANGELOG.md +++ b/packages/schema-to-json/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/schema-to-json +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Patch Changes diff --git a/packages/schema-to-json/package.json b/packages/schema-to-json/package.json index b300255535..7f3461dea3 100644 --- a/packages/schema-to-json/package.json +++ b/packages/schema-to-json/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/schema-to-json", - "version": "4.0.0", + "version": "4.0.1", "description": "Convert various schema validation libraries to JSON Schema", "license": "MIT", "publishConfig": { diff --git a/packages/trigger-sdk/CHANGELOG.md b/packages/trigger-sdk/CHANGELOG.md index aad363e80d..f24de1f72f 100644 --- a/packages/trigger-sdk/CHANGELOG.md +++ b/packages/trigger-sdk/CHANGELOG.md @@ -1,5 +1,12 @@ # @trigger.dev/sdk +## 4.0.1 + +### Patch Changes + +- Updated dependencies: + - `@trigger.dev/core@4.0.1` + ## 4.0.0 ### Major Changes diff --git a/packages/trigger-sdk/package.json b/packages/trigger-sdk/package.json index 6709da1d6d..71b42c74b9 100644 --- a/packages/trigger-sdk/package.json +++ b/packages/trigger-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@trigger.dev/sdk", - "version": "4.0.0", + "version": "4.0.1", "description": "trigger.dev Node.JS SDK", "license": "MIT", "publishConfig": { @@ -51,7 +51,7 @@ "dependencies": { "@opentelemetry/api": "1.9.0", 
"@opentelemetry/semantic-conventions": "1.36.0", - "@trigger.dev/core": "workspace:4.0.0", + "@trigger.dev/core": "workspace:4.0.1", "chalk": "^5.2.0", "cronstrue": "^2.21.0", "debug": "^4.3.4", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6decddb309..35e5c83469 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -166,6 +166,9 @@ importers: apps/supervisor: dependencies: + '@aws-sdk/client-ecr': + specifier: ^3.839.0 + version: 3.839.0 '@kubernetes/client-node': specifier: ^1.0.0 version: 1.0.0(patch_hash=s75bgwaoixupmywtvgoy5ruszq) @@ -1236,7 +1239,7 @@ importers: packages/build: dependencies: '@trigger.dev/core': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../core pkg-types: specifier: ^1.1.3 @@ -1267,13 +1270,13 @@ importers: packages/cli-v3: dependencies: '@clack/prompts': - specifier: ^0.10.0 - version: 0.10.1 + specifier: 0.11.0 + version: 0.11.0 '@depot/cli': specifier: 0.0.1-cli.2.80.0 version: 0.0.1-cli.2.80.0 '@modelcontextprotocol/sdk': - specifier: ^1.6.1 + specifier: ^1.17.0 version: 1.17.1(supports-color@10.0.0) '@opentelemetry/api': specifier: 1.9.0 @@ -1300,13 +1303,13 @@ importers: specifier: 1.36.0 version: 1.36.0 '@trigger.dev/build': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../build '@trigger.dev/core': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../core '@trigger.dev/schema-to-json': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../schema-to-json ansi-escapes: specifier: ^7.0.0 @@ -1329,6 +1332,9 @@ importers: commander: specifier: ^9.4.1 version: 9.5.0 + confbox: + specifier: ^0.2.2 + version: 0.2.2 defu: specifier: ^6.1.4 version: 6.1.4 @@ -1419,6 +1425,9 @@ importers: std-env: specifier: ^3.7.0 version: 3.7.0 + strip-ansi: + specifier: ^7.1.0 + version: 7.1.0 supports-color: specifier: ^10.0.0 version: 10.0.0 @@ -1567,9 +1576,6 @@ importers: jose: specifier: ^5.4.0 version: 5.4.0 - lodash.get: - specifier: ^4.4.2 - version: 
4.4.2 nanoid: specifier: 3.3.8 version: 3.3.8 @@ -1650,7 +1656,7 @@ importers: packages/python: dependencies: '@trigger.dev/core': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../core tinyexec: specifier: ^0.3.2 @@ -1660,10 +1666,10 @@ importers: specifier: ^0.15.4 version: 0.15.4 '@trigger.dev/build': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../build '@trigger.dev/sdk': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../trigger-sdk '@types/node': specifier: 20.14.14 @@ -1687,7 +1693,7 @@ importers: packages/react-hooks: dependencies: '@trigger.dev/core': - specifier: workspace:^4.0.0 + specifier: workspace:^4.0.1 version: link:../core react: specifier: ^18.0 || ^19.0 || ^19.0.0-rc @@ -1721,7 +1727,7 @@ importers: packages/redis-worker: dependencies: '@trigger.dev/core': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../core cron-parser: specifier: ^4.9.0 @@ -1764,7 +1770,7 @@ importers: packages/rsc: dependencies: '@trigger.dev/core': - specifier: workspace:^4.0.0 + specifier: workspace:^4.0.1 version: link:../core mlly: specifier: ^1.7.1 @@ -1780,7 +1786,7 @@ importers: specifier: ^0.15.4 version: 0.15.4 '@trigger.dev/build': - specifier: workspace:^4.0.0 + specifier: workspace:^4.0.1 version: link:../build '@types/node': specifier: ^20.14.14 @@ -1856,7 +1862,7 @@ importers: specifier: 1.36.0 version: 1.36.0 '@trigger.dev/core': - specifier: workspace:4.0.0 + specifier: workspace:4.0.1 version: link:../core chalk: specifier: ^5.2.0 @@ -3156,7 +3162,7 @@ packages: resolution: {integrity: sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==} dependencies: '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.451.0 + '@aws-sdk/types': 3.840.0 tslib: 1.14.1 dev: false @@ -3223,7 +3229,7 @@ packages: /@aws-crypto/util@3.0.0: resolution: {integrity: 
sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==} dependencies: - '@aws-sdk/types': 3.451.0 + '@aws-sdk/types': 3.840.0 '@aws-sdk/util-utf8-browser': 3.259.0 tslib: 1.14.1 dev: false @@ -3231,7 +3237,7 @@ packages: /@aws-crypto/util@5.2.0: resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} dependencies: - '@aws-sdk/types': 3.714.0 + '@aws-sdk/types': 3.840.0 '@smithy/util-utf8': 2.0.2 tslib: 2.8.1 dev: false @@ -5829,17 +5835,17 @@ packages: resolution: {integrity: sha512-hBzuU5+JjB2cqNZyszkDHZgOSrUUT8V3dhgRl8Q9Gp6dAj/H5+KILGjbhDpc3Iy9qmqlm/akuOI2ut9VUtzJxQ==} dev: true - /@clack/core@0.4.2: - resolution: {integrity: sha512-NYQfcEy8MWIxrT5Fj8nIVchfRFA26yYKJcvBS7WlUIlw2OmQOY9DhGGXMovyI5J5PpxrCPGkgUi207EBrjpBvg==} + /@clack/core@0.5.0: + resolution: {integrity: sha512-p3y0FIOwaYRUPRcMO7+dlmLh8PSRcrjuTndsiA0WAFbWES0mLZlrjVoBRZ9DzkPFJZG6KGkJmoEAY0ZcVWTkow==} dependencies: picocolors: 1.1.1 sisteransi: 1.0.5 dev: false - /@clack/prompts@0.10.1: - resolution: {integrity: sha512-Q0T02vx8ZM9XSv9/Yde0jTmmBQufZhPJfYAg2XrrrxWWaZgq1rr8nU8Hv710BQ1dhoP8rtY7YUdpGej2Qza/cw==} + /@clack/prompts@0.11.0: + resolution: {integrity: sha512-pMN5FcrEw9hUkZA4f+zLlzivQSeQf5dRGJjSUbvVYDLvpKCdQx5OaknvKzgbtXOizhP+SJJJjqEbOe55uKKfAw==} dependencies: - '@clack/core': 0.4.2 + '@clack/core': 0.5.0 picocolors: 1.1.1 sisteransi: 1.0.5 dev: false @@ -23373,6 +23379,10 @@ packages: /confbox@0.1.8: resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + /confbox@0.2.2: + resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==} + dev: false + /config-chain@1.1.13: resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} dependencies: @@ -28717,11 +28727,6 @@ packages: resolution: {integrity: 
sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==} dev: true - /lodash.get@4.4.2: - resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} - deprecated: This package is deprecated. Use the optional chaining (?.) operator instead. - dev: false - /lodash.groupby@4.6.0: resolution: {integrity: sha512-5dcWxm23+VAoz+awKmBaiBvzox8+RqMgFhi7UvX9DHZr2HdxHXM/Wrf8cfKpsW37RNrvtPn6hSwNqurSILbmJw==} dev: false diff --git a/rules/4.0.0/advanced-tasks.md b/rules/4.0.0/advanced-tasks.md new file mode 100644 index 0000000000..f6ecac3035 --- /dev/null +++ b/rules/4.0.0/advanced-tasks.md @@ -0,0 +1,451 @@ +# Trigger.dev Advanced Tasks (v4) + +**Advanced patterns and features for writing tasks** + +## Tags & Organization + +```ts +import { task, tags } from "@trigger.dev/sdk"; + +export const processUser = task({ + id: "process-user", + run: async (payload: { userId: string; orgId: string }, { ctx }) => { + // Add tags during execution + await tags.add(`user_${payload.userId}`); + await tags.add(`org_${payload.orgId}`); + + return { processed: true }; + }, +}); + +// Trigger with tags +await processUser.trigger( + { userId: "123", orgId: "abc" }, + { tags: ["priority", "user_123", "org_abc"] } // Max 10 tags per run +); + +// Subscribe to tagged runs +for await (const run of runs.subscribeToRunsWithTag("user_123")) { + console.log(`User task ${run.id}: ${run.status}`); +} +``` + +**Tag Best Practices:** + +- Use prefixes: `user_123`, `org_abc`, `video:456` +- Max 10 tags per run, 1-64 characters each +- Tags don't propagate to child tasks automatically + +## Concurrency & Queues + +```ts +import { task, queue } from "@trigger.dev/sdk"; + +// Shared queue for related tasks +const emailQueue = queue({ + name: "email-processing", + concurrencyLimit: 5, // Max 5 emails processing simultaneously +}); + +// Task-level concurrency +export const oneAtATime = task({ + id: 
"sequential-task", + queue: { concurrencyLimit: 1 }, // Process one at a time + run: async (payload) => { + // Critical section - only one instance runs + }, +}); + +// Per-user concurrency +export const processUserData = task({ + id: "process-user-data", + run: async (payload: { userId: string }) => { + // Override queue with user-specific concurrency + await childTask.trigger(payload, { + queue: { + name: `user-${payload.userId}`, + concurrencyLimit: 2, + }, + }); + }, +}); + +export const emailTask = task({ + id: "send-email", + queue: emailQueue, // Use shared queue + run: async (payload: { to: string }) => { + // Send email logic + }, +}); +``` + +## Error Handling & Retries + +```ts +import { task, retry, AbortTaskRunError } from "@trigger.dev/sdk"; + +export const resilientTask = task({ + id: "resilient-task", + retry: { + maxAttempts: 10, + factor: 1.8, // Exponential backoff multiplier + minTimeoutInMs: 500, + maxTimeoutInMs: 30_000, + randomize: false, + }, + catchError: async ({ error, ctx }) => { + // Custom error handling + if (error.code === "FATAL_ERROR") { + throw new AbortTaskRunError("Cannot retry this error"); + } + + // Log error details + console.error(`Task ${ctx.task.id} failed:`, error); + + // Allow retry by returning nothing + return { retryAt: new Date(Date.now() + 60000) }; // Retry in 1 minute + }, + run: async (payload) => { + // Retry specific operations + const result = await retry.onThrow( + async () => { + return await unstableApiCall(payload); + }, + { maxAttempts: 3 } + ); + + // Conditional HTTP retries + const response = await retry.fetch("https://api.example.com", { + retry: { + maxAttempts: 5, + condition: (response, error) => { + return response?.status === 429 || response?.status >= 500; + }, + }, + }); + + return result; + }, +}); +``` + +## Machines & Performance + +```ts +export const heavyTask = task({ + id: "heavy-computation", + machine: { preset: "large-2x" }, // 8 vCPU, 16 GB RAM + maxDuration: 1800, // 30 minutes 
timeout + run: async (payload, { ctx }) => { + // Resource-intensive computation + if (ctx.machine.preset === "large-2x") { + // Use all available cores + return await parallelProcessing(payload); + } + + return await standardProcessing(payload); + }, +}); + +// Override machine when triggering +await heavyTask.trigger(payload, { + machine: { preset: "medium-1x" }, // Override for this run +}); +``` + +**Machine Presets:** + +- `micro`: 0.25 vCPU, 0.25 GB RAM +- `small-1x`: 0.5 vCPU, 0.5 GB RAM (default) +- `small-2x`: 1 vCPU, 1 GB RAM +- `medium-1x`: 1 vCPU, 2 GB RAM +- `medium-2x`: 2 vCPU, 4 GB RAM +- `large-1x`: 4 vCPU, 8 GB RAM +- `large-2x`: 8 vCPU, 16 GB RAM + +## Idempotency + +```ts +import { task, idempotencyKeys } from "@trigger.dev/sdk"; + +export const paymentTask = task({ + id: "process-payment", + retry: { + maxAttempts: 3, + }, + run: async (payload: { orderId: string; amount: number }) => { + // Automatically scoped to this task run, so if the task is retried, the idempotency key will be the same + const idempotencyKey = await idempotencyKeys.create(`payment-${payload.orderId}`); + + // Ensure payment is processed only once + await chargeCustomer.trigger(payload, { + idempotencyKey, + idempotencyKeyTTL: "24h", // Key expires in 24 hours + }); + }, +}); + +// Payload-based idempotency +import { createHash } from "node:crypto"; + +function createPayloadHash(payload: any): string { + const hash = createHash("sha256"); + hash.update(JSON.stringify(payload)); + return hash.digest("hex"); +} + +export const deduplicatedTask = task({ + id: "deduplicated-task", + run: async (payload) => { + const payloadHash = createPayloadHash(payload); + const idempotencyKey = await idempotencyKeys.create(payloadHash); + + await processData.trigger(payload, { idempotencyKey }); + }, +}); +``` + +## Metadata & Progress Tracking + +```ts +import { task, metadata } from "@trigger.dev/sdk"; + +export const batchProcessor = task({ + id: "batch-processor", + run: async 
(payload: { items: any[] }, { ctx }) => { + const totalItems = payload.items.length; + + // Initialize progress metadata + metadata + .set("progress", 0) + .set("totalItems", totalItems) + .set("processedItems", 0) + .set("status", "starting"); + + const results = []; + + for (let i = 0; i < payload.items.length; i++) { + const item = payload.items[i]; + + // Process item + const result = await processItem(item); + results.push(result); + + // Update progress + const progress = ((i + 1) / totalItems) * 100; + metadata + .set("progress", progress) + .increment("processedItems", 1) + .append("logs", `Processed item ${i + 1}/${totalItems}`) + .set("currentItem", item.id); + } + + // Final status + metadata.set("status", "completed"); + + return { results, totalProcessed: results.length }; + }, +}); + +// Update parent metadata from child task +export const childTask = task({ + id: "child-task", + run: async (payload, { ctx }) => { + // Update parent task metadata + metadata.parent.set("childStatus", "processing"); + metadata.root.increment("childrenCompleted", 1); + + return { processed: true }; + }, +}); +``` + +## Advanced Triggering + +### Frontend Triggering (React) + +```tsx +"use client"; +import { useTaskTrigger } from "@trigger.dev/react-hooks"; +import type { myTask } from "../trigger/tasks"; + +function TriggerButton({ accessToken }: { accessToken: string }) { + const { submit, handle, isLoading } = useTaskTrigger("my-task", { accessToken }); + + return ( + + ); +} +``` + +### Large Payloads + +```ts +// For payloads > 512KB (max 10MB) +export const largeDataTask = task({ + id: "large-data-task", + run: async (payload: { dataUrl: string }) => { + // Trigger.dev automatically handles large payloads + // For > 10MB, use external storage + const response = await fetch(payload.dataUrl); + const largeData = await response.json(); + + return { processed: largeData.length }; + }, +}); + +// Best practice: Use presigned URLs for very large files +await 
largeDataTask.trigger({ + dataUrl: "https://s3.amazonaws.com/bucket/large-file.json?presigned=true", +}); +``` + +### Advanced Options + +```ts +await myTask.trigger(payload, { + delay: "2h30m", // Delay execution + ttl: "24h", // Expire if not started within 24 hours + priority: 100, // Higher priority (time offset in seconds) + tags: ["urgent", "user_123"], + metadata: { source: "api", version: "v2" }, + queue: { + name: "priority-queue", + concurrencyLimit: 10, + }, + idempotencyKey: "unique-operation-id", + idempotencyKeyTTL: "1h", + machine: { preset: "large-1x" }, + maxAttempts: 5, +}); +``` + +## Hidden Tasks + +```ts +// Hidden task - not exported, only used internally +const internalProcessor = task({ + id: "internal-processor", + run: async (payload: { data: string }) => { + return { processed: payload.data.toUpperCase() }; + }, +}); + +// Public task that uses hidden task +export const publicWorkflow = task({ + id: "public-workflow", + run: async (payload: { input: string }) => { + // Use hidden task internally + const result = await internalProcessor.triggerAndWait({ + data: payload.input, + }); + + if (result.ok) { + return { output: result.output.processed }; + } + + throw new Error("Internal processing failed"); + }, +}); +``` + +## Logging & Tracing + +```ts +import { task, logger } from "@trigger.dev/sdk"; + +export const tracedTask = task({ + id: "traced-task", + run: async (payload, { ctx }) => { + logger.info("Task started", { userId: payload.userId }); + + // Custom trace with attributes + const user = await logger.trace( + "fetch-user", + async (span) => { + span.setAttribute("user.id", payload.userId); + span.setAttribute("operation", "database-fetch"); + + const userData = await database.findUser(payload.userId); + span.setAttribute("user.found", !!userData); + + return userData; + }, + { userId: payload.userId } + ); + + logger.debug("User fetched", { user: user.id }); + + try { + const result = await processUser(user); + 
logger.info("Processing completed", { result }); + return result; + } catch (error) { + logger.error("Processing failed", { + error: error.message, + userId: payload.userId, + }); + throw error; + } + }, +}); +``` + +## Usage Monitoring + +```ts +import { task, usage } from "@trigger.dev/sdk"; + +export const monitoredTask = task({ + id: "monitored-task", + run: async (payload) => { + // Get current run cost + const currentUsage = await usage.getCurrent(); + logger.info("Current cost", { + costInCents: currentUsage.costInCents, + durationMs: currentUsage.durationMs, + }); + + // Measure specific operation + const { result, compute } = await usage.measure(async () => { + return await expensiveOperation(payload); + }); + + logger.info("Operation cost", { + costInCents: compute.costInCents, + durationMs: compute.durationMs, + }); + + return result; + }, +}); +``` + +## Run Management + +```ts +// Cancel runs +await runs.cancel("run_123"); + +// Replay runs with same payload +await runs.replay("run_123"); + +// Retrieve run with cost details +const run = await runs.retrieve("run_123"); +console.log(`Cost: ${run.costInCents} cents, Duration: ${run.durationMs}ms`); +``` + +## Best Practices + +- **Concurrency**: Use queues to prevent overwhelming external services +- **Retries**: Configure exponential backoff for transient failures +- **Idempotency**: Always use for payment/critical operations +- **Metadata**: Track progress for long-running tasks +- **Machines**: Match machine size to computational requirements +- **Tags**: Use consistent naming patterns for filtering +- **Large Payloads**: Use external storage for files > 10MB +- **Error Handling**: Distinguish between retryable and fatal errors + +Design tasks to be stateless, idempotent, and resilient to failures. Use metadata for state tracking and queues for resource management. 
diff --git a/rules/4.0.0/basic-tasks.md b/rules/4.0.0/basic-tasks.md new file mode 100644 index 0000000000..6e30ff1c71 --- /dev/null +++ b/rules/4.0.0/basic-tasks.md @@ -0,0 +1,185 @@ +# Trigger.dev Basic Tasks (v4) + +**MUST use `@trigger.dev/sdk` (v4), NEVER `client.defineJob`** + +## Basic Task + +```ts +import { task } from "@trigger.dev/sdk"; + +export const processData = task({ + id: "process-data", + retry: { + maxAttempts: 10, + factor: 1.8, + minTimeoutInMs: 500, + maxTimeoutInMs: 30_000, + randomize: false, + }, + run: async (payload: { userId: string; data: any[] }) => { + // Task logic - runs for long time, no timeouts + console.log(`Processing ${payload.data.length} items for user ${payload.userId}`); + return { processed: payload.data.length }; + }, +}); +``` + +## Schema Task (with validation) + +```ts +import { schemaTask } from "@trigger.dev/sdk"; +import { z } from "zod"; + +export const validatedTask = schemaTask({ + id: "validated-task", + schema: z.object({ + name: z.string(), + age: z.number(), + email: z.string().email(), + }), + run: async (payload) => { + // Payload is automatically validated and typed + return { message: `Hello ${payload.name}, age ${payload.age}` }; + }, +}); +``` + +## Scheduled Task + +```ts +import { schedules } from "@trigger.dev/sdk"; + +const dailyReport = schedules.task({ + id: "daily-report", + cron: "0 9 * * *", // Daily at 9:00 AM UTC + // or with timezone: cron: { pattern: "0 9 * * *", timezone: "America/New_York" }, + run: async (payload) => { + console.log("Scheduled run at:", payload.timestamp); + console.log("Last run was:", payload.lastTimestamp); + console.log("Next 5 runs:", payload.upcoming); + + // Generate daily report logic + return { reportGenerated: true, date: payload.timestamp }; + }, +}); +``` + +## Triggering Tasks + +### From Backend Code + +```ts +import { tasks } from "@trigger.dev/sdk"; +import type { processData } from "./trigger/tasks"; + +// Single trigger +const handle = await 
tasks.trigger("process-data", { + userId: "123", + data: [{ id: 1 }, { id: 2 }], +}); + +// Batch trigger +const batchHandle = await tasks.batchTrigger("process-data", [ + { payload: { userId: "123", data: [{ id: 1 }] } }, + { payload: { userId: "456", data: [{ id: 2 }] } }, +]); +``` + +### From Inside Tasks (with Result handling) + +```ts +export const parentTask = task({ + id: "parent-task", + run: async (payload) => { + // Trigger and continue + const handle = await childTask.trigger({ data: "value" }); + + // Trigger and wait - returns Result object, NOT task output + const result = await childTask.triggerAndWait({ data: "value" }); + if (result.ok) { + console.log("Task output:", result.output); // Actual task return value + } else { + console.error("Task failed:", result.error); + } + + // Quick unwrap (throws on error) + const output = await childTask.triggerAndWait({ data: "value" }).unwrap(); + + // Batch trigger and wait + const results = await childTask.batchTriggerAndWait([ + { payload: { data: "item1" } }, + { payload: { data: "item2" } }, + ]); + + for (const run of results.runs) { + if (run.ok) { + console.log("Success:", run.output); + } else { + console.log("Failed:", run.error); + } + } + }, +}); + +export const childTask = task({ + id: "child-task", + run: async (payload: { data: string }) => { + return { processed: payload.data }; + }, +}); +``` + +> Never wrap triggerAndWait or batchTriggerAndWait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks.
+ +## Waits + +```ts +import { task, wait } from "@trigger.dev/sdk"; + +export const taskWithWaits = task({ + id: "task-with-waits", + run: async (payload) => { + console.log("Starting task"); + + // Wait for specific duration + await wait.for({ seconds: 30 }); + await wait.for({ minutes: 5 }); + await wait.for({ hours: 1 }); + await wait.for({ days: 1 }); + + // Wait until specific date + await wait.until({ date: new Date("2024-12-25") }); + + // Wait for token (from external system) + await wait.forToken({ + token: "user-approval-token", + timeoutInSeconds: 3600, // 1 hour timeout + }); + + console.log("All waits completed"); + return { status: "completed" }; + }, +}); +``` + +> Never wrap wait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks. + +## Key Points + +- **Result vs Output**: `triggerAndWait()` returns a `Result` object with `ok`, `output`, `error` properties - NOT the direct task output +- **Type safety**: Use `import type` for task references when triggering from backend +- **Waits > 5 seconds**: Automatically checkpointed, don't count toward compute usage + +## NEVER Use (v2 deprecated) + +```ts +// BREAKS APPLICATION +client.defineJob({ + id: "job-id", + run: async (payload, io) => { + /* ... */ + }, +}); +``` + +Use v4 SDK (`@trigger.dev/sdk`), check `result.ok` before accessing `result.output` diff --git a/rules/4.0.0/claude-code-agent.md b/rules/4.0.0/claude-code-agent.md new file mode 100644 index 0000000000..db3663e97e --- /dev/null +++ b/rules/4.0.0/claude-code-agent.md @@ -0,0 +1,238 @@ +--- +name: trigger-dev-expert +description: Use this agent when you need to design, implement, or optimize background jobs and workflows using Trigger.dev framework. 
This includes creating reliable async tasks, implementing AI workflows, setting up scheduled jobs, structuring complex task hierarchies with subtasks, configuring build extensions for tools like ffmpeg or Puppeteer/Playwright, and handling task schemas with Zod validation. The agent excels at architecting scalable background job solutions with proper error handling, retries, and monitoring.\n\nExamples:\n- \n Context: User needs to create a background job for processing video files\n user: "I need to create a task that processes uploaded videos, extracts thumbnails, and transcodes them"\n assistant: "I'll use the trigger-dev-expert agent to design a robust video processing workflow with proper task structure and ffmpeg configuration"\n \n Since this involves creating background tasks with media processing, the trigger-dev-expert agent is ideal for structuring the workflow and configuring build extensions.\n \n\n- \n Context: User wants to implement a scheduled data sync task\n user: "Create a scheduled task that runs every hour to sync data from our API to the database"\n assistant: "Let me use the trigger-dev-expert agent to create a properly structured scheduled task with error handling"\n \n The user needs a scheduled background task, which is a core Trigger.dev feature that the expert agent specializes in.\n \n\n- \n Context: User needs help with task orchestration\n user: "I have a complex workflow where I need to run multiple AI models in sequence and parallel, how should I structure this?"\n assistant: "I'll engage the trigger-dev-expert agent to architect an efficient task hierarchy using triggerAndWait and batchTriggerAndWait patterns"\n \n Complex task orchestration with subtasks is a specialty of the trigger-dev-expert agent.\n \n +model: inherit +color: green +--- + +You are an elite Trigger.dev framework expert with deep knowledge of building production-grade background job systems. 
You specialize in designing reliable, scalable workflows using Trigger.dev's async-first architecture. Tasks deployed to Trigger.dev generally run in Node.js 21+ and use the `@trigger.dev/sdk` package, along with the `@trigger.dev/build` package for build extensions and the `trigger.dev` CLI package to run the `dev` server and `deploy` command. + +> Never use `node-fetch` in your code, use the `fetch` function that's built into Node.js. + +## Design Principles + +When creating Trigger.dev solutions, you will: + +- Use the `@trigger.dev/sdk` package to create tasks, ideally using the `schemaTask` function and passing in a Zod or other schema validation library schema to the `schema` property so the task payload can be validated and automatically typed. +- Break complex workflows into subtasks that can be independently retried and made idempotent, but don't overly complicate your tasks with too many subtasks. Sometimes the correct approach is to NOT use a subtask and do things like await Promise.allSettled to do work in parallel to save on costs, as each task gets its own dedicated process and is charged by the millisecond. +- Always configure the `retry` property in the task definition to set the maximum number of retries, the delay between retries, and the backoff factor. Don't retry too much unless absolutely necessary. +- When triggering a task from inside another task, consider whether to use the `triggerAndWait`/`batchTriggerAndWait` pattern or just the `trigger`/`batchTrigger` function. Use the "andWait" variants when the parent task needs the results of the child task. +- When triggering a task, especially from inside another task, always consider whether to pass the `idempotencyKey` property to the `options` argument. This is especially important when inside another task and that task can be retried and you don't want to redo the work in child tasks (whether waiting for the results or not). 
+- Use the `logger` system in Trigger.dev to log useful messages at key execution points. +- Group subtasks that are only used from a single other task into the same file as the parent task, and don't export them. + +> Important: Never wrap triggerAndWait or batchTriggerAndWait calls in a Promise.all or Promise.allSettled as this is not supported in Trigger.dev tasks. + +## Triggering tasks + +When triggering a task from outside of a task, like for instance from an API handler in a Next.js route, you will use the `tasks.trigger` function and do a type only import of the task instance, to prevent dependencies inside the task file from leaking into the API handler and possibly causing issues with the build. An example: + +```ts +import { tasks } from "@trigger.dev/sdk"; +import type { processData } from "./trigger/tasks"; + +const handle = await tasks.trigger("process-data", { + userId: "123", + data: [{ id: 1 }, { id: 2 }], +}); +``` + +When triggering tasks from inside another task, if the other task is in a different file, use the pattern above. 
If the task is in the same file, you can use the task instance directly like so: + +```ts +const handle = await processData.trigger({ + userId: "123", + data: [{ id: 1 }, { id: 2 }], +}); +``` + +There are a bunch of options you can pass as the second argument to the `trigger` or `triggerAndWait` functions that control behavior like the idempotency key, the machine preset, the timeout, and more: + +```ts +import { idempotencyKeys } from "@trigger.dev/sdk"; + +const handle = await processData.trigger( + { + userId: "123", + }, + { + delay: "1h", // Will delay the task by 1 hour + ttl: "10m", // Will automatically cancel the task if not dequeued within 10 minutes + idempotencyKey: await idempotencyKeys.create("my-idempotency-key"), + idempotencyKeyTTL: "1h", + queue: "my-queue", + machine: "small-1x", + maxAttempts: 3, + tags: ["my-tag"], + region: "us-east-1", + } +); +``` + +You can also pass these options when doing a batch trigger for each item: + +```ts +const batchHandle = await processData.batchTrigger([ + { + payload: { userId: "123" }, + options: { + idempotencyKey: await idempotencyKeys.create("my-idempotency-key-1"), + }, + }, + { + payload: { userId: "456" }, + options: { + idempotencyKey: await idempotencyKeys.create("my-idempotency-key-2"), + }, + }, +]); +``` + +When triggering a task without the "andWait" suffix, you will receive a `RunHandle` object that contains the `id` of the run. You can use this with various `runs` SDK functions to get the status of the run, cancel it, etc. + +```ts +import { runs } from "@trigger.dev/sdk"; + +const handle = await processData.trigger({ + userId: "123", +}); + +const run = await runs.retrieve(handle.id); +``` + +When triggering a task with the "andWait" suffix, you will receive a Result type object that contains the result of the task and the output. 
Before accessing the output, you need to check the `ok` property to see if the task was successful: + +```ts +const result = await processData.triggerAndWait({ + userId: "123", +}); + +if (result.ok) { + const output = result.output; +} else { + const error = result.error; +} + +// Or you can unwrap the result and access the output directly, if the task was not successful, the unwrap will throw an error +const unwrappedOutput = await processData + .triggerAndWait({ + userId: "123", + }) + .unwrap(); + +const batchResult = await processData.batchTriggerAndWait([ + { payload: { userId: "123" } }, + { payload: { userId: "456" } }, +]); + +for (const run of batchResult.runs) { + if (run.ok) { + const output = run.output; + } else { + const error = run.error; + } +} +``` + +## Idempotency keys + +Any time you trigger a task inside another task, you should consider passing an idempotency key to the options argument using the `idempotencyKeys.create` function. This will ensure that the task is only triggered once per task run, even if the parent task is retried. If you want the idempotency key to be scoped globally instead of per task run, you can just pass a string instead of an idempotency key object: + +```ts +const idempotencyKey = await idempotencyKeys.create("my-idempotency-key"); + +const handle = await processData.trigger( + { + userId: "123", + }, + { + idempotencyKey, // Scoped to the current run, across retries + } +); + +const handle = await processData.trigger( + { + userId: "123", + }, + { + idempotencyKey: "my-idempotency-key", // Scoped across all runs + } +); +``` + +Idempotency keys are always also scoped to the task identifier of the task being triggered. This means you can use the same idempotency key for different tasks, and they will not conflict with each other. + +## Machine Presets + +- The default machine preset is `small-1x` which is a 0.5vCPU and 0.5GB of memory. 
+- The default machine preset can be overridden in the trigger.config.ts file by setting the `machine` property. +- The machine preset for a specific task can be overridden in the task definition by setting the `machine` property. +- You can set the machine preset at trigger time by passing in the `machine` property in the options argument to any of the trigger functions. + +| Preset | vCPU | Memory | Disk space | +| :----------------- | :--- | :----- | :--------- | +| micro | 0.25 | 0.25 | 10GB | +| small-1x (default) | 0.5 | 0.5 | 10GB | +| small-2x | 1 | 1 | 10GB | +| medium-1x | 1 | 2 | 10GB | +| medium-2x | 2 | 4 | 10GB | +| large-1x | 4 | 8 | 10GB | +| large-2x | 8 | 16 | 10GB | + +## Configuration Expertise + +When setting up Trigger.dev projects, you will configure the `trigger.config.ts` file with the following if needed: + +- Build extensions for tools like ffmpeg, Puppeteer, Playwright, and other binary dependencies. An example: + +```ts +import { defineConfig } from "@trigger.dev/sdk"; +import { playwright } from "@trigger.dev/build/extensions/playwright"; +import { ffmpeg, aptGet, additionalFiles } from "@trigger.dev/build/extensions/core"; +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; +import { pythonExtension } from "@trigger.dev/python/extension"; +import { lightpanda } from "@trigger.dev/build/extensions/lightpanda"; +import { esbuildPlugin } from "@trigger.dev/build/extensions"; +import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; + +export default defineConfig({ + project: "", + machine: "small-1x", // optional, default is small-1x + build: { + extensions: [ + playwright(), + ffmpeg(), + aptGet({ packages: ["curl"] }), + prismaExtension({ + version: "5.19.0", // optional, we'll automatically detect the version if not provided + schema: "prisma/schema.prisma", + }), + pythonExtension(), + lightpanda(), + esbuildPlugin( + sentryEsbuildPlugin({ + org: process.env.SENTRY_ORG, + project: process.env.SENTRY_PROJECT, 
+ authToken: process.env.SENTRY_AUTH_TOKEN, + }), + // optional - only runs during the deploy command, and adds the plugin to the end of the list of plugins + { placement: "last", target: "deploy" } + ), + ], + }, +}); +``` + +- Default retry settings for tasks +- Default machine preset + +## Code Quality Standards + +You will produce code that: + +- Uses modern TypeScript with strict type checking +- When catching errors, remember that the type of the error is `unknown` and you need to check `error instanceof Error` to see if it's a real error instance +- Follows Trigger.dev's recommended project structure +- Don't go overboard with error handling +- Write some inline documentation for complex logic +- Uses descriptive task IDs following the pattern: 'domain.action.target' diff --git a/rules/4.0.0/config.md b/rules/4.0.0/config.md new file mode 100644 index 0000000000..33b68554f1 --- /dev/null +++ b/rules/4.0.0/config.md @@ -0,0 +1,346 @@ +# Trigger.dev Configuration (v4) + +**Complete guide to configuring `trigger.config.ts` with build extensions** + +## Basic Configuration + +```ts +import { defineConfig } from "@trigger.dev/sdk"; + +export default defineConfig({ + project: "", // Required: Your project reference + dirs: ["./trigger"], // Task directories + runtime: "node", // "node", "node-22", or "bun" + logLevel: "info", // "debug", "info", "warn", "error" + + // Default retry settings + retries: { + enabledInDev: false, + default: { + maxAttempts: 3, + minTimeoutInMs: 1000, + maxTimeoutInMs: 10000, + factor: 2, + randomize: true, + }, + }, + + // Build configuration + build: { + autoDetectExternal: true, + keepNames: true, + minify: false, + extensions: [], // Build extensions go here + }, + + // Global lifecycle hooks + onStart: async ({ payload, ctx }) => { + console.log("Global task start"); + }, + onSuccess: async ({ payload, output, ctx }) => { + console.log("Global task success"); + }, + onFailure: async ({ payload, error, ctx }) => { + 
console.log("Global task failure"); + }, +}); +``` + +## Build Extensions + +### Database & ORM + +#### Prisma + +```ts +import { prismaExtension } from "@trigger.dev/build/extensions/prisma"; + +extensions: [ + prismaExtension({ + schema: "prisma/schema.prisma", + version: "5.19.0", // Optional: specify version + migrate: true, // Run migrations during build + directUrlEnvVarName: "DIRECT_DATABASE_URL", + typedSql: true, // Enable TypedSQL support + }), +]; +``` + +#### TypeScript Decorators (for TypeORM) + +```ts +import { emitDecoratorMetadata } from "@trigger.dev/build/extensions/typescript"; + +extensions: [ + emitDecoratorMetadata(), // Enables decorator metadata +]; +``` + +### Scripting Languages + +#### Python + +```ts +import { pythonExtension } from "@trigger.dev/build/extensions/python"; + +extensions: [ + pythonExtension({ + scripts: ["./python/**/*.py"], // Copy Python files + requirementsFile: "./requirements.txt", // Install packages + devPythonBinaryPath: ".venv/bin/python", // Dev mode binary + }), +]; + +// Usage in tasks +const result = await python.runInline(`print("Hello, world!")`); +const output = await python.runScript("./python/script.py", ["arg1"]); +``` + +### Browser Automation + +#### Playwright + +```ts +import { playwright } from "@trigger.dev/build/extensions/playwright"; + +extensions: [ + playwright({ + browsers: ["chromium", "firefox", "webkit"], // Default: ["chromium"] + headless: true, // Default: true + }), +]; +``` + +#### Puppeteer + +```ts +import { puppeteer } from "@trigger.dev/build/extensions/puppeteer"; + +extensions: [puppeteer()]; + +// Environment variable needed: +// PUPPETEER_EXECUTABLE_PATH: "/usr/bin/google-chrome-stable" +``` + +#### Lightpanda + +```ts +import { lightpanda } from "@trigger.dev/build/extensions/lightpanda"; + +extensions: [ + lightpanda({ + version: "latest", // or "nightly" + disableTelemetry: false, + }), +]; +``` + +### Media Processing + +#### FFmpeg + +```ts +import { ffmpeg } from 
"@trigger.dev/build/extensions/core"; + +extensions: [ + ffmpeg({ version: "7" }), // Static build, or omit for Debian version +]; + +// Automatically sets FFMPEG_PATH and FFPROBE_PATH +// Add fluent-ffmpeg to external packages if using +``` + +#### Audio Waveform + +```ts +import { audioWaveform } from "@trigger.dev/build/extensions/audioWaveform"; + +extensions: [ + audioWaveform(), // Installs Audio Waveform 1.1.0 +]; +``` + +### System & Package Management + +#### System Packages (apt-get) + +```ts +import { aptGet } from "@trigger.dev/build/extensions/core"; + +extensions: [ + aptGet({ + packages: ["ffmpeg", "imagemagick", "curl=7.68.0-1"], // Can specify versions + }), +]; +``` + +#### Additional NPM Packages + +Only use this for installing CLI tools, NOT packages you import in your code. + +```ts +import { additionalPackages } from "@trigger.dev/build/extensions/core"; + +extensions: [ + additionalPackages({ + packages: ["wrangler"], // CLI tools and specific versions + }), +]; +``` + +#### Additional Files + +```ts +import { additionalFiles } from "@trigger.dev/build/extensions/core"; + +extensions: [ + additionalFiles({ + files: ["wrangler.toml", "./assets/**", "./fonts/**"], // Glob patterns supported + }), +]; +``` + +### Environment & Build Tools + +#### Environment Variable Sync + +```ts +import { syncEnvVars } from "@trigger.dev/build/extensions/core"; + +extensions: [ + syncEnvVars(async (ctx) => { + // ctx contains: environment, projectRef, env + return [ + { name: "SECRET_KEY", value: await getSecret(ctx.environment) }, + { name: "API_URL", value: ctx.environment === "prod" ? 
"api.prod.com" : "api.dev.com" }, + ]; + }), +]; +``` + +#### ESBuild Plugins + +```ts +import { esbuildPlugin } from "@trigger.dev/build/extensions"; +import { sentryEsbuildPlugin } from "@sentry/esbuild-plugin"; + +extensions: [ + esbuildPlugin( + sentryEsbuildPlugin({ + org: process.env.SENTRY_ORG, + project: process.env.SENTRY_PROJECT, + authToken: process.env.SENTRY_AUTH_TOKEN, + }), + { placement: "last", target: "deploy" } // Optional config + ), +]; +``` + +## Custom Build Extensions + +```ts +import { defineConfig } from "@trigger.dev/sdk"; + +const customExtension = { + name: "my-custom-extension", + + externalsForTarget: (target) => { + return ["some-native-module"]; // Add external dependencies + }, + + onBuildStart: async (context) => { + console.log(`Build starting for ${context.target}`); + // Register esbuild plugins, modify build context + }, + + onBuildComplete: async (context, manifest) => { + console.log("Build complete, adding layers"); + // Add build layers, modify deployment + context.addLayer({ + id: "my-layer", + files: [{ source: "./custom-file", destination: "/app/custom" }], + commands: ["chmod +x /app/custom"], + }); + }, +}; + +export default defineConfig({ + project: "my-project", + build: { + extensions: [customExtension], + }, +}); +``` + +## Advanced Configuration + +### Telemetry + +```ts +import { PrismaInstrumentation } from "@prisma/instrumentation"; +import { OpenAIInstrumentation } from "@langfuse/openai"; + +export default defineConfig({ + // ... other config + telemetry: { + instrumentations: [new PrismaInstrumentation(), new OpenAIInstrumentation()], + exporters: [customExporter], // Optional custom exporters + }, +}); +``` + +### Machine & Performance + +```ts +export default defineConfig({ + // ... 
other config + defaultMachine: "large-1x", // Default machine for all tasks + maxDuration: 300, // Default max duration (seconds) + enableConsoleLogging: true, // Console logging in development +}); +``` + +## Common Extension Combinations + +### Full-Stack Web App + +```ts +extensions: [ + prismaExtension({ schema: "prisma/schema.prisma", migrate: true }), + additionalFiles({ files: ["./public/**", "./assets/**"] }), + syncEnvVars(async (ctx) => [...envVars]), +]; +``` + +### AI/ML Processing + +```ts +extensions: [ + pythonExtension({ + scripts: ["./ai/**/*.py"], + requirementsFile: "./requirements.txt", + }), + ffmpeg({ version: "7" }), + additionalPackages({ packages: ["wrangler"] }), +]; +``` + +### Web Scraping + +```ts +extensions: [ + playwright({ browsers: ["chromium"] }), + puppeteer(), + additionalFiles({ files: ["./selectors.json", "./proxies.txt"] }), +]; +``` + +## Best Practices + +- **Use specific versions**: Pin extension versions for reproducible builds +- **External packages**: Add modules with native addons to the `build.external` array +- **Environment sync**: Use `syncEnvVars` for dynamic secrets +- **File paths**: Use glob patterns for flexible file inclusion +- **Debug builds**: Use `--log-level debug --dry-run` for troubleshooting + +Extensions only affect deployment, not local development. Use `external` array for packages that shouldn't be bundled. 
diff --git a/rules/4.0.0/realtime.md b/rules/4.0.0/realtime.md new file mode 100644 index 0000000000..24cbb9aac0 --- /dev/null +++ b/rules/4.0.0/realtime.md @@ -0,0 +1,272 @@ +# Trigger.dev Realtime (v4) + +**Real-time monitoring and updates for runs** + +## Core Concepts + +Realtime allows you to: + +- Subscribe to run status changes, metadata updates, and streams +- Build real-time dashboards and UI updates +- Monitor task progress from frontend and backend + +## Authentication + +### Public Access Tokens + +```ts +import { auth } from "@trigger.dev/sdk"; + +// Read-only token for specific runs +const publicToken = await auth.createPublicToken({ + scopes: { + read: { + runs: ["run_123", "run_456"], + tasks: ["my-task-1", "my-task-2"], + }, + }, + expirationTime: "1h", // Default: 15 minutes +}); +``` + +### Trigger Tokens (Frontend only) + +```ts +// Single-use token for triggering tasks +const triggerToken = await auth.createTriggerPublicToken("my-task", { + expirationTime: "30m", +}); +``` + +## Backend Usage + +### Subscribe to Runs + +```ts +import { runs, tasks } from "@trigger.dev/sdk"; + +// Trigger and subscribe +const handle = await tasks.trigger("my-task", { data: "value" }); + +// Subscribe to specific run +for await (const run of runs.subscribeToRun(handle.id)) { + console.log(`Status: ${run.status}, Progress: ${run.metadata?.progress}`); + if (run.status === "COMPLETED") break; +} + +// Subscribe to runs with tag +for await (const run of runs.subscribeToRunsWithTag("user-123")) { + console.log(`Tagged run ${run.id}: ${run.status}`); +} + +// Subscribe to batch +for await (const run of runs.subscribeToBatch(batchId)) { + console.log(`Batch run ${run.id}: ${run.status}`); +} +``` + +### Streams + +```ts +import { task, metadata } from "@trigger.dev/sdk"; + +// Task that streams data +export type STREAMS = { + openai: OpenAI.ChatCompletionChunk; +}; + +export const streamingTask = task({ + id: "streaming-task", + run: async (payload) => { + const 
completion = await openai.chat.completions.create({ + model: "gpt-4", + messages: [{ role: "user", content: payload.prompt }], + stream: true, + }); + + // Register stream + const stream = await metadata.stream("openai", completion); + + let text = ""; + for await (const chunk of stream) { + text += chunk.choices[0]?.delta?.content || ""; + } + + return { text }; + }, +}); + +// Subscribe to streams +for await (const part of runs.subscribeToRun(runId).withStreams()) { + switch (part.type) { + case "run": + console.log("Run update:", part.run.status); + break; + case "openai": + console.log("Stream chunk:", part.chunk); + break; + } +} +``` + +## React Frontend Usage + +### Installation + +```bash +npm add @trigger.dev/react-hooks +``` + +### Triggering Tasks + +```tsx +"use client"; +import { useTaskTrigger, useRealtimeTaskTrigger } from "@trigger.dev/react-hooks"; +import type { myTask } from "../trigger/tasks"; + +function TriggerComponent({ accessToken }: { accessToken: string }) { + // Basic trigger + const { submit, handle, isLoading } = useTaskTrigger("my-task", { + accessToken, + }); + + // Trigger with realtime updates + const { + submit: realtimeSubmit, + run, + isLoading: isRealtimeLoading, + } = useRealtimeTaskTrigger("my-task", { accessToken }); + + return ( +
<div>
+      <button onClick={() => submit({ data: "value" })} disabled={isLoading}>Trigger</button>
+      <button onClick={() => realtimeSubmit({ data: "value" })} disabled={isRealtimeLoading}>Trigger (Realtime)</button>
+      {run && <div>Status: {run.status}</div>}
+    </div>
+  );
+}
+```
+
+### Subscribing to Runs
+
+```tsx
+"use client";
+import { useRealtimeRun, useRealtimeRunsWithTag } from "@trigger.dev/react-hooks";
+import type { myTask } from "../trigger/tasks";
+
+function SubscribeComponent({ runId, accessToken }: { runId: string; accessToken: string }) {
+  // Subscribe to specific run
+  const { run, error } = useRealtimeRun<typeof myTask>(runId, {
+    accessToken,
+    onComplete: (run) => {
+      console.log("Task completed:", run.output);
+    },
+  });
+
+  // Subscribe to tagged runs
+  const { runs } = useRealtimeRunsWithTag("user-123", { accessToken });
+
+  if (error) return <div>Error: {error.message}</div>;
+  if (!run) return <div>Loading...</div>;
+
+  return (
+    <div>
+      <div>Status: {run.status}</div>
+      <div>Progress: {run.metadata?.progress || 0}%</div>
+      {run.output && <div>Result: {JSON.stringify(run.output)}</div>}
+
+      <h3>Tagged Runs:</h3>
+      {runs.map((r) => (
+        <div key={r.id}>
+          {r.id}: {r.status}
+        </div>
+      ))}
+    </div>
+  );
+}
+```
+
+### Streams with React
+
+```tsx
+"use client";
+import { useRealtimeRunWithStreams } from "@trigger.dev/react-hooks";
+import type { streamingTask, STREAMS } from "../trigger/tasks";
+
+function StreamComponent({ runId, accessToken }: { runId: string; accessToken: string }) {
+  const { run, streams } = useRealtimeRunWithStreams<typeof streamingTask, STREAMS>(runId, {
+    accessToken,
+  });
+
+  const text = streams.openai
+    .filter((chunk) => chunk.choices[0]?.delta?.content)
+    .map((chunk) => chunk.choices[0].delta.content)
+    .join("");
+
+  return (
+    <div>
+      <div>Status: {run?.status}</div>
+      <div>Streamed Text: {text}</div>
+    </div>
+ ); +} +``` + +### Wait Tokens + +```tsx +"use client"; +import { useWaitToken } from "@trigger.dev/react-hooks"; + +function WaitTokenComponent({ tokenId, accessToken }: { tokenId: string; accessToken: string }) { + const { complete } = useWaitToken(tokenId, { accessToken }); + + return ; +} +``` + +### SWR Hooks (Fetch Once) + +```tsx +"use client"; +import { useRun } from "@trigger.dev/react-hooks"; +import type { myTask } from "../trigger/tasks"; + +function SWRComponent({ runId, accessToken }: { runId: string; accessToken: string }) { + const { run, error, isLoading } = useRun(runId, { + accessToken, + refreshInterval: 0, // Disable polling (recommended) + }); + + if (isLoading) return
<div>Loading...</div>;
+  if (error) return <div>Error: {error.message}</div>;
+
+  return <div>Run: {run?.status}</div>
; +} +``` + +## Run Object Properties + +Key properties available in run subscriptions: + +- `id`: Unique run identifier +- `status`: `QUEUED`, `EXECUTING`, `COMPLETED`, `FAILED`, `CANCELED`, etc. +- `payload`: Task input data (typed) +- `output`: Task result (typed, when completed) +- `metadata`: Real-time updatable data +- `createdAt`, `updatedAt`: Timestamps +- `costInCents`: Execution cost + +## Best Practices + +- **Use Realtime over SWR**: Recommended for most use cases due to rate limits +- **Scope tokens properly**: Only grant necessary read/trigger permissions +- **Handle errors**: Always check for errors in hooks and subscriptions +- **Type safety**: Use task types for proper payload/output typing +- **Cleanup subscriptions**: Backend subscriptions auto-complete, frontend hooks auto-cleanup diff --git a/rules/4.0.0/scheduled-tasks.md b/rules/4.0.0/scheduled-tasks.md new file mode 100644 index 0000000000..7d46a45ad9 --- /dev/null +++ b/rules/4.0.0/scheduled-tasks.md @@ -0,0 +1,117 @@ +# Scheduled tasks (cron) + +Recurring tasks using cron. For one-off future runs, use the **delay** option. + +## Define a scheduled task + +```ts +import { schedules } from "@trigger.dev/sdk"; + +export const task = schedules.task({ + id: "first-scheduled-task", + run: async (payload) => { + payload.timestamp; // Date (scheduled time, UTC) + payload.lastTimestamp; // Date | undefined + payload.timezone; // IANA, e.g. "America/New_York" (default "UTC") + payload.scheduleId; // string + payload.externalId; // string | undefined + payload.upcoming; // Date[] + + payload.timestamp.toLocaleString("en-US", { timeZone: payload.timezone }); + }, +}); +``` + +> Scheduled tasks need at least one schedule attached to run. 
+
+## Attach schedules
+
+**Declarative (sync on dev/deploy):**
+
+```ts
+schedules.task({
+  id: "every-2h",
+  cron: "0 */2 * * *", // UTC
+  run: async () => {},
+});
+
+schedules.task({
+  id: "tokyo-5am",
+  cron: { pattern: "0 5 * * *", timezone: "Asia/Tokyo", environments: ["PRODUCTION", "STAGING"] },
+  run: async () => {},
+});
+```
+
+**Imperative (SDK or dashboard):**
+
+```ts
+await schedules.create({
+  task: task.id,
+  cron: "0 0 * * *",
+  timezone: "America/New_York", // DST-aware
+  externalId: "user_123",
+  deduplicationKey: "user_123-daily", // updates if reused
+});
+```
+
+### Dynamic / multi-tenant example
+
+```ts
+// /trigger/reminder.ts
+export const reminderTask = schedules.task({
+  id: "todo-reminder",
+  run: async (p) => {
+    if (!p.externalId) throw new Error("externalId is required");
+    const user = await db.getUser(p.externalId);
+    await sendReminderEmail(user);
+  },
+});
+```
+
+```ts
+// app/reminders/route.ts
+export async function POST(req: Request) {
+  const data = await req.json();
+  return Response.json(
+    await schedules.create({
+      task: reminderTask.id,
+      cron: "0 8 * * *",
+      timezone: data.timezone,
+      externalId: data.userId,
+      deduplicationKey: `${data.userId}-reminder`,
+    })
+  );
+}
+```
+
+## Cron syntax (no seconds)
+
+```
+* * * * *
+| | | | └ day of week (0–7 or 1L–7L; 0/7=Sun; L=last)
+| | | └── month (1–12)
+| | └──── day of month (1–31 or L)
+| └────── hour (0–23)
+└──────── minute (0–59)
+```
+
+## When schedules won't trigger
+
+- **Dev:** only when the dev CLI is running.
+- **Staging/Production:** only for tasks in the **latest deployment**.
+ +## SDK management (quick refs) + +```ts +await schedules.retrieve(id); +await schedules.list(); +await schedules.update(id, { cron: "0 0 1 * *", externalId: "ext", deduplicationKey: "key" }); +await schedules.deactivate(id); +await schedules.activate(id); +await schedules.del(id); +await schedules.timezones(); // list of IANA timezones +``` + +## Dashboard + +Create/attach schedules visually (Task, Cron pattern, Timezone, Optional: External ID, Dedup key, Environments). Test scheduled tasks from the **Test** page. diff --git a/rules/manifest.json b/rules/manifest.json new file mode 100644 index 0000000000..a5b205920d --- /dev/null +++ b/rules/manifest.json @@ -0,0 +1,56 @@ +{ + "name": "trigger.dev", + "description": "Trigger.dev coding agent rules", + "currentVersion": "4.0.0", + "versions": { + "4.0.0": { + "options": [ + { + "name": "basic", + "title": "Basic tasks", + "label": "Only the most important rules for writing basic Trigger.dev tasks", + "path": "4.0.0/basic-tasks.md", + "tokens": 1200 + }, + { + "name": "advanced-tasks", + "title": "Advanced tasks", + "label": "Comprehensive rules to help you write advanced Trigger.dev tasks", + "path": "4.0.0/advanced-tasks.md", + "tokens": 3000 + }, + { + "name": "config", + "title": "Configuring Trigger.dev", + "label": "Configure your Trigger.dev project with a trigger.config.ts file", + "path": "4.0.0/config.md", + "tokens": 1900, + "applyTo": "**/trigger.config.ts" + }, + { + "name": "scheduled-tasks", + "title": "Scheduled Tasks", + "label": "How to write and use scheduled Trigger.dev tasks", + "path": "4.0.0/scheduled-tasks.md", + "tokens": 780 + }, + { + "name": "realtime", + "title": "Realtime", + "label": "How to use realtime in your Trigger.dev tasks and your frontend", + "path": "4.0.0/realtime.md", + "tokens": 1700 + }, + { + "name": "claude-code-agent", + "title": "Claude Code Agent", + "label": "An expert Trigger.dev developer as a Claude Code subagent", + "path": "4.0.0/claude-code-agent.md", + 
"tokens": 2700, + "client": "claude-code", + "installStrategy": "claude-code-subagent" + } + ] + } + } +} \ No newline at end of file