diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 6e56826..6ad8e28 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -50,6 +50,9 @@ jobs: - name: Install Dependencies working-directory: ./backend run: npm ci + - name: Generate Prisma Client + working-directory: ./backend + run: npx prisma generate - name: Run Tests working-directory: ./backend run: NODE_OPTIONS="--experimental-vm-modules" npm test diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 01eab0b..a114f82 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,6 +20,9 @@ jobs: - name: Install Dependencies working-directory: ./backend run: npm ci + - name: Generate Prisma Client + working-directory: ./backend + run: npx prisma generate - name: Run Tests working-directory: ./backend run: NODE_OPTIONS="--experimental-vm-modules" npm test diff --git a/backend/.env.example b/backend/.env.example new file mode 100644 index 0000000..f0f03d8 --- /dev/null +++ b/backend/.env.example @@ -0,0 +1,12 @@ +# Database connection +# Preset to match the local docker-compose defaults +DATABASE_URL="postgresql://user:password@localhost:5432/workflow_db?schema=public" + +# Environment (development | production | test) +NODE_ENV="development" + +# Logging configuration +LOG_LEVEL="info" + +# Application Port +PORT=4000 \ No newline at end of file diff --git a/backend/prisma/migrations/20260207074430_simplify_id_to_int/migration.sql b/backend/prisma/migrations/20260207074430_simplify_id_to_int/migration.sql deleted file mode 100644 index 70d9f15..0000000 --- a/backend/prisma/migrations/20260207074430_simplify_id_to_int/migration.sql +++ /dev/null @@ -1,26 +0,0 @@ -/* - Warnings: - - - The primary key for the `workflow_runs` table will be changed. If it partially fails, the table could be left without primary key constraint. - - You are about to alter the column `id` on the `workflow_runs` table. 
The data in that column could be lost. The data in that column will be cast from `BigInt` to `Integer`. - - You are about to alter the column `workflow_id` on the `workflow_runs` table. The data in that column could be lost. The data in that column will be cast from `BigInt` to `Integer`. - - The primary key for the `workflows` table will be changed. If it partially fails, the table could be left without primary key constraint. - - You are about to alter the column `id` on the `workflows` table. The data in that column could be lost. The data in that column will be cast from `BigInt` to `Integer`. - -*/ --- DropForeignKey -ALTER TABLE "workflow_runs" DROP CONSTRAINT "workflow_runs_workflow_id_fkey"; - --- AlterTable -ALTER TABLE "workflow_runs" DROP CONSTRAINT "workflow_runs_pkey", -ALTER COLUMN "id" SET DATA TYPE INTEGER, -ALTER COLUMN "workflow_id" SET DATA TYPE INTEGER, -ADD CONSTRAINT "workflow_runs_pkey" PRIMARY KEY ("id"); - --- AlterTable -ALTER TABLE "workflows" DROP CONSTRAINT "workflows_pkey", -ALTER COLUMN "id" SET DATA TYPE INTEGER, -ADD CONSTRAINT "workflows_pkey" PRIMARY KEY ("id"); - --- AddForeignKey -ALTER TABLE "workflow_runs" ADD CONSTRAINT "workflow_runs_workflow_id_fkey" FOREIGN KEY ("workflow_id") REFERENCES "workflows"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/backend/prisma/migrations/20260206161216_init_schema_v2/migration.sql b/backend/prisma/migrations/20260207172252_init_schema/migration.sql similarity index 81% rename from backend/prisma/migrations/20260206161216_init_schema_v2/migration.sql rename to backend/prisma/migrations/20260207172252_init_schema/migration.sql index 7154d93..82e1ca3 100644 --- a/backend/prisma/migrations/20260206161216_init_schema_v2/migration.sql +++ b/backend/prisma/migrations/20260207172252_init_schema/migration.sql @@ -1,6 +1,9 @@ +-- CreateEnum +CREATE TYPE "RunStatus" AS ENUM ('SUCCESS', 'SKIPPED', 'FAILED'); + -- CreateTable CREATE TABLE "workflows" ( - "id" BIGSERIAL NOT NULL, + "id" SERIAL 
NOT NULL, "name" VARCHAR(255) NOT NULL, "enabled" BOOLEAN NOT NULL DEFAULT true, "trigger_path" VARCHAR(64) NOT NULL, @@ -13,9 +16,9 @@ CREATE TABLE "workflows" ( -- CreateTable CREATE TABLE "workflow_runs" ( - "id" BIGSERIAL NOT NULL, - "workflow_id" BIGINT NOT NULL, - "status" VARCHAR(20) NOT NULL, + "id" SERIAL NOT NULL, + "workflow_id" INTEGER NOT NULL, + "status" "RunStatus" NOT NULL, "start_time" TIMESTAMP(3) NOT NULL, "end_time" TIMESTAMP(3), "error_message" TEXT, diff --git a/backend/prisma/schema.prisma b/backend/prisma/schema.prisma index 571e6fd..ee090d7 100644 --- a/backend/prisma/schema.prisma +++ b/backend/prisma/schema.prisma @@ -7,6 +7,12 @@ datasource db { provider = "postgresql" } +enum RunStatus { + SUCCESS + SKIPPED + FAILED +} + model Workflow { id Int @id @default(autoincrement()) name String @db.VarChar(255) @@ -23,7 +29,7 @@ model Workflow { model WorkflowRun { id Int @id @default(autoincrement()) workflowId Int @map("workflow_id") - status String @db.VarChar(20) + status RunStatus startTime DateTime @map("start_time") endTime DateTime? @map("end_time") errorMessage String? @map("error_message") diff --git a/backend/scripts/validate-trigger.ts b/backend/scripts/validate-trigger.ts deleted file mode 100644 index 65362b2..0000000 --- a/backend/scripts/validate-trigger.ts +++ /dev/null @@ -1,62 +0,0 @@ -import axios from "axios"; - -const API_URL = "http://localhost:4000"; - -async function testTrigger() { - try { - // 1. 
Create a Workflow - console.log("Creating workflow..."); - const createRes = await axios.post(`${API_URL}/workflows`, { - name: "Test Trigger Workflow", - enabled: true, - steps: [ - { - type: "filter", - conditions: [{ path: "value", op: "eq", value: 100 }], - }, - { - type: "transform", - ops: [ - { op: "template", to: "message", template: "Value is {{value}}" }, - ], - }, - // Using a httpbin to echo back - { - type: "http_request", - method: "POST", - url: "https://httpbin.org/post", - headers: { "Content-Type": "application/json" }, - body: { - mode: "custom", - value: { - nestedInfo: "{{message}}", - }, - }, - timeoutMs: 5000, - retries: 2, - }, - ], - }); - - const workflow = createRes.data; - console.log("Workflow created:", workflow.id, workflow.triggerPath); - - // 2. Trigger Success - console.log("\nTriggering (Success Case)..."); - const triggerUrl = `${API_URL}/t/${workflow.triggerPath}`; - const successRes = await axios.post(triggerUrl, { value: 100 }); - console.log("Success Response:", successRes.data); - - // 3. 
Trigger Skipped - console.log("\nTriggering (Skipped Case)..."); - const skippedRes = await axios.post(triggerUrl, { value: 99 }); - console.log("Skipped Response:", skippedRes.data); - } catch (e: any) { - console.error("Test failed:", e.message); - if (e.response) { - console.error("Response:", e.response.data); - } - } -} - -testTrigger(); diff --git a/backend/src/app.ts b/backend/src/app.ts index bf30c3f..f6ef954 100644 --- a/backend/src/app.ts +++ b/backend/src/app.ts @@ -2,6 +2,11 @@ import express from "express"; import cors from "cors"; import { extendZodWithOpenApi } from "@asteasolutions/zod-to-openapi"; import { z } from "zod"; +import workflowRoutes from "./routes/workflow.routes"; +import triggerRoutes from "./routes/trigger.routes"; +import swaggerUi from "swagger-ui-express"; +import { openApiDocument } from "./docs/openapi"; +import { errorHandler } from "./middleware/error-handler"; extendZodWithOpenApi(z); @@ -15,16 +20,10 @@ app.get("/health", (req, res) => { res.json({ status: "ok", timestamp: new Date().toISOString() }); }); -import workflowRoutes from "./routes/workflow.routes"; -import triggerRoutes from "./routes/trigger.routes"; -import swaggerUi from "swagger-ui-express"; -import { openApiDocument } from "./docs/openapi"; - app.use("/api-docs", swaggerUi.serve, swaggerUi.setup(openApiDocument)); app.use("/api/workflows", workflowRoutes); app.use("/t", triggerRoutes); -import { errorHandler } from "./middleware/error-handler"; app.use(errorHandler); export default app; diff --git a/backend/src/lib/errors.ts b/backend/src/lib/errors.ts index 2d22d96..f1961c3 100644 --- a/backend/src/lib/errors.ts +++ b/backend/src/lib/errors.ts @@ -1,11 +1,9 @@ export class AppError extends Error { public readonly statusCode: number; - public readonly isOperational: boolean; constructor(message: string, statusCode: number) { super(message); this.statusCode = statusCode; - this.isOperational = true; Error.captureStackTrace(this, this.constructor); } diff 
--git a/backend/src/middleware/error-handler.ts b/backend/src/middleware/error-handler.ts index cf9c195..df4f63a 100644 --- a/backend/src/middleware/error-handler.ts +++ b/backend/src/middleware/error-handler.ts @@ -10,11 +10,10 @@ export const errorHandler: ErrorRequestHandler = ( let statusCode = err.statusCode || 500; let message = err.message || "Internal Server Error"; - // Log the error for internal tracking (could use a proper logger here) if (statusCode === 500) { logger.error( { err, req: { method: req.method, url: req.url, body: req.body } }, - `[Admin API Error]: ${message}`, + `[API Error]: ${message}`, ); } diff --git a/backend/src/repositories/workflow-run.repository.ts b/backend/src/repositories/workflow-run.repository.ts index f52ce73..07fd3e4 100644 --- a/backend/src/repositories/workflow-run.repository.ts +++ b/backend/src/repositories/workflow-run.repository.ts @@ -1,9 +1,9 @@ -import { PrismaClient } from "@prisma/client"; +import { PrismaClient, RunStatus } from "@prisma/client"; import prisma from "../lib/prisma"; export interface CreateRunDTO { workflowId: number; - status: "success" | "skipped" | "failed"; + status: RunStatus; startTime: Date; endTime?: Date; errorMessage?: string; @@ -11,7 +11,7 @@ export interface CreateRunDTO { } export interface UpdateRunDTO { - status?: "success" | "skipped" | "failed"; + status?: RunStatus; endTime?: Date; errorMessage?: string; failureMeta?: any; diff --git a/backend/src/schemas/workflow.schema.ts b/backend/src/schemas/workflow.schema.ts index c7c1dc9..b1f07f8 100644 --- a/backend/src/schemas/workflow.schema.ts +++ b/backend/src/schemas/workflow.schema.ts @@ -1,85 +1,99 @@ import { z } from "zod"; +import { + StepType, + FilterOperator, + TransformOperator, + HttpMethod, + HttpBodyMode, +} from "../types/enums"; export const FilterStepSchema = z.object({ - type: z.literal("filter"), - conditions: z.array( - z.object({ - path: z.string(), - op: z.enum(["eq", "neq"]), - value: z.any(), - }), - ), -}); - 
-export const LogStepSchema = z.object({ - type: z.literal("log"), - message: z.string(), + type: z.literal(StepType.FILTER), + conditions: z + .array( + z.object({ + path: z.string().min(1, { error: "Condition path is required" }), + op: z.enum(FilterOperator, { error: "Invalid filter operator" }), + value: z.unknown(), + }), + ) + .min(1, { error: "At least one condition is required" }), }); export const TransformStepSchema = z.object({ - type: z.literal("transform"), - ops: z.array( - z.union([ - z.object({ - op: z.literal("default"), - path: z.string(), - value: z.any(), - }), - z.object({ - op: z.literal("template"), - to: z.string(), - template: z.string(), - }), - z.object({ - op: z.literal("pick"), - paths: z.array(z.string()), - }), - ]), - ), + type: z.literal(StepType.TRANSFORM), + ops: z + .array( + z.discriminatedUnion("op", [ + z.object({ + op: z.literal(TransformOperator.DEFAULT), + path: z.string().min(1, { error: "Target path is required" }), + value: z.unknown(), + }), + z.object({ + op: z.literal(TransformOperator.TEMPLATE), + to: z.string().min(1, { error: "Target field 'to' is required" }), + template: z.string().min(1, { error: "Template string is required" }), + }), + z.object({ + op: z.literal(TransformOperator.PICK), + paths: z + .array(z.string()) + .min(1, { error: "At least one path to pick is required" }), + }), + ]), + ) + .min(1, { error: "At least one transformation operation is required" }), }); export const HttpRequestStepSchema = z.object({ - type: z.literal("http_request"), - method: z.enum(["GET", "POST", "PUT", "PATCH", "DELETE"]), - url: z.string().url(), + type: z.literal(StepType.HTTP_REQUEST), + method: z.enum(HttpMethod, { error: "Invalid HTTP method" }), + url: z.url({ error: "Invalid URL format" }), headers: z.record(z.string(), z.string()).optional(), body: z - .union([ - z.object({ mode: z.literal("ctx") }), + .discriminatedUnion("mode", [ + z.object({ mode: z.literal(HttpBodyMode.CTX) }), z.object({ - mode: 
z.literal("custom"), - value: z.record(z.string(), z.any()), + mode: z.literal(HttpBodyMode.CUSTOM), + value: z.record(z.string(), z.unknown()), }), ]) .optional(), timeoutMs: z.number().positive().default(2000), - retries: z.number().min(0).default(3), + retries: z + .number() + .min(0, { error: "Retries cannot be negative" }) + .default(3), }); -export const StepSchema = z.union([ +export const StepSchema = z.discriminatedUnion("type", [ FilterStepSchema, - LogStepSchema, TransformStepSchema, HttpRequestStepSchema, ]); export const CreateWorkflowSchema = z.object({ - name: z.string().min(1, "Workflow name is required"), + name: z.string().min(1, { error: "Workflow name is required" }), enabled: z.boolean().default(true), - steps: z.array(StepSchema).min(1, "Workflow must have at least one step"), + steps: z + .array(StepSchema) + .min(1, { error: "Workflow must have at least one step" }), }); export const UpdateWorkflowSchema = z.object({ - name: z.string().min(1, "Workflow name cannot be empty").optional(), + name: z + .string() + .min(1, { error: "Workflow name cannot be empty" }) + .optional(), enabled: z.boolean().optional(), steps: z .array(StepSchema) - .min(1, "Workflow must have at least one step") + .min(1, { error: "Workflow must have at least one step" }) .optional(), }); export type FilterStep = z.infer; -export type LogStep = z.infer; export type TransformStep = z.infer; export type HttpRequestStep = z.infer; export type WorkflowStep = z.infer; diff --git a/backend/src/services/__tests__/execution.service.test.ts b/backend/src/services/__tests__/execution.service.test.ts index 4a30fdb..4c1a02c 100644 --- a/backend/src/services/__tests__/execution.service.test.ts +++ b/backend/src/services/__tests__/execution.service.test.ts @@ -1,6 +1,13 @@ import { ExecutionService } from "../execution.service"; import { WorkflowRunRepository } from "../../repositories/workflow-run.repository"; import axios from "axios"; +import { RunStatus } from "@prisma/client"; 
+import { + StepType, + FilterOperator, + TransformOperator, + HttpMethod, +} from "../../types/enums"; jest.mock("../../repositories/workflow-run.repository"); jest.mock("axios"); @@ -24,7 +31,7 @@ describe("ExecutionService", () => { // Default mock for run creation mockRunRepository.create.mockResolvedValue({ id: 1, - status: "success", + status: RunStatus.SUCCESS, } as any); mockRunRepository.update.mockResolvedValue({ id: 1 } as any); }); @@ -33,7 +40,12 @@ describe("ExecutionService", () => { it("should execute a simple log step and succeed", async () => { const workflow = { id: 1, - steps: [{ type: "log", message: "Hello {{name}}" }], + steps: [ + { + type: StepType.TRANSFORM, + ops: [{ op: TransformOperator.DEFAULT, path: "test", value: 1 }], + }, + ], }; const payload = { name: "World" }; @@ -42,11 +54,11 @@ describe("ExecutionService", () => { payload, ); - expect(result.status).toBe("success"); + expect(result.status).toBe(RunStatus.SUCCESS); expect(mockRunRepository.create).toHaveBeenCalled(); expect(mockRunRepository.update).toHaveBeenCalledWith( 1, - expect.objectContaining({ status: "success" }), + expect.objectContaining({ status: RunStatus.SUCCESS }), ); }); @@ -55,10 +67,15 @@ describe("ExecutionService", () => { id: 1, steps: [ { - type: "filter", - conditions: [{ path: "age", op: "eq", value: 25 }], + type: StepType.FILTER, + conditions: [{ path: "age", op: FilterOperator.EQ, value: 25 }], + }, + { + type: StepType.TRANSFORM, + ops: [ + { op: TransformOperator.DEFAULT, path: "skipped", value: true }, + ], }, - { type: "log", message: "This should be skipped" }, ], }; const payload = { age: 30 }; @@ -68,10 +85,10 @@ describe("ExecutionService", () => { payload, ); - expect(result.status).toBe("skipped"); + expect(result.status).toBe(RunStatus.SKIPPED); expect(mockRunRepository.update).toHaveBeenCalledWith( 1, - expect.objectContaining({ status: "skipped" }), + expect.objectContaining({ status: RunStatus.SKIPPED }), ); }); @@ -80,8 +97,10 @@ 
describe("ExecutionService", () => { id: 1, steps: [ { - type: "transform", - ops: [{ op: "default", path: "city", value: "London" }], + type: StepType.TRANSFORM, + ops: [ + { op: TransformOperator.DEFAULT, path: "city", value: "London" }, + ], }, ], }; @@ -92,7 +111,7 @@ describe("ExecutionService", () => { payload, ); - expect(result.status).toBe("success"); + expect(result.status).toBe(RunStatus.SUCCESS); // Context is modified in place expect(payload).toEqual({ city: "London" }); }); @@ -102,8 +121,8 @@ describe("ExecutionService", () => { id: 1, steps: [ { - type: "http_request", - method: "GET", + type: StepType.HTTP_REQUEST, + method: HttpMethod.GET, url: "http://test.com", retries: 1, }, @@ -117,11 +136,11 @@ describe("ExecutionService", () => { {}, ); - expect(result.status).toBe("failed"); + expect(result.status).toBe(RunStatus.FAILED); expect(mockedAxios).toHaveBeenCalledTimes(2); // Initial try + 1 retry expect(mockRunRepository.update).toHaveBeenCalledWith( 1, - expect.objectContaining({ status: "failed" }), + expect.objectContaining({ status: RunStatus.FAILED }), ); }); }); @@ -136,10 +155,10 @@ describe("ExecutionService", () => { it("executeFilterStep should support eq and neq", () => { const step = { - type: "filter", + type: StepType.FILTER, conditions: [ - { path: "a", op: "eq", value: 1 }, - { path: "b", op: "neq", value: 2 }, + { path: "a", op: FilterOperator.EQ, value: 1 }, + { path: "b", op: FilterOperator.NEQ, value: 2 }, ], }; diff --git a/backend/src/services/execution.service.ts b/backend/src/services/execution.service.ts index ca1e88c..9934933 100644 --- a/backend/src/services/execution.service.ts +++ b/backend/src/services/execution.service.ts @@ -1,9 +1,8 @@ -import { Workflow } from "@prisma/client"; +import { Workflow, RunStatus } from "@prisma/client"; import { WorkflowRunRepository } from "../repositories/workflow-run.repository"; import { WorkflowStep, FilterStep, - LogStep, TransformStep, HttpRequestStep, } from 
"../schemas/workflow.schema"; @@ -11,6 +10,12 @@ import axios, { AxiosRequestConfig } from "axios"; import get from "lodash/get"; import set from "lodash/set"; import logger from "../lib/logger"; +import { + StepType, + FilterOperator, + TransformOperator, + HttpBodyMode, +} from "../types/enums"; export class ExecutionService { private runRepository: WorkflowRunRepository; @@ -26,7 +31,7 @@ export class ExecutionService { // 2. Create Workflow Run const run = await this.runRepository.create({ workflowId: workflow.id, - status: "success", // optimistic default, will update if fails/skips + status: RunStatus.SUCCESS, // optimistic default, will update if fails/skips startTime: new Date(), }); @@ -40,19 +45,19 @@ export class ExecutionService { if (!shouldContinue) { // Filter step returned false -> SKIPPED await this.runRepository.update(run.id, { - status: "skipped", + status: RunStatus.SKIPPED, endTime: new Date(), }); - return { runId: run.id.toString(), status: "skipped" }; + return { runId: run.id.toString(), status: RunStatus.SKIPPED }; } } // 4. Success await this.runRepository.update(run.id, { - status: "success", + status: RunStatus.SUCCESS, endTime: new Date(), }); - return { runId: run.id.toString(), status: "success" }; + return { runId: run.id.toString(), status: RunStatus.SUCCESS }; } catch (error: any) { // 5. 
Failure logger.error( @@ -72,7 +77,7 @@ export class ExecutionService { }; await this.runRepository.update(run.id, { - status: "failed", + status: RunStatus.FAILED, endTime: new Date(), errorMessage: error.message, failureMeta, @@ -80,7 +85,7 @@ export class ExecutionService { return { runId: run.id.toString(), - status: "failed", + status: RunStatus.FAILED, error: error.message, }; } @@ -88,15 +93,12 @@ export class ExecutionService { private async executeStep(step: WorkflowStep, ctx: any): Promise { switch (step.type) { - case "filter": + case StepType.FILTER: return this.executeFilterStep(step, ctx); - case "log": - this.executeLogStep(step, ctx); - return true; - case "transform": + case StepType.TRANSFORM: this.executeTransformStep(step, ctx); return true; - case "http_request": + case StepType.HTTP_REQUEST: await this.executeHttpStep(step, ctx); return true; default: @@ -109,35 +111,29 @@ export class ExecutionService { const value = get(ctx, condition.path); const target = condition.value; - if (condition.op === "eq" && value !== target) return false; - if (condition.op === "neq" && value === target) return false; + if (condition.op === FilterOperator.EQ && value !== target) return false; + if (condition.op === FilterOperator.NEQ && value === target) return false; } return true; } - private executeLogStep(step: LogStep, ctx: any): void { - const message = this.applyTemplate(step.message, ctx); - // Structured log - logger.info({ step: "log", message }, `[WORKFLOW_LOG] ${message}`); - } - private executeTransformStep(step: TransformStep, ctx: any): void { for (const op of step.ops) { switch (op.op) { - case "default": { + case TransformOperator.DEFAULT: { const current = get(ctx, op.path); if (current === undefined || current === null || current === "") { set(ctx, op.path, op.value); } break; } - case "template": { + case TransformOperator.TEMPLATE: { // generic template replacement {{var}} const processed = this.applyTemplate(op.template, ctx); set(ctx, 
op.to, processed); break; } - case "pick": { + case TransformOperator.PICK: { // pick creates a NEW object with only selected paths const newCtx = {}; for (const path of op.paths) { @@ -171,9 +167,9 @@ export class ExecutionService { } // Resolve Body - if (step.body?.mode === "ctx") { + if (step.body?.mode === HttpBodyMode.CTX) { body = ctx; - } else if (step.body?.mode === "custom") { + } else if (step.body?.mode === HttpBodyMode.CUSTOM) { body = this.resolveObjectTemplates(step.body.value, ctx); } diff --git a/backend/src/types/enums.ts b/backend/src/types/enums.ts new file mode 100644 index 0000000..4c3bc8e --- /dev/null +++ b/backend/src/types/enums.ts @@ -0,0 +1,29 @@ +export enum StepType { + FILTER = "filter", + TRANSFORM = "transform", + HTTP_REQUEST = "http_request", +} + +export enum FilterOperator { + EQ = "eq", + NEQ = "neq", +} + +export enum TransformOperator { + DEFAULT = "default", + TEMPLATE = "template", + PICK = "pick", +} + +export enum HttpBodyMode { + CTX = "ctx", + CUSTOM = "custom", +} + +export enum HttpMethod { + GET = "GET", + POST = "POST", + PUT = "PUT", + PATCH = "PATCH", + DELETE = "DELETE", +} diff --git a/frontend/.env.example b/frontend/.env.example new file mode 100644 index 0000000..5cccb44 --- /dev/null +++ b/frontend/.env.example @@ -0,0 +1,2 @@ +# Backend URL +NEXT_PUBLIC_API_URL=http://localhost:4000 \ No newline at end of file diff --git a/frontend/.gitignore b/frontend/.gitignore index 5ef6a52..e72b4d6 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -31,7 +31,7 @@ yarn-error.log* .pnpm-debug.log* # env files (can opt-in for committing if needed) -.env* +.env # vercel .vercel diff --git a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx index c908c06..5e2cd5f 100644 --- a/frontend/src/app/layout.tsx +++ b/frontend/src/app/layout.tsx @@ -1,5 +1,6 @@ import type { Metadata } from "next"; import "./globals.css"; +import { Toaster } from "sonner"; import QueryProvider from "@/providers/QueryProvider"; 
import { ThemeProvider } from "@/providers/ThemeProvider"; @@ -11,8 +12,6 @@ export const metadata: Metadata = { }, }; -import { Toaster } from "sonner"; - export default function RootLayout({ children, }: Readonly<{ diff --git a/frontend/src/components/WorkflowEditor.tsx b/frontend/src/components/WorkflowEditor.tsx index 1d53900..7f6277a 100644 --- a/frontend/src/components/WorkflowEditor.tsx +++ b/frontend/src/components/WorkflowEditor.tsx @@ -11,6 +11,13 @@ import { useTheme } from "@/providers/ThemeProvider"; import { toast } from "sonner"; import { handleError } from "@/lib/error-handler"; +import { + StepType, + TransformOperator, + HttpMethod, + HttpBodyMode, +} from "@/lib/enums"; + interface WorkflowEditorProps { workflow?: Workflow | null; onClose: () => void; @@ -38,22 +45,26 @@ export default function WorkflowEditor({ JSON.stringify( [ { - type: "transform", + type: StepType.TRANSFORM, ops: [ - { op: "default", path: "severity", value: "info" }, { - op: "template", + op: TransformOperator.DEFAULT, + path: "severity", + value: "info", + }, + { + op: TransformOperator.TEMPLATE, to: "msg", template: "Log: {{message}} ({{severity}})", }, ], }, { - type: "http_request", - method: "POST", + type: StepType.HTTP_REQUEST, + method: HttpMethod.POST, url: "https://webhook.site/REPLACE_WITH_YOUR_UUID", body: { - mode: "custom", + mode: HttpBodyMode.CUSTOM, value: { text: "{{msg}}" }, }, }, diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index e25195e..174fe8a 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -10,11 +10,7 @@ const API_BASE = `${process.env.NEXT_PUBLIC_API_URL}/api`; async function handleResponse(res: Response) { if (!res.ok) { const errorData = await res.json().catch(() => ({})); - // Backend returns { message: "..." } in our new error handler - // Older or other parts might return { error: "..." 
} - const msg = - errorData.message || errorData.error || "An unexpected error occurred"; - throw new Error(msg); + throw errorData; } return res.json(); } @@ -58,8 +54,6 @@ export async function deleteWorkflow(id: string): Promise { }); if (!res.ok) { const errorData = await res.json().catch(() => ({})); - const msg = - errorData.message || errorData.error || "Failed to delete workflow"; - throw new Error(msg); + throw errorData; } } diff --git a/frontend/src/lib/enums.ts b/frontend/src/lib/enums.ts new file mode 100644 index 0000000..2fe3ea7 --- /dev/null +++ b/frontend/src/lib/enums.ts @@ -0,0 +1,35 @@ +export enum StepType { + FILTER = "filter", + TRANSFORM = "transform", + HTTP_REQUEST = "http_request", +} + +export enum FilterOperator { + EQ = "eq", + NEQ = "neq", +} + +export enum TransformOperator { + DEFAULT = "default", + TEMPLATE = "template", + PICK = "pick", +} + +export enum HttpBodyMode { + CTX = "ctx", + CUSTOM = "custom", +} + +export enum HttpMethod { + GET = "GET", + POST = "POST", + PUT = "PUT", + PATCH = "PATCH", + DELETE = "DELETE", +} + +export enum RunStatus { + SUCCESS = "SUCCESS", + SKIPPED = "SKIPPED", + FAILED = "FAILED", +} diff --git a/frontend/src/lib/error-handler.ts b/frontend/src/lib/error-handler.ts index c51b10f..b96f4c9 100644 --- a/frontend/src/lib/error-handler.ts +++ b/frontend/src/lib/error-handler.ts @@ -8,9 +8,29 @@ export function handleError( let message = fallbackMessage; - if (error instanceof Error) { - message = error.message; - } else if (typeof error === "string") { + // 1. Handle our custom API error response with details + if (error && typeof error === "object") { + const errObj = error as any; + + // Check for Zod validation details + if (Array.isArray(errObj.details) && errObj.details.length > 0) { + message = errObj.details + .map((d: any) => (d.path ? `[${d.path}]: ${d.message}` : d.message)) + .join(". 
"); + } + // Check for generic error messages from backend + else if (errObj.message) { + message = errObj.message; + } else if (errObj.error) { + message = errObj.error; + } + // Handle standard Error objects + else if (error instanceof Error) { + message = error.message; + } + } + // 2. Handle simple string errors + else if (typeof error === "string") { message = error; } diff --git a/frontend/src/lib/types.ts b/frontend/src/lib/types.ts index a2d8f59..010ef6d 100644 --- a/frontend/src/lib/types.ts +++ b/frontend/src/lib/types.ts @@ -1,3 +1,11 @@ +import { + StepType, + FilterOperator, + TransformOperator, + HttpMethod, + HttpBodyMode, +} from "./enums"; + // Minimal workflow data for list views export interface WorkflowListItem { id: string; @@ -6,6 +14,38 @@ export interface WorkflowListItem { triggerPath: string; } +export interface FilterStep { + type: StepType.FILTER; + conditions: { + path: string; + op: FilterOperator; + value: any; + }[]; +} + +export interface TransformStep { + type: StepType.TRANSFORM; + ops: ( + | { op: TransformOperator.DEFAULT; path: string; value: any } + | { op: TransformOperator.TEMPLATE; to: string; template: string } + | { op: TransformOperator.PICK; paths: string[] } + )[]; +} + +export interface HttpRequestStep { + type: StepType.HTTP_REQUEST; + method: HttpMethod; + url: string; + headers?: Record; + body?: + | { mode: HttpBodyMode.CTX } + | { mode: HttpBodyMode.CUSTOM; value: Record }; + timeoutMs?: number; + retries?: number; +} + +export type WorkflowStep = FilterStep | TransformStep | HttpRequestStep; + // Full workflow data for detail views export interface Workflow extends WorkflowListItem { steps: WorkflowStep[]; @@ -13,8 +53,6 @@ export interface Workflow extends WorkflowListItem { updatedAt?: string; } -export type WorkflowStep = Record; - export interface CreateWorkflowDTO { name: string; enabled: boolean; diff --git a/tech-specs.md b/tech-specs.md index 53f7b5d..bcd439b 100644 --- a/tech-specs.md +++ 
b/tech-specs.md @@ -16,31 +16,31 @@ ## 2. Detailed Database Design (PostgreSQL) -Internal entities use sequential IDs for performance and storage efficiency. Public trigger URLs use random, non-guessable identifiers to ensure security. +Internal entities use sequential IDs. Public trigger URLs use random, non-guessable identifiers to ensure security. ### 2.1 Table: `workflows` -| Column | Data Type | Constraints | Description | -| :------------- | :------------- | :-------------------------- | :----------------------------------------------------------------------- | -| `id` | `BIGINT` | `PRIMARY KEY`, `SERIAL` | Internal sequential ID for efficient indexing and joins. | -| `name` | `VARCHAR(255)` | `NOT NULL` | Human-readable name. | -| `enabled` | `BOOLEAN` | `NOT NULL`, `DEFAULT true` | Master toggle for the trigger. | -| `trigger_path` | `VARCHAR(64)` | `NOT NULL`, `UNIQUE` | Unique random identifier (e.g., UUID or Nanoid) for the public endpoint. | -| `steps` | `JSONB` | `NOT NULL` | Serialized array of workflow steps. | -| `created_at` | `TIMESTAMP` | `DEFAULT CURRENT_TIMESTAMP` | | -| `updated_at` | `TIMESTAMP` | `DEFAULT CURRENT_TIMESTAMP` | | +| Column | Data Type | Constraints | Description | +| :------------- | :------------- | :-------------------------- | :------------------------------------------------------- | +| `id` | `INT` | `PRIMARY KEY`, `SERIAL` | Internal sequential ID for efficient indexing and joins. | +| `name` | `VARCHAR(255)` | `NOT NULL` | Human-readable name. | +| `enabled` | `BOOLEAN` | `NOT NULL`, `DEFAULT true` | Master toggle for the trigger. | +| `trigger_path` | `VARCHAR(64)` | `NOT NULL`, `UNIQUE` | Unique random UUID string for the public endpoint. | +| `steps` | `JSONB` | `NOT NULL` | Serialized array of workflow steps. 
| +| `created_at` | `TIMESTAMP` | `DEFAULT CURRENT_TIMESTAMP` | | +| `updated_at` | `TIMESTAMP` | `DEFAULT CURRENT_TIMESTAMP` | | ### 2.2 Table: `workflow_runs` -| Column | Data Type | Constraints | Description | -| :-------------- | :------------ | :--------------------------------------------------- | :------------------------------------------------------------------ | -| `id` | `BIGINT` | `PRIMARY KEY`, `SERIAL` | | -| `workflow_id` | `BIGINT` | `FOREIGN KEY`, `REFERENCES workflows(id)` | Cascading delete enabled. | -| `status` | `VARCHAR(20)` | `CHECK (status IN ('success', 'skipped', 'failed'))` | Manual enum constraint for integrity. | -| `start_time` | `TIMESTAMP` | `NOT NULL` | | -| `end_time` | `TIMESTAMP` | | | -| `error_message` | `TEXT` | | Debug info for failed runs. | -| `failure_meta` | `JSONB` | | HTTP failure details (status, headers, body) for failed HTTP steps. | +| Column | Data Type | Constraints | Description | +| :-------------- | :---------- | :---------------------------------------- | :------------------------------------------------------------------ | +| `id` | `INT` | `PRIMARY KEY`, `SERIAL` | | +| `workflow_id` | `INT` | `FOREIGN KEY`, `REFERENCES workflows(id)` | Cascading delete enabled. | +| `status` | `ENUM` | `SUCCESS`, `SKIPPED`, `FAILED` | Database-level enum for execution state. | +| `start_time` | `TIMESTAMP` | `NOT NULL` | | +| `end_time` | `TIMESTAMP` | | | +| `error_message` | `TEXT` | | Debug info for failed runs. | +| `failure_meta` | `JSONB` | | HTTP failure details (status, headers, body) for failed HTTP steps. | --- @@ -48,7 +48,8 @@ Internal entities use sequential IDs for performance and storage efficiency. Pub ### 3.1 Common Error Responses -- **400 Bad Request**: Malformed JSON, syntax errors, or validation failures. +- **400 Bad Request**: Malformed JSON, syntax errors, or validation failures (Zod mediated). +- **403 Forbidden**: Returned if the workflow is disabled. 
- **404 Not Found**: Resource (Workflow or Trigger Path) does not exist. - **500 Internal Server Error**: Unexpected server failure. @@ -57,7 +58,7 @@ Internal entities use sequential IDs for performance and storage efficiency. Pub - `GET /api/workflows`: Returns all workflows (ID, Name, Enabled, Trigger Path). - `POST /api/workflows`: Create workflow. Returns 201. - `GET /api/workflows/:id`: Returns full details including `steps`. -- `PATCH /api/workflows/:id`: Update Name, Enabled status, or Steps. +- `PATCH /api/workflows/:id`: Partial update of Name, Enabled status, or Steps. - `DELETE /api/workflows/:id`: Remove workflow. ### 3.3 Trigger API (Public) @@ -65,7 +66,7 @@ Internal entities use sequential IDs for performance and storage efficiency. Pub #### `POST /t/:trigger_path` - **Request Body**: JSON payload (seeds `ctx`). -- **Response (200)**: `{ "runId": BIGINT, "status": "success" | "skipped" | "failed" }` +- **Response (200)**: `{ "runId": "string", "status": "SUCCESS" | "SKIPPED" | "FAILED", "error"?: "string" }` - **Error (403)**: Returned if the workflow is disabled. - **Error (404)**: Returned if the trigger path is invalid. @@ -84,35 +85,47 @@ Internal entities use sequential IDs for performance and storage efficiency. Pub - `WorkflowList`: Lists `WorkflowCard` components. - `WorkflowEditor`: Combines a metadata panel and `MonacoEditor` for JSON steps. - `ThemeToggle`: Global switcher in the header. -- `RunHistory` (**Optional/Stretch**): Minimal read-only view of recent execution statuses. --- -## 6. Security & CORS Strategy +## 5. Security & CORS Strategy -To ensure production-grade isolation, we will implement a tiered CORS (Cross-Origin Resource Sharing) policy. +### 5.1 Permissive CORS -### 6.1 Admin API Security (`/api/*`) +The application currently uses a permissive CORS policy (`*`) to facilitate ease of testing and integration with various webhook sources. -- **Restriction**: Limited to the official Frontend Origin. 
-- **Config**: `Access-Control-Allow-Origin: process.env.FRONTEND_URL`.
-- **Rationale**: Ensures that workflow definitions and management capabilities are only accessible via our UI, preventing unauthorized third-party scripts from interacting with the admin backend.
-
-### 6.2 Trigger API Security (`/t/*`)
-
-- **Restriction**: Permissive (`*` or specific webhook origins).
 - **Config**: `Access-Control-Allow-Origin: *`.
-- **Rationale**: Webhooks are often triggered by external systems (Slack, Discord, Custom Scripts) from various origins. Restricting this would break the core functionality of the engine as a public webhook receiver.
+- **Rationale**: Webhooks are triggered by external systems (Slack, Discord, Custom Scripts) from various origins. A permissive policy ensures the engine acts as a reliable public webhook receiver.
 
 ---
 
-## 7. Execution Engine Design
+## 6. Execution Engine Design
 
-### 5.1 Context (`ctx`) Management
+### 6.1 Context (`ctx`) Management
 
 - **Initialization**: `ctx` is initialized from the inbound request body.
-- **Execution**: Steps process `ctx` sequentially. `Filter` steps can short-circuit, `Transform` steps can mutate, and `HTTP` steps perform side effects.
+- **Execution**: Steps process `ctx` sequentially. `Filter` steps can short-circuit, `Transform` steps mutate, and `HTTP` steps perform side effects.
+- **Template Resolution**: Placeholders like `{{path.to.field}}` are resolved against `ctx`. Missing values resolve to an empty string `""`.
+
+### 6.2 Supported Step Types
+
+1. **Filter**: Gates execution based on `eq` or `neq` conditions. Short-circuits to `SKIPPED` on failure.
+2. **Transform**:
+   - `default`: Sets a value if path is missing/null/empty.
+   - `template`: Creates a string using placeholders.
+   - `pick`: Replaces `ctx` with a subset of fields.
+3. **HTTP Request**: Performs external calls (GET, POST, etc.). Supports `ctx` or `custom` body modes.
+ +### 6.3 Retry Policy & Reliability + +- **Synchronous Retries**: HTTP requests are retried synchronously on network errors and 5xx responses. +- **Exponential Backoff**: Retries implement a delay (e.g., `100ms * 2^attempt`) to avoid overwhelming target systems. +- **Persistence**: Every run is persisted with its lifecycle state, including full failure metadata for HTTP steps. + +--- -### 5.2 Retry Policy +## 7. Deployment and Infrastructure -HTTP requests are retried **synchronously** on network errors and 5xx responses, respecting the configured retry count. A fixed delay is used between retries to keep the implementation simple and aligned with requirements. +- **Docker**: Both Frontend and Backend are containerized for consistent deployment. +- **CI/CD**: GitHub Actions are configured for automated builds and deployment to GCP (Google Cloud Platform). +- **Environment Variables**: Managed via `.env` files for local development and Secret Manager for production.