From 74fb343ead51ecd043a9ac8a26aedb39754819f3 Mon Sep 17 00:00:00 2001
From: m3mo
Date: Sat, 7 Jun 2025 11:19:31 +0200
Subject: [PATCH] Initial commit: FlowForge automation templates integration

---
 .env.example                                  |  37 ++
 .gitignore                                    |  31 ++
 README.md                                     |  52 ++
 backend/Dockerfile                            |  16 +
 backend/Dockerfile.dev                        |  18 +
 backend/create-admin.js                       |  43 ++
 backend/fix-user.sql                          |  79 +++
 backend/logs/combined.log                     |  72 +++
 backend/logs/error.log                        |  34 ++
 backend/migrations/20250607_initial_schema.js |  62 +++
 backend/package.json                          |  31 ++
 backend/setup-db.sql                          |  66 +++
 backend/src/config/db.js                      |  34 ++
 backend/src/controllers/auth.js               | 146 +++++
 backend/src/controllers/node.js               |  53 ++
 backend/src/controllers/user.js               |  60 ++
 backend/src/controllers/workflow.js           | 190 +++++++
 backend/src/index.js                          |  58 ++
 backend/src/middleware/auth.js                |  57 ++
 backend/src/middleware/errorHandler.js        |  41 ++
 backend/src/models/user.js                    |  72 +++
 backend/src/models/workflow.js                | 181 ++++++
 backend/src/nodes/delay/meta.json             |  18 +
 backend/src/nodes/delay/runner.js             |  47 ++
 backend/src/nodes/email/meta.json             |  33 ++
 backend/src/nodes/email/runner.js             |  78 +++
 backend/src/nodes/function/meta.json          |  19 +
 backend/src/nodes/function/runner.js          |  81 +++
 backend/src/nodes/http-request/meta.json      |  22 +
 backend/src/nodes/http-request/runner.js      |  81 +++
 backend/src/nodes/logger/meta.json            |  19 +
 backend/src/nodes/logger/runner.js            |  55 ++
 backend/src/nodes/webhook/meta.json           |  17 +
 backend/src/nodes/webhook/runner.js           |  50 ++
 backend/src/routes/auth.js                    |  14 +
 backend/src/routes/nodes.js                   |  15 +
 backend/src/routes/users.js                   |  15 +
 backend/src/routes/workflows.js               |  26 +
 backend/src/services/nodeRegistry.js          | 116 ++++
 backend/src/services/workflowExecutor.js      | 306 ++++++++++
 backend/src/utils/logger.js                   |  34 ++
 backend/test-bcrypt.js                        |  24 +
 docker-compose.dev.yml                        |  68 +++
 docker-compose.yml                            |  73 +++
 frontend/Dockerfile                           |  29 +
 frontend/Dockerfile.dev                       |  18 +
 frontend/integration-guide.md                 | 203 +++++++
 frontend/nginx/nginx.conf                     |  45 ++
 frontend/package.json                         |  46 ++
 frontend/public/index.html                    |  23 +
 frontend/public/manifest.json                 |  25 +
 frontend/src/App.js                           |  67 +++
 frontend/src/components/common/Modal.js       |  92 +++
 .../components/execution/ExecutionHistory.js  | 304 ++++++++++
 .../components/execution/ExecutionResults.js  | 262 +++++++++
 frontend/src/components/layouts/AuthLayout.js |  37 ++
 frontend/src/components/layouts/MainLayout.js | 187 +++++++
 .../components/scheduling/CronScheduler.js    | 353 ++++++++++++
 .../src/components/workflow/CustomNode.js     | 108 ++++
 .../components/workflow/NodeConfigPanel.js    | 275 +++++++++
 .../src/components/workflow/NodeTester.js     | 197 +++++++
 .../components/workflow/NodeTypeSelector.js   |  77 +++
 .../src/components/workflow/VersionHistory.js | 280 ++++++++++
 .../src/components/workflow/WebhookManager.js | 137 +++++
 .../workflow/WorkflowEditorActions.js         | 158 ++++++
 .../components/workflow/WorkflowEditorTabs.js |  64 +++
 frontend/src/context/AuthContext.js           | 141 +++++
 frontend/src/hooks/useAuth.js                 |   6 +
 frontend/src/index.css                        |  72 +++
 frontend/src/index.js                         |  20 +
 frontend/src/pages/Dashboard.js               | 182 ++++
 frontend/src/pages/Login.js                   | 106 ++++
 frontend/src/pages/NotFound.js                |  31 ++
 frontend/src/pages/Profile.js                 | 185 ++++++
 frontend/src/pages/Register.js                | 135 +++++
 frontend/src/pages/TemplatesPage.js           | 137 +++++
 frontend/src/pages/TestPage.js                | 166 ++++++
 frontend/src/pages/WorkflowEditor.js          | 503 +++++++++++++++++
 frontend/src/pages/WorkflowList.js            | 201 +++++++
 frontend/src/services/api.js                  |  36 ++
 frontend/src/services/nodes.js                |  46 ++
 frontend/src/services/workflow.js             |  93 ++++
 frontend/src/templates/templates.js           | 526 ++++++++++++++++++
 frontend/src/test-components.js               | 169 ++++++
 frontend/tailwind.config.js                   |  41 ++
 85 files changed, 8427 insertions(+)
 create mode 100644 .env.example
 create mode 100644 .gitignore
 create mode 100644 README.md
 create mode 100644 backend/Dockerfile
 create mode 100644 backend/Dockerfile.dev
 create mode 100644 backend/create-admin.js
 create mode 100644 backend/fix-user.sql
 create mode 100644 backend/logs/combined.log
 create mode 100644 backend/logs/error.log
 create mode 100644 backend/migrations/20250607_initial_schema.js
 create mode 100644 backend/package.json
 create mode 100644 backend/setup-db.sql
 create mode 100644 backend/src/config/db.js
 create mode 100644 backend/src/controllers/auth.js
 create mode 100644 backend/src/controllers/node.js
 create mode 100644 backend/src/controllers/user.js
 create mode 100644 backend/src/controllers/workflow.js
 create mode 100644 backend/src/index.js
 create mode 100644 backend/src/middleware/auth.js
 create mode 100644 backend/src/middleware/errorHandler.js
 create mode 100644 backend/src/models/user.js
 create mode 100644 backend/src/models/workflow.js
 create mode 100644 backend/src/nodes/delay/meta.json
 create mode 100644 backend/src/nodes/delay/runner.js
 create mode 100644 backend/src/nodes/email/meta.json
 create mode 100644 backend/src/nodes/email/runner.js
 create mode 100644 backend/src/nodes/function/meta.json
 create mode 100644 backend/src/nodes/function/runner.js
 create mode 100644 backend/src/nodes/http-request/meta.json
 create mode 100644 backend/src/nodes/http-request/runner.js
 create mode 100644 backend/src/nodes/logger/meta.json
 create mode 100644 backend/src/nodes/logger/runner.js
 create mode 100644 backend/src/nodes/webhook/meta.json
 create mode 100644 backend/src/nodes/webhook/runner.js
 create mode 100644 backend/src/routes/auth.js
 create mode 100644 backend/src/routes/nodes.js
 create mode 100644 backend/src/routes/users.js
 create mode 100644 backend/src/routes/workflows.js
 create mode 100644 backend/src/services/nodeRegistry.js
 create mode 100644 backend/src/services/workflowExecutor.js
 create mode 100644 backend/src/utils/logger.js
 create mode 100644 backend/test-bcrypt.js
 create mode 100644 docker-compose.dev.yml
 create mode 100644 docker-compose.yml
 create mode 100644 frontend/Dockerfile
 create mode 100644 frontend/Dockerfile.dev
 create mode 100644 frontend/integration-guide.md
 create mode 100644 frontend/nginx/nginx.conf
 create mode 100644 frontend/package.json
 create mode 100644 frontend/public/index.html
 create mode 100644 frontend/public/manifest.json
 create mode 100644 frontend/src/App.js
 create mode 100644 frontend/src/components/common/Modal.js
 create mode 100644 frontend/src/components/execution/ExecutionHistory.js
 create mode 100644 frontend/src/components/execution/ExecutionResults.js
 create mode 100644 frontend/src/components/layouts/AuthLayout.js
 create mode 100644 frontend/src/components/layouts/MainLayout.js
 create mode 100644 frontend/src/components/scheduling/CronScheduler.js
 create mode 100644 frontend/src/components/workflow/CustomNode.js
 create mode 100644 frontend/src/components/workflow/NodeConfigPanel.js
 create mode 100644 frontend/src/components/workflow/NodeTester.js
 create mode 100644 frontend/src/components/workflow/NodeTypeSelector.js
 create mode 100644 frontend/src/components/workflow/VersionHistory.js
 create mode 100644 frontend/src/components/workflow/WebhookManager.js
 create mode 100644 frontend/src/components/workflow/WorkflowEditorActions.js
 create mode 100644 frontend/src/components/workflow/WorkflowEditorTabs.js
 create mode 100644 frontend/src/context/AuthContext.js
 create mode 100644 frontend/src/hooks/useAuth.js
 create mode 100644 frontend/src/index.css
 create mode 100644 frontend/src/index.js
 create mode 100644 frontend/src/pages/Dashboard.js
 create mode 100644 frontend/src/pages/Login.js
 create mode 100644 frontend/src/pages/NotFound.js
 create mode 100644 frontend/src/pages/Profile.js
 create mode 100644 frontend/src/pages/Register.js
 create mode 100644 frontend/src/pages/TemplatesPage.js
 create mode 100644 frontend/src/pages/TestPage.js
 create mode 100644 frontend/src/pages/WorkflowEditor.js
 create mode 100644 frontend/src/pages/WorkflowList.js
 create mode 100644 frontend/src/services/api.js
 create mode 100644 frontend/src/services/nodes.js
 create mode 100644 frontend/src/services/workflow.js
 create mode 100644 frontend/src/templates/templates.js
 create mode 100644 frontend/src/test-components.js
 create mode 100644 frontend/tailwind.config.js

diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..68390f5
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,37 @@
+# Database Configuration
+DATABASE_URL=postgres://postgres:postgres@postgres:5432/flowforge
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=postgres
+POSTGRES_DB=flowforge
+
+# Redis Configuration
+REDIS_URL=redis://redis:6379
+
+# JWT Authentication
+JWT_SECRET=change_this_to_a_secure_random_string
+JWT_EXPIRATION=86400
+
+# Server Configuration
+PORT=4000
+NODE_ENV=production
+
+# Frontend URL (for CORS)
+FRONTEND_URL=https://your-domain.com
+
+# Email Configuration (optional)
+SMTP_HOST=smtp.example.com
+SMTP_PORT=587
+SMTP_USER=user@example.com
+SMTP_PASS=your_password
+SMTP_FROM=noreply@example.com
+
+# Logging
+LOG_LEVEL=info
+
+# Storage Configuration
+STORAGE_TYPE=local
+STORAGE_PATH=/app/storage
+
+# Rate Limiting
+RATE_LIMIT_WINDOW=15
+RATE_LIMIT_MAX=100
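`.env.example` documents the runtime configuration. For context, a minimal sketch of how the backend might load and validate these variables at startup — hypothetical code, not part of this commit; it assumes the `dotenv` package and the variable names defined above:

```js
// config-sketch.js — illustrative only, not a file in this patch.
// Reads .env via dotenv and applies the defaults implied by .env.example.
require('dotenv').config();

const config = {
  databaseUrl: process.env.DATABASE_URL,
  redisUrl: process.env.REDIS_URL || 'redis://redis:6379',
  jwtSecret: process.env.JWT_SECRET,
  jwtExpiration: Number(process.env.JWT_EXPIRATION || 86400), // seconds
  port: Number(process.env.PORT || 4000),
  frontendUrl: process.env.FRONTEND_URL,
};

// Fail fast on values that have no safe default.
for (const key of ['databaseUrl', 'jwtSecret']) {
  if (!config[key]) throw new Error(`Missing required environment value: ${key}`);
}

module.exports = config;
```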
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..2babcd5
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,31 @@
+# Dependencies
+node_modules/
+npm-debug.log
+yarn-debug.log
+yarn-error.log
+
+# Environment variables
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# Build outputs
+dist/
+build/
+coverage/
+
+# Logs
+logs/
+*.log
+
+# Docker volumes
+data/
+
+# IDE and editor files
+.idea/
+.vscode/
+*.swp
+*.swo
+.DS_Store
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3a60893
--- /dev/null
+++ b/README.md
@@ -0,0 +1,52 @@
+# FlowForge
+
+A self-hosted automation platform that allows you to build workflows visually with a drag-and-drop interface.
+
+## Features
+
+- Visual drag-and-drop flow editor
+- Modular node system for extensibility
+- User authentication and workflow management
+- Background task processing with queues
+- Docker-based deployment
+
+## Architecture
+
+- **Frontend**: React.js, Tailwind CSS, react-flow
+- **Backend**: Node.js + Express.js
+- **Queue System**: Redis + BullMQ
+- **Database**: PostgreSQL
+
+## Getting Started
+
+### Prerequisites
+
+- Docker and Docker Compose
+- Node.js 18+ (for local development)
+
+### Development Setup
+
+1. Clone the repository
+2. Install dependencies:
+   ```
+   cd frontend && npm install
+   cd ../backend && npm install
+   ```
+3. Start the development environment:
+   ```
+   docker-compose -f docker-compose.dev.yml up
+   ```
+4. Access the application at `http://localhost:3000`
+
+### Production Deployment
+
+1. Configure environment variables in `.env.production`
+2. Build and start the containers:
+   ```
+   docker-compose up -d
+   ```
+3. Access the application at your configured domain
+
+## License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
diff --git a/backend/Dockerfile b/backend/Dockerfile
new file mode 100644
index 0000000..a5ab4f9
--- /dev/null
+++ b/backend/Dockerfile
@@ -0,0 +1,16 @@
+FROM node:18-alpine
+
+WORKDIR /app
+
+# Copy package files and install dependencies
+COPY package*.json ./
+RUN npm ci --only=production
+
+# Copy source code
+COPY . .
+
+# Expose port
+EXPOSE 4000
+
+# Start server
+CMD ["node", "src/index.js"]
diff --git a/backend/Dockerfile.dev b/backend/Dockerfile.dev
new file mode 100644
index 0000000..7a2a3b7
--- /dev/null
+++ b/backend/Dockerfile.dev
@@ -0,0 +1,18 @@
+FROM node:18-alpine
+
+WORKDIR /app
+
+# Copy package files
+COPY package*.json ./
+
+# Install dependencies
+RUN npm install
+
+# Copy source code (this will be overridden by volume mount in dev)
+COPY . .
+
+# Expose port
+EXPOSE 4000
+
+# Start development server with nodemon for hot reloading
+CMD ["npx", "nodemon", "src/index.js"]
diff --git a/backend/create-admin.js b/backend/create-admin.js
new file mode 100644
index 0000000..f2a99a4
--- /dev/null
+++ b/backend/create-admin.js
@@ -0,0 +1,43 @@
+const bcrypt = require('bcrypt');
+const { v4: uuidv4 } = require('uuid');
+const { db } = require('./src/config/db');
+
+async function createAdminUser() {
+  try {
+    // Generate a new hash for the password
+    const password = 'FlowForge123!';
+    const salt = await bcrypt.genSalt(10);
+    const hash = await bcrypt.hash(password, salt);
+
+    console.log('Generated hash:', hash);
+
+    // Generate a UUID for the user
+    const userId = uuidv4();
+
+    // Delete any existing admin user
+    await db('users').where({ email: 'admin@flowforge.test' }).del();
+
+    // Insert the new admin user
+    const [user] = await db('users').insert({
+      id: userId,
+      email: 'admin@flowforge.test',
+      password: hash,
+      created_at: new Date(),
+      updated_at: new Date()
+    }).returning(['id', 'email', 'created_at']);
+
+    console.log('Admin user created successfully:', user);
+
+    // Verify the password works
+    const dbUser = await db('users').where({ email: 'admin@flowforge.test' }).first();
+    const isValid = await bcrypt.compare(password, dbUser.password);
+    console.log('Password validation:', isValid);
+
+    process.exit(0);
+  } catch (error) {
+    console.error('Error creating admin user:', error);
+    process.exit(1);
+  }
+}
+
+createAdminUser();
diff --git a/backend/fix-user.sql b/backend/fix-user.sql
new file mode 100644
index 0000000..f196752
--- /dev/null
+++ b/backend/fix-user.sql
@@ -0,0 +1,79 @@
+-- Drop existing tables with foreign key constraints first
+DROP TABLE IF EXISTS webhooks;
+DROP TABLE IF EXISTS workflow_schedules;
+DROP TABLE IF EXISTS workflow_executions;
+DROP TABLE IF EXISTS workflow_versions;
+DROP TABLE IF EXISTS workflows;
+DROP TABLE IF EXISTS users;
+
+-- Create users table with UUID as primary key
+CREATE TABLE users (
+  id UUID PRIMARY KEY,
+  email VARCHAR(255) UNIQUE NOT NULL,
+  password VARCHAR(255) NOT NULL,
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Create workflows table
+CREATE TABLE workflows (
+  id UUID PRIMARY KEY,
+  name VARCHAR(255) NOT NULL,
+  description TEXT,
+  nodes JSONB,
+  edges JSONB,
+  user_id UUID REFERENCES users(id),
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Create workflow_versions table
+CREATE TABLE workflow_versions (
+  id UUID PRIMARY KEY,
+  workflow_id UUID REFERENCES workflows(id),
+  version INTEGER NOT NULL,
+  nodes JSONB,
+  edges JSONB,
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Create workflow_executions table
+CREATE TABLE workflow_executions (
+  id UUID PRIMARY KEY,
+  workflow_id UUID REFERENCES workflows(id),
+  status VARCHAR(50) NOT NULL,
+  started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+  completed_at TIMESTAMP,
+  logs JSONB,
+  results JSONB
+);
+
+-- Create workflow_schedules table
+CREATE TABLE workflow_schedules (
+  id UUID PRIMARY KEY,
+  workflow_id UUID REFERENCES workflows(id),
+  cron_expression VARCHAR(100) NOT NULL,
+  is_active BOOLEAN DEFAULT TRUE,
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Create webhooks table
+CREATE TABLE webhooks (
+  id UUID PRIMARY KEY,
+  workflow_id UUID REFERENCES workflows(id),
+  node_id VARCHAR(255) NOT NULL,
+  path VARCHAR(255) NOT NULL,
+  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- Insert a default admin user with UUID
+-- Password is 'FlowForge123!' (hashed with bcrypt)
+INSERT INTO users (id, email, password, created_at, updated_at)
+VALUES (
+  '550e8400-e29b-41d4-a716-446655440000',
+  'admin@flowforge.test',
+  '$2b$10$3euPcmQFCiblsZeEu5s7p.9wVdLajnYhAbcjkru4KkUGBIm3WVYjK',
+  CURRENT_TIMESTAMP,
+  CURRENT_TIMESTAMP
+);
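The seed row above embeds a precomputed bcrypt hash whose plaintext, per the SQL comment, is `FlowForge123!`. A standalone check in the spirit of the bundled `backend/test-bcrypt.js` (whose contents are not shown in this excerpt) — a hypothetical script, assuming the `bcrypt` package from the backend's dependencies:

```js
// verify-seed-hash.js — illustrative sketch, not a file in this patch.
// Prints whether the hash seeded in fix-user.sql matches the documented password.
const bcrypt = require('bcrypt');

const seededHash = '$2b$10$3euPcmQFCiblsZeEu5s7p.9wVdLajnYhAbcjkru4KkUGBIm3WVYjK';

bcrypt.compare('FlowForge123!', seededHash)
  .then((ok) => console.log('Seed hash matches documented password:', ok))
  .catch((err) => console.error(err));
```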
diff --git a/backend/logs/combined.log b/backend/logs/combined.log
new file mode 100644
index 0000000..2491e61
--- /dev/null
+++ b/backend/logs/combined.log
@@ -0,0 +1,72 @@
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 07:33:40"}
+{"address":"172.24.0.4","code":"ECONNREFUSED","errno":-111,"level":"error","message":"Database connection failed: connect ECONNREFUSED 172.24.0.4:5432","port":5432,"service":"flowforge-backend","stack":"Error: connect ECONNREFUSED 172.24.0.4:5432\n    at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1555:16)","syscall":"connect","timestamp":"2025-06-07 07:33:40"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 07:40:00"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 07:40:00"}
+{"level":"error","message":"error: select * from \"users\" where \"email\" = $1 limit $2 - relation \"users\" does not exist","method":"POST","path":"/api/auth/register","service":"flowforge-backend","stack":"error: select * from \"users\" where \"email\" = $1 limit $2 - relation \"users\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 07:46:07"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 07:51:51"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 07:51:51"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:04:49"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:04:49"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:04:59"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:04:59"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:05:34"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:05:34"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:05:46"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:05:46"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:05:54"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:11:39"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:11:51"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:38:45"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:38:57"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:03"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:09"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:11"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:16"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:18"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:40:55"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:40:55"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:22"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:26"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:27"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at /app/src/models/workflow.js:49:17\n    at Array.map (<anonymous>)\n    at getWorkflowsByUserId (/app/src/models/workflow.js:47:20)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getAll (/app/src/controllers/workflow.js:52:23)","timestamp":"2025-06-07 08:41:35"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at /app/src/models/workflow.js:49:17\n    at Array.map (<anonymous>)\n    at getWorkflowsByUserId (/app/src/models/workflow.js:47:20)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getAll (/app/src/controllers/workflow.js:52:23)","timestamp":"2025-06-07 08:41:35"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:39"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:41"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:43:08"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:43:09"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:43:55"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:43:55"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:44:16"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:44:16"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:36:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:47:54"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:01"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:01"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:07"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:07"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/88b47234-6ebc-47b5-87a7-f4b70976083b","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:11"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/88b47234-6ebc-47b5-87a7-f4b70976083b","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:11"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/15c42683-3af7-4581-9d2d-0fab1dab07f0","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:14"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/15c42683-3af7-4581-9d2d-0fab1dab07f0","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:14"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:49:50"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:49:50"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:50:33"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:50:33"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:50:57"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:50:57"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:54:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:51:03"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:08"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:08"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:12"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:96:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:12"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:52:00"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:52:00"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:52:12"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:52:12"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:53:54"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:53:54"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:54:09"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:54:09"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:55:59"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:55:59"}
+{"level":"info","message":"Server running on port 4000","service":"flowforge-backend","timestamp":"2025-06-07 08:56:07"}
+{"level":"info","message":"Database connection established successfully","service":"flowforge-backend","timestamp":"2025-06-07 08:56:07"}
diff --git a/backend/logs/error.log b/backend/logs/error.log
new file mode 100644
index 0000000..ba79ef3
--- /dev/null
+++ b/backend/logs/error.log
@@ -0,0 +1,34 @@
+{"address":"172.24.0.4","code":"ECONNREFUSED","errno":-111,"level":"error","message":"Database connection failed: connect ECONNREFUSED 172.24.0.4:5432","port":5432,"service":"flowforge-backend","stack":"Error: connect ECONNREFUSED 172.24.0.4:5432\n    at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1555:16)","syscall":"connect","timestamp":"2025-06-07 07:33:40"}
+{"level":"error","message":"error: select * from \"users\" where \"email\" = $1 limit $2 - relation \"users\" does not exist","method":"POST","path":"/api/auth/register","service":"flowforge-backend","stack":"error: select * from \"users\" where \"email\" = $1 limit $2 - relation \"users\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 07:46:07"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:11:39"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:11:51"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:38:45"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:38:57"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:03"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:09"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:11"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:16"}
+{"level":"error","message":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"error: insert into \"workflows\" (\"connections\", \"created_at\", \"id\", \"name\", \"nodes\", \"updated_at\", \"user_id\") values ($1, $2, $3, $4, $5, $6, $7) returning \"id\", \"user_id\", \"name\", \"nodes\", \"connections\", \"created_at\", \"updated_at\" - column \"connections\" of relation \"workflows\" does not exist\n    at Parser.parseErrorMessage (/app/node_modules/pg-protocol/dist/parser.js:285:98)\n    at Parser.handlePacket (/app/node_modules/pg-protocol/dist/parser.js:122:29)\n    at Parser.parse (/app/node_modules/pg-protocol/dist/parser.js:35:38)\n    at Socket.<anonymous> (/app/node_modules/pg-protocol/dist/index.js:11:42)\n    at Socket.emit (node:events:517:28)\n    at addChunk (node:internal/streams/readable:368:12)\n    at readableAddChunk (node:internal/streams/readable:341:9)\n    at Readable.push (node:internal/streams/readable:278:10)\n    at TCP.onStreamRead (node:internal/stream_base_commons:190:23)","timestamp":"2025-06-07 08:39:18"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:22"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:26"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:27"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at /app/src/models/workflow.js:49:17\n    at Array.map (<anonymous>)\n    at getWorkflowsByUserId (/app/src/models/workflow.js:47:20)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getAll (/app/src/controllers/workflow.js:52:23)","timestamp":"2025-06-07 08:41:35"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at /app/src/models/workflow.js:49:17\n    at Array.map (<anonymous>)\n    at getWorkflowsByUserId (/app/src/models/workflow.js:47:20)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getAll (/app/src/controllers/workflow.js:52:23)","timestamp":"2025-06-07 08:41:35"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:39"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:30:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:41:41"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at createWorkflow (/app/src/models/workflow.js:36:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:47:54"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:01"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:01"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:07"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:07"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/88b47234-6ebc-47b5-87a7-f4b70976083b","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n    at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:11"}
+{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/88b47234-6ebc-47b5-87a7-f4b70976083b","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n    at JSON.parse (<anonymous>)\n    at getWorkflowById (/app/src/models/workflow.js:78:17)\n    at
process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:11"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/15c42683-3af7-4581-9d2d-0fab1dab07f0","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:78:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:14"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/15c42683-3af7-4581-9d2d-0fab1dab07f0","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:78:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:48:14"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:50:57"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:50:57"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"POST","path":"/api/workflows","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at createWorkflow (/app/src/models/workflow.js:54:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async create (/app/src/controllers/workflow.js:30:22)","timestamp":"2025-06-07 08:51:03"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:08"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/971f09e0-6490-4be3-a6cb-928e9f2ca5a5","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections 
(node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:08"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:12"} +{"level":"error","message":"SyntaxError: Unexpected token o in JSON at position 1","method":"GET","path":"/api/workflows/7140abc0-968a-4728-925b-27f49f139b6d","service":"flowforge-backend","stack":"SyntaxError: Unexpected token o in JSON at position 1\n at JSON.parse ()\n at getWorkflowById (/app/src/models/workflow.js:96:17)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async getById (/app/src/controllers/workflow.js:75:22)","timestamp":"2025-06-07 08:51:12"} diff --git a/backend/migrations/20250607_initial_schema.js b/backend/migrations/20250607_initial_schema.js new file mode 100644 index 0000000..d37ae1f --- /dev/null +++ b/backend/migrations/20250607_initial_schema.js @@ -0,0 +1,62 @@ +/** + * Initial database schema for FlowForge + */ +exports.up = function(knex) { + return knex.schema + // Users table + .createTable('users', function(table) { + table.uuid('id').primary(); + table.string('email').notNullable().unique(); + table.string('password').notNullable(); + table.timestamp('created_at').defaultTo(knex.fn.now()); + table.timestamp('updated_at').defaultTo(knex.fn.now()); + }) + + // Workflows table + .createTable('workflows', function(table) { + table.uuid('id').primary(); + table.uuid('user_id').notNullable().references('id').inTable('users').onDelete('CASCADE'); + table.string('name').notNullable(); + table.jsonb('nodes').notNullable().defaultTo('[]'); + table.jsonb('connections').notNullable().defaultTo('[]'); + table.timestamp('created_at').defaultTo(knex.fn.now()); + table.timestamp('updated_at').defaultTo(knex.fn.now()); + + // Index for faster user-based queries + table.index('user_id'); + }) + + // Workflow logs table + .createTable('workflow_logs', function(table) { + table.uuid('id').primary(); + table.uuid('workflow_id').notNullable().references('id').inTable('workflows').onDelete('CASCADE'); + table.jsonb('logs').notNullable().defaultTo('[]'); + table.timestamp('created_at').defaultTo(knex.fn.now()); + + // Index for faster workflow-based queries + table.index('workflow_id'); + }) + + // Webhook registrations table + .createTable('webhooks', function(table) { + table.uuid('id').primary(); + table.uuid('workflow_id').notNullable().references('id').inTable('workflows').onDelete('CASCADE'); + table.uuid('node_id').notNullable(); + table.string('path').notNullable().unique(); + table.string('method').notNullable().defaultTo('POST'); + table.timestamp('created_at').defaultTo(knex.fn.now()); + table.timestamp('updated_at').defaultTo(knex.fn.now()); + + // Indexes + table.index('workflow_id'); + table.index('path'); + }); +}; + +exports.down = function(knex) { + return knex.schema + .dropTableIfExists('webhooks') + .dropTableIfExists('workflow_logs') + .dropTableIfExists('workflows') + .dropTableIfExists('users'); +}; diff --git a/backend/package.json b/backend/package.json new file mode 100644 index 
0000000..473b44c --- /dev/null +++ b/backend/package.json @@ -0,0 +1,34 @@ +{ + "name": "flowforge-backend", + "version": "1.0.0", + "description": "Backend for FlowForge automation platform", + "main": "src/index.js", + "scripts": { + "start": "node src/index.js", + "dev": "nodemon src/index.js", + "test": "jest" + }, + "dependencies": { + "axios": "^1.4.0", + "bcrypt": "^5.1.0", + "bull": "^4.10.4", + "cors": "^2.8.5", + "dotenv": "^16.0.3", + "express": "^4.18.2", + "express-rate-limit": "^6.7.0", + "helmet": "^6.1.5", + "jsonwebtoken": "^9.0.0", + "knex": "^2.4.2", + "nodemailer": "^6.9.1", + "pg": "^8.10.0", + "redis": "^4.6.6", + "uuid": "^9.0.0", + "vm2": "^3.9.19", + "winston": "^3.8.2" + }, + "devDependencies": { + "jest": "^29.5.0", + "nodemon": "^2.0.22", + "supertest": "^6.3.3" + } +} diff --git a/backend/setup-db.sql b/backend/setup-db.sql new file mode 100644 index 0000000..8931c94 --- /dev/null +++ b/backend/setup-db.sql @@ -0,0 +1,66 @@ +-- Create users table +CREATE TABLE IF NOT EXISTS users ( + id SERIAL PRIMARY KEY, + email VARCHAR(255) UNIQUE NOT NULL, + password VARCHAR(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Create workflows table +CREATE TABLE IF NOT EXISTS workflows ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + description TEXT, + nodes JSONB, + edges JSONB, + user_id INTEGER REFERENCES users(id), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Create workflow_versions table +CREATE TABLE IF NOT EXISTS workflow_versions ( + id SERIAL PRIMARY KEY, + workflow_id INTEGER REFERENCES workflows(id), + version INTEGER NOT NULL, + nodes JSONB, + edges JSONB, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Create workflow_executions table +CREATE TABLE IF NOT EXISTS workflow_executions ( + id SERIAL PRIMARY KEY, + workflow_id INTEGER REFERENCES workflows(id), + status VARCHAR(50) NOT NULL, + started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + completed_at TIMESTAMP, + logs JSONB, + results JSONB +); + +-- Create workflow_schedules table +CREATE TABLE IF NOT EXISTS workflow_schedules ( + id SERIAL PRIMARY KEY, + workflow_id INTEGER REFERENCES workflows(id), + cron_expression VARCHAR(100) NOT NULL, + is_active BOOLEAN DEFAULT TRUE, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Create webhooks table +CREATE TABLE IF NOT EXISTS webhooks ( + id SERIAL PRIMARY KEY, + workflow_id INTEGER REFERENCES workflows(id), + node_id VARCHAR(255) NOT NULL, + path VARCHAR(255) NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Insert a default admin user +INSERT INTO users (email, password) +VALUES ('admin@flowforge.test', '$2b$10$3euPcmQFCiblsZeEu5s7p.9wVdLajnYhAbcjkru4KkUGBIm3WVYjK') +ON CONFLICT (email) DO NOTHING; +-- Password is 'FlowForge123!'
(pre-hashed with bcrypt) diff --git a/backend/src/config/db.js b/backend/src/config/db.js new file mode 100644 index 0000000..9c39187 --- /dev/null +++ b/backend/src/config/db.js @@ -0,0 +1,34 @@ +const knex = require('knex'); +const logger = require('../utils/logger'); + +// Initialize knex with PostgreSQL configuration +const db = knex({ + client: 'pg', + connection: process.env.DATABASE_URL, + pool: { + min: 2, + max: 10 + }, + migrations: { + tableName: 'knex_migrations', + directory: '../migrations' + }, + debug: process.env.NODE_ENV === 'development' +}); + +// Test database connection +const testConnection = async () => { + try { + await db.raw('SELECT 1'); + logger.info('Database connection established successfully'); + return true; + } catch (error) { + logger.error('Database connection failed:', error); + return false; + } +}; + +// Call the test function when this module is imported +testConnection(); + +module.exports = { db, testConnection }; diff --git a/backend/src/controllers/auth.js b/backend/src/controllers/auth.js new file mode 100644 index 0000000..9eb11db --- /dev/null +++ b/backend/src/controllers/auth.js @@ -0,0 +1,146 @@ +const jwt = require('jsonwebtoken'); +const { createUser, getUserByEmail, verifyPassword } = require('../models/user'); +const logger = require('../utils/logger'); + +/** + * Register a new user + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const register = async (req, res, next) => { + try { + const { email, password } = req.body; + + // Validate input + if (!email || !password) { + return res.status(400).json({ + error: 'Bad Request', + message: 'Email and password are required' + }); + } + + // Check if user already exists + const existingUser = await getUserByEmail(email); + if (existingUser) { + return res.status(409).json({ + error: 'Conflict', + message: 'User with this email already exists' + }); + } + + // Create new user + const user = await createUser({ email, password }); + + // Generate JWT token + const token = jwt.sign( + { id: user.id, email: user.email }, + process.env.JWT_SECRET, + { expiresIn: '24h' } + ); + + // Return user data and token + res.status(201).json({ + message: 'User registered successfully', + user: { + id: user.id, + email: user.email, + createdAt: user.created_at + }, + token + }); + } catch (error) { + next(error); + } +}; + +/** + * Login user + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const login = async (req, res, next) => { + try { + const { email, password } = req.body; + + // Validate input + if (!email || !password) { + return res.status(400).json({ + error: 'Bad Request', + message: 'Email and password are required' + }); + } + + // Check if user exists + const user = await getUserByEmail(email); + if (!user) { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Invalid credentials' + }); + } + + // Verify password + const isPasswordValid = await verifyPassword(password, user.password); + if (!isPasswordValid) { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Invalid credentials' + }); + } + + // Generate JWT token + const token = jwt.sign( + { id: user.id, email: user.email }, + process.env.JWT_SECRET, + { expiresIn: '24h' } + ); + + // Return user data and token + res.status(200).json({ + message: 'Login successful', + user: 
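+        /* A minimal sketch of exercising this endpoint from a client, assuming
+           the default port from src/index.js and the admin user seeded by
+           setup-db.sql (swap in real credentials):
+
+             const res = await fetch('http://localhost:4000/api/auth/login', {
+               method: 'POST',
+               headers: { 'Content-Type': 'application/json' },
+               body: JSON.stringify({ email: 'admin@flowforge.test',
+                                      password: 'FlowForge123!' })
+             });
+             const { token } = await res.json();
+
+           The token is then sent as "Authorization: Bearer <token>". */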
{ + id: user.id, + email: user.email, + createdAt: user.created_at + }, + token + }); + } catch (error) { + next(error); + } +}; + +/** + * Get current user profile + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getProfile = async (req, res, next) => { + try { + // User is already attached to req by auth middleware + res.status(200).json({ + user: req.user + }); + } catch (error) { + next(error); + } +}; + +/** + * Logout user (client-side only) + * @param {Object} req - Express request object + * @param {Object} res - Express response object + */ +const logout = (req, res) => { + res.status(200).json({ message: 'Logout successful' }); +}; + +module.exports = { + register, + login, + getProfile, + logout +}; diff --git a/backend/src/controllers/node.js b/backend/src/controllers/node.js new file mode 100644 index 0000000..0774b9b --- /dev/null +++ b/backend/src/controllers/node.js @@ -0,0 +1,53 @@ +const nodeRegistry = require('../services/nodeRegistry'); +const logger = require('../utils/logger'); + +/** + * Get all available node types + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getAllNodeTypes = async (req, res, next) => { + try { + const nodeTypes = nodeRegistry.getAllNodeTypes(); + + res.status(200).json({ + count: nodeTypes.length, + nodes: nodeTypes + }); + } catch (error) { + next(error); + } +}; + +/** + * Get a specific node type by type + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getNodeType = async (req, res, next) => { + try { + const { type } = req.params; + + const nodeType = nodeRegistry.getNodeType(type); + + if (!nodeType) { + return res.status(404).json({ + error: 'Not Found', + message: `Node type not found: ${type}` + }); + } + + res.status(200).json({ + node: nodeType.meta + }); + } catch (error) { + next(error); + } +}; + +module.exports = { + getAllNodeTypes, + getNodeType +}; diff --git a/backend/src/controllers/user.js b/backend/src/controllers/user.js new file mode 100644 index 0000000..22d58c3 --- /dev/null +++ b/backend/src/controllers/user.js @@ -0,0 +1,60 @@ +const { getUserById } = require('../models/user'); +const logger = require('../utils/logger'); + +/** + * Get current user profile + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getProfile = async (req, res, next) => { + try { + // User is already attached to req by auth middleware + const userId = req.user.id; + + // Get full user details from database + const user = await getUserById(userId); + + if (!user) { + return res.status(404).json({ + error: 'Not Found', + message: 'User not found' + }); + } + + // Return user data (excluding password) + res.status(200).json({ + user: { + id: user.id, + email: user.email, + createdAt: user.created_at + } + }); + } catch (error) { + next(error); + } +}; + +/** + * Update user profile + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const updateProfile = async (req, res, next) => { + try { + // TODO: Implement user profile update functionality + // 
This would include updating user details in the database + + res.status(501).json({ + message: 'Profile update functionality not yet implemented' + }); + } catch (error) { + next(error); + } +}; + +module.exports = { + getProfile, + updateProfile +}; diff --git a/backend/src/controllers/workflow.js b/backend/src/controllers/workflow.js new file mode 100644 index 0000000..29ed550 --- /dev/null +++ b/backend/src/controllers/workflow.js @@ -0,0 +1,190 @@ +const { + createWorkflow, + getWorkflowsByUserId, + getWorkflowById, + updateWorkflow, + deleteWorkflow +} = require('../models/workflow'); +const logger = require('../utils/logger'); + +/** + * Create a new workflow + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const create = async (req, res, next) => { + try { + const { name, nodes, connections } = req.body; + const userId = req.user.id; + + // Validate input + if (!name) { + return res.status(400).json({ + error: 'Bad Request', + message: 'Workflow name is required' + }); + } + + // Create workflow + const workflow = await createWorkflow({ name, nodes, connections }, userId); + + res.status(201).json({ + message: 'Workflow created successfully', + workflow + }); + } catch (error) { + next(error); + } +}; + +/** + * Get all workflows for the current user + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getAll = async (req, res, next) => { + try { + const userId = req.user.id; + + // Get workflows + const workflows = await getWorkflowsByUserId(userId); + + res.status(200).json({ + count: workflows.length, + workflows + }); + } catch (error) { + next(error); + } +}; + +/** + * Get workflow by ID + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const getById = async (req, res, next) => { + try { + const { id } = req.params; + const userId = req.user.id; + + // Get workflow + const workflow = await getWorkflowById(id, userId); + + if (!workflow) { + return res.status(404).json({ + error: 'Not Found', + message: 'Workflow not found' + }); + } + + res.status(200).json({ workflow }); + } catch (error) { + next(error); + } +}; + +/** + * Update workflow + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const update = async (req, res, next) => { + try { + const { id } = req.params; + const { name, nodes, connections } = req.body; + const userId = req.user.id; + + // Update workflow + const workflow = await updateWorkflow(id, { name, nodes, connections }, userId); + + if (!workflow) { + return res.status(404).json({ + error: 'Not Found', + message: 'Workflow not found' + }); + } + + res.status(200).json({ + message: 'Workflow updated successfully', + workflow + }); + } catch (error) { + next(error); + } +}; + +/** + * Delete workflow + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const remove = async (req, res, next) => { + try { + const { id } = req.params; + const userId = req.user.id; + + // Delete workflow + const deleted = await deleteWorkflow(id, userId); + + if (!deleted) { + return 
res.status(404).json({ + error: 'Not Found', + message: 'Workflow not found' + }); + } + + res.status(200).json({ + message: 'Workflow deleted successfully' + }); + } catch (error) { + next(error); + } +}; + +/** + * Execute workflow + * @param {Object} req - Express request object + * @param {Object} res - Express response object + * @param {Function} next - Express next middleware function + */ +const execute = async (req, res, next) => { + try { + const { id } = req.params; + const userId = req.user.id; + + // Get workflow + const workflow = await getWorkflowById(id, userId); + + if (!workflow) { + return res.status(404).json({ + error: 'Not Found', + message: 'Workflow not found' + }); + } + + // TODO: Implement workflow execution logic with BullMQ + // This will be implemented in the workflow execution service + + res.status(202).json({ + message: 'Workflow execution started', + executionId: 'temp-id' // Will be replaced with actual execution ID + }); + } catch (error) { + next(error); + } +}; + +module.exports = { + create, + getAll, + getById, + update, + remove, + execute +}; diff --git a/backend/src/index.js b/backend/src/index.js new file mode 100644 index 0000000..1730db4 --- /dev/null +++ b/backend/src/index.js @@ -0,0 +1,58 @@ +require('dotenv').config(); +const express = require('express'); +const cors = require('cors'); +const helmet = require('helmet'); +const rateLimit = require('express-rate-limit'); +const { errorHandler } = require('./middleware/errorHandler'); +const logger = require('./utils/logger'); + +// Import routes +const authRoutes = require('./routes/auth'); +const workflowRoutes = require('./routes/workflows'); +const userRoutes = require('./routes/users'); +const nodeRoutes = require('./routes/nodes'); + +// Initialize express app +const app = express(); +const PORT = process.env.PORT || 4000; + +// Middleware +app.use(helmet()); +app.use(cors()); +app.use(express.json()); + +// Rate limiting +const limiter = rateLimit({ + windowMs: 15 * 60 * 1000, // 15 minutes + max: 100, // limit each IP to 100 requests per windowMs + standardHeaders: true, + legacyHeaders: false, +}); +app.use(limiter); + +// Routes +app.use('/api/auth', authRoutes); +app.use('/api/workflows', workflowRoutes); +app.use('/api/users', userRoutes); +app.use('/api/nodes', nodeRoutes); + +// Health check endpoint +app.get('/health', (req, res) => { + res.status(200).json({ status: 'ok', timestamp: new Date().toISOString() }); +}); + +// Error handling middleware +app.use(errorHandler); + +// Start server +app.listen(PORT, () => { + logger.info(`Server running on port ${PORT}`); +}); + +// Handle unhandled promise rejections +process.on('unhandledRejection', (err) => { + logger.error('Unhandled Rejection:', err); + // Don't crash the server, but log the error +}); + +module.exports = app; // For testing diff --git a/backend/src/middleware/auth.js b/backend/src/middleware/auth.js new file mode 100644 index 0000000..e79185d --- /dev/null +++ b/backend/src/middleware/auth.js @@ -0,0 +1,57 @@ +const jwt = require('jsonwebtoken'); +const { getUserById } = require('../models/user'); + +/** + * Authentication middleware to protect routes + */ +const authenticate = async (req, res, next) => { + try { + // Get token from header + const authHeader = req.headers.authorization; + if (!authHeader || !authHeader.startsWith('Bearer ')) { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Authentication token required' + }); + } + + // Verify token + const token = authHeader.split(' ')[1]; + 
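+    /* For reference, a token minted by the auth controller decodes to a
+       payload shaped like this (illustrative values; iat/exp come from the
+       24h expiresIn used at sign time):
+
+         { id: '4f1c2ab0-93d1-4a2e-9c6f-1a2b3c4d5e6f',
+           email: 'user@example.com', iat: 1749280743, exp: 1749367143 }
+
+       jwt.verify checks the signature against JWT_SECRET and rejects expired
+       tokens, which lands in the TokenExpiredError branch below. */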
const decoded = jwt.verify(token, process.env.JWT_SECRET); + + // Get user from database + const user = await getUserById(decoded.id); + if (!user) { + return res.status(401).json({ + error: 'Unauthorized', + message: 'User not found' + }); + } + + // Attach user to request object + req.user = { + id: user.id, + email: user.email + }; + + next(); + } catch (error) { + if (error.name === 'JsonWebTokenError') { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Invalid token' + }); + } + + if (error.name === 'TokenExpiredError') { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Token expired' + }); + } + + next(error); + } +}; + +module.exports = { authenticate }; diff --git a/backend/src/middleware/errorHandler.js b/backend/src/middleware/errorHandler.js new file mode 100644 index 0000000..36fe435 --- /dev/null +++ b/backend/src/middleware/errorHandler.js @@ -0,0 +1,41 @@ +const logger = require('../utils/logger'); + +/** + * Global error handling middleware + */ +const errorHandler = (err, req, res, next) => { + // Log the error + logger.error(`${err.name}: ${err.message}`, { + stack: err.stack, + path: req.path, + method: req.method + }); + + // Default error status and message + const status = err.status || 500; + const message = err.message || 'Internal Server Error'; + + // Handle specific error types + if (err.name === 'ValidationError') { + return res.status(400).json({ + error: 'Validation Error', + message: err.message, + details: err.details + }); + } + + if (err.name === 'UnauthorizedError') { + return res.status(401).json({ + error: 'Unauthorized', + message: 'Invalid or expired token' + }); + } + + // Return error response + res.status(status).json({ + error: err.name || 'Error', + message: status === 500 ? 
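+      /* Messages on 500s are masked so internal details never reach clients.
+         A handler that wants a specific message surfaced can attach a status
+         before calling next(), a sketch of the `err.status` convention this
+         handler reads (the current controllers send their 4xx responses
+         directly instead):
+
+           const err = new Error('Workflow name is required');
+           err.status = 400;
+           next(err);                                                       */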
'Internal Server Error' : message + }); +}; + +module.exports = { errorHandler }; diff --git a/backend/src/models/user.js b/backend/src/models/user.js new file mode 100644 index 0000000..bb389a6 --- /dev/null +++ b/backend/src/models/user.js @@ -0,0 +1,72 @@ +const { db } = require('../config/db'); +const { v4: uuidv4 } = require('uuid'); +const bcrypt = require('bcrypt'); + +/** + * Create a new user + * @param {Object} userData - User data + * @returns {Object} Created user + */ +const createUser = async (userData) => { + // Hash password + const salt = await bcrypt.genSalt(10); + const hashedPassword = await bcrypt.hash(userData.password, salt); + + // Generate UUID for user + const userId = uuidv4(); + + // Insert user into database + const [user] = await db('users') + .insert({ + id: userId, + email: userData.email.toLowerCase(), + password: hashedPassword, + created_at: new Date() + }) + .returning(['id', 'email', 'created_at']); + + return user; +}; + +/** + * Get user by email + * @param {string} email - User email + * @returns {Object|null} User object or null if not found + */ +const getUserByEmail = async (email) => { + const user = await db('users') + .where({ email: email.toLowerCase() }) + .first(); + + return user; +}; + +/** + * Get user by ID + * @param {string} id - User ID + * @returns {Object|null} User object or null if not found + */ +const getUserById = async (id) => { + const user = await db('users') + .where({ id }) + .first(); + + return user; +}; + +/** + * Verify user password + * @param {string} password - Plain text password + * @param {string} hashedPassword - Hashed password from database + * @returns {boolean} True if password matches, false otherwise + */ +const verifyPassword = async (password, hashedPassword) => { + return await bcrypt.compare(password, hashedPassword); +}; + +module.exports = { + createUser, + getUserByEmail, + getUserById, + verifyPassword +}; diff --git a/backend/src/models/workflow.js b/backend/src/models/workflow.js new file mode 100644 index 0000000..b02fbd7 --- /dev/null +++ b/backend/src/models/workflow.js @@ -0,0 +1,181 @@ +const { db } = require('../config/db'); +const { v4: uuidv4 } = require('uuid'); + +/** + * Safely parse JSON data + * @param {string|Object} data - Data to parse + * @returns {Object} Parsed data + */ +const safeJsonParse = (data) => { + if (!data) return []; + if (typeof data === 'string') { + try { + return JSON.parse(data); + } catch (error) { + console.error('Error parsing JSON:', error); + return []; + } + } + return data; // Already an object +}; + +/** + * Safely stringify JSON data + * @param {string|Object} data - Data to stringify + * @returns {string} Stringified data + */ +const safeJsonStringify = (data) => { + if (!data) return '[]'; + if (typeof data === 'string') { + return data; // Already a string + } + return JSON.stringify(data); +}; + +/** + * Create a new workflow + * @param {Object} workflowData - Workflow data + * @param {string} userId - User ID + * @returns {Object} Created workflow + */ +const createWorkflow = async (workflowData, userId) => { + // Generate UUID for workflow + const workflowId = uuidv4(); + + // Safely stringify nodes and connections + const nodesToStore = safeJsonStringify(workflowData.nodes || []); + const connectionsToStore = safeJsonStringify(workflowData.connections || []); + + // Insert workflow into database + const [workflow] = await db('workflows') + .insert({ + id: workflowId, + user_id: userId, + name: workflowData.name, + nodes: nodesToStore, + 
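+        /* Schema note: the knex migration defines a `connections` column but
+           no `edges`, while setup-db.sql defines `edges` but no `connections`,
+           so this insert only succeeds on a table that carries both columns.
+           The repeated "column 'connections' of relation 'workflows' does not
+           exist" entries in the committed logs are this insert running
+           against the setup-db.sql schema. */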
connections: connectionsToStore, + edges: connectionsToStore, // Also store in edges for backward compatibility + created_at: new Date(), + updated_at: new Date() + }) + .returning(['id', 'user_id', 'name', 'nodes', 'connections', 'edges', 'created_at', 'updated_at']); + + // Safely parse JSON fields + return { + ...workflow, + nodes: safeJsonParse(workflow.nodes), + connections: safeJsonParse(workflow.connections || workflow.edges) + }; +}; + +/** + * Get all workflows for a user + * @param {string} userId - User ID + * @returns {Array} Array of workflows + */ +const getWorkflowsByUserId = async (userId) => { + const workflows = await db('workflows') + .where({ user_id: userId }) + .select('*') + .orderBy('updated_at', 'desc'); + + // Safely parse JSON fields + return workflows.map(workflow => ({ + ...workflow, + nodes: safeJsonParse(workflow.nodes), + connections: safeJsonParse(workflow.connections || workflow.edges) + })); +}; + +/** + * Get workflow by ID + * @param {string} id - Workflow ID + * @param {string} userId - User ID (for authorization) + * @returns {Object|null} Workflow object or null if not found + */ +const getWorkflowById = async (id, userId) => { + const workflow = await db('workflows') + .where({ id, user_id: userId }) + .first(); + + if (!workflow) { + return null; + } + + // Safely parse JSON fields + // Use connections if available, otherwise fall back to edges + const connections = workflow.connections ? safeJsonParse(workflow.connections) : safeJsonParse(workflow.edges); + + return { + ...workflow, + nodes: safeJsonParse(workflow.nodes), + connections: connections + }; +}; + +/** + * Update workflow + * @param {string} id - Workflow ID + * @param {Object} workflowData - Updated workflow data + * @param {string} userId - User ID (for authorization) + * @returns {Object|null} Updated workflow or null if not found + */ +const updateWorkflow = async (id, workflowData, userId) => { + // Check if workflow exists and belongs to user + const existingWorkflow = await getWorkflowById(id, userId); + if (!existingWorkflow) { + return null; + } + + // Safely stringify nodes and connections + const nodesToStore = safeJsonStringify(workflowData.nodes || existingWorkflow.nodes); + const connectionsToStore = safeJsonStringify(workflowData.connections || existingWorkflow.connections); + + // Update workflow + const [workflow] = await db('workflows') + .where({ id, user_id: userId }) + .update({ + name: workflowData.name || existingWorkflow.name, + nodes: nodesToStore, + connections: connectionsToStore, + edges: connectionsToStore, // Also update edges for backward compatibility + updated_at: new Date() + }) + .returning(['id', 'user_id', 'name', 'nodes', 'connections', 'edges', 'created_at', 'updated_at']); + + // Safely parse JSON fields + return { + ...workflow, + nodes: safeJsonParse(workflow.nodes), + connections: safeJsonParse(workflow.connections || workflow.edges) + }; +}; + +/** + * Delete workflow + * @param {string} id - Workflow ID + * @param {string} userId - User ID (for authorization) + * @returns {boolean} True if deleted, false if not found + */ +const deleteWorkflow = async (id, userId) => { + // Check if workflow exists and belongs to user + const existingWorkflow = await getWorkflowById(id, userId); + if (!existingWorkflow) { + return false; + } + + // Delete workflow + await db('workflows') + .where({ id, user_id: userId }) + .delete(); + + return true; +}; + +module.exports = { + createWorkflow, + getWorkflowsByUserId, + getWorkflowById, + updateWorkflow, + 
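+  /* Why safeJsonParse/safeJsonStringify exist: pg hands jsonb columns back
+     as already-parsed values, and calling JSON.parse on an object coerces it
+     to the string "[object Object]", which is exactly the "Unexpected token
+     o in JSON at position 1" error filling the committed logs. For example:
+
+       JSON.parse('[{"id":"n1"}]')   // ok: string input
+       JSON.parse({ id: 'n1' })      // throws: Unexpected token o
+       safeJsonParse({ id: 'n1' })   // returns the object unchanged         */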
deleteWorkflow +}; diff --git a/backend/src/nodes/delay/meta.json b/backend/src/nodes/delay/meta.json new file mode 100644 index 0000000..1416194 --- /dev/null +++ b/backend/src/nodes/delay/meta.json @@ -0,0 +1,18 @@ +{ + "name": "Delay", + "type": "delay", + "icon": "clock", + "description": "Pause workflow execution for a specified time", + "category": "Flow Control", + "version": "1.0.0", + "configSchema": [ + { "key": "duration", "type": "number", "label": "Duration", "required": true, "default": 1000 }, + { "key": "unit", "type": "select", "label": "Unit", "options": ["milliseconds", "seconds", "minutes", "hours"], "default": "milliseconds" } + ], + "inputs": [ + { "key": "input", "label": "Input" } + ], + "outputs": [ + { "key": "output", "label": "Output" } + ] +} diff --git a/backend/src/nodes/delay/runner.js b/backend/src/nodes/delay/runner.js new file mode 100644 index 0000000..0361246 --- /dev/null +++ b/backend/src/nodes/delay/runner.js @@ -0,0 +1,47 @@ +const logger = require('../../utils/logger'); + +/** + * Delay Node Runner + * Pauses workflow execution for a specified time + */ +async function run(nodeConfig, inputData) { + try { + // Calculate delay in milliseconds + let delayMs = nodeConfig.duration || 1000; + + // Convert based on unit + switch (nodeConfig.unit) { + case 'seconds': + delayMs *= 1000; + break; + case 'minutes': + delayMs *= 60 * 1000; + break; + case 'hours': + delayMs *= 60 * 60 * 1000; + break; + default: + // Default is milliseconds, no conversion needed + break; + } + + logger.debug('Delay node executing', { + duration: nodeConfig.duration, + unit: nodeConfig.unit, + delayMs + }); + + // Create a promise that resolves after the delay + await new Promise(resolve => setTimeout(resolve, delayMs)); + + // Pass through the input data to the output + return { + output: inputData.input || {} + }; + } catch (error) { + logger.error('Delay node error', { error: error.message }); + throw error; + } +} + +module.exports = { run }; diff --git a/backend/src/nodes/email/meta.json b/backend/src/nodes/email/meta.json new file mode 100644 index 0000000..998cd70 --- /dev/null +++ b/backend/src/nodes/email/meta.json @@ -0,0 +1,33 @@ +{ + "name": "Email", + "type": "email", + "icon": "envelope", + "description": "Send an email via SMTP", + "category": "Communication", + "version": "1.0.0", + "configSchema": [ + { "key": "to", "type": "text", "label": "To", "required": true }, + { "key": "subject", "type": "text", "label": "Subject", "required": true }, + { "key": "body", "type": "text", "label": "Body", "multiline": true, "required": true }, + { "key": "isHtml", "type": "boolean", "label": "HTML Content", "default": false }, + { "key": "from", "type": "text", "label": "From (optional)" }, + { "key": "cc", "type": "text", "label": "CC" }, + { "key": "bcc", "type": "text", "label": "BCC" }, + { "key": "smtpConfig", "type": "object", "label": "SMTP Configuration", "properties": [ + { "key": "host", "type": "text", "label": "Host", "required": true }, + { "key": "port", "type": "number", "label": "Port", "required": true, "default": 587 }, + { "key": "secure", "type": "boolean", "label": "Use SSL/TLS", "default": false }, + { "key": "auth", "type": "object", "label": "Authentication", "properties": [ + { "key": "user", "type": "text", "label": "Username", "required": true }, + { "key": "pass", "type": "password", "label": "Password", "required": true } + ]} + ]} + ], + "inputs": [ + { "key": "input", "label": "Input" } + ], + "outputs": [ + { "key": "output", "label": 
"Output" }, + { "key": "error", "label": "Error" } + ] +} diff --git a/backend/src/nodes/email/runner.js b/backend/src/nodes/email/runner.js new file mode 100644 index 0000000..5d41957 --- /dev/null +++ b/backend/src/nodes/email/runner.js @@ -0,0 +1,78 @@ +const nodemailer = require('nodemailer'); +const logger = require('../../utils/logger'); + +/** + * Email Node Runner + * Sends an email using SMTP + */ +async function run(nodeConfig, inputData) { + try { + logger.debug('Email node executing'); + + // Check required configuration + if (!nodeConfig.to || !nodeConfig.subject || !nodeConfig.body) { + throw new Error('Missing required email configuration (to, subject, or body)'); + } + + if (!nodeConfig.smtpConfig || !nodeConfig.smtpConfig.host) { + throw new Error('Missing SMTP configuration'); + } + + // Create transporter + const transporter = nodemailer.createTransport({ + host: nodeConfig.smtpConfig.host, + port: nodeConfig.smtpConfig.port || 587, + secure: nodeConfig.smtpConfig.secure || false, + auth: nodeConfig.smtpConfig.auth ? { + user: nodeConfig.smtpConfig.auth.user, + pass: nodeConfig.smtpConfig.auth.pass + } : undefined + }); + + // Prepare email options + const mailOptions = { + from: nodeConfig.from, + to: nodeConfig.to, + subject: nodeConfig.subject, + cc: nodeConfig.cc, + bcc: nodeConfig.bcc + }; + + // Set email content based on HTML flag + if (nodeConfig.isHtml) { + mailOptions.html = nodeConfig.body; + } else { + mailOptions.text = nodeConfig.body; + } + + // Send email + const info = await transporter.sendMail(mailOptions); + + logger.debug('Email sent successfully', { + messageId: info.messageId, + to: nodeConfig.to + }); + + // Return success response + return { + output: { + success: true, + messageId: info.messageId, + response: info.response, + timestamp: new Date().toISOString() + } + }; + } catch (error) { + logger.error('Email node error', { error: error.message }); + + // Return error through the error output + return { + error: { + message: error.message, + stack: error.stack + } + }; + } +} + +module.exports = { run }; diff --git a/backend/src/nodes/function/meta.json b/backend/src/nodes/function/meta.json new file mode 100644 index 0000000..1f81542 --- /dev/null +++ b/backend/src/nodes/function/meta.json @@ -0,0 +1,19 @@ +{ + "name": "Function", + "type": "function", + "icon": "code", + "description": "Execute custom JavaScript code", + "category": "Advanced", + "version": "1.0.0", + "configSchema": [ + { "key": "code", "type": "code", "language": "javascript", "label": "Code", "required": true, "default": "// Input data is available as the 'input' variable\n// Must return an object with the output data\nreturn { result: input };" }, + { "key": "timeout", "type": "number", "label": "Timeout (ms)", "default": 5000 } + ], + "inputs": [ + { "key": "input", "label": "Input" } + ], + "outputs": [ + { "key": "output", "label": "Output" }, + { "key": "error", "label": "Error" } + ] +} diff --git a/backend/src/nodes/function/runner.js b/backend/src/nodes/function/runner.js new file mode 100644 index 0000000..1d4090c --- /dev/null +++ b/backend/src/nodes/function/runner.js @@ -0,0 +1,81 @@ +const { VM } = require('vm2'); +const logger = require('../../utils/logger'); + +/** + * Function Node Runner + * Executes custom JavaScript code in a sandboxed environment + */ +async function run(nodeConfig, inputData) { + try { + logger.debug('Function node executing'); + + // Check required configuration + if (!nodeConfig.code) { + throw new Error('Missing function code'); + 
} + + // Set timeout (default: 5000ms) + const timeout = nodeConfig.timeout || 5000; + + // Create a sandboxed VM + const vm = new VM({ + timeout, + sandbox: { + // Provide input data to the sandbox + input: inputData.input || {}, + // Provide console methods that log to our logger + console: { + log: (...args) => logger.info('Function node log:', ...args), + info: (...args) => logger.info('Function node info:', ...args), + warn: (...args) => logger.warn('Function node warn:', ...args), + error: (...args) => logger.error('Function node error:', ...args) + } + }, + // Prevent access to Node.js internal modules + require: { + external: false, + builtin: ['path', 'util', 'buffer'], + root: "./", + mock: { + fs: { + readFileSync: () => 'Not allowed' + } + } + } + }); + + // Wrap the code in an async function + const wrappedCode = ` + (async function() { + ${nodeConfig.code} + })(); + `; + + // Execute the code + const result = await vm.run(wrappedCode); + + // Validate the result + if (result === undefined || result === null) { + return { + output: {} + }; + } + + // Return the result + return { + output: result + }; + } catch (error) { + logger.error('Function node error', { error: error.message }); + + // Return error through the error output + return { + error: { + message: error.message, + stack: error.stack + } + }; + } +} + +module.exports = { run }; diff --git a/backend/src/nodes/http-request/meta.json b/backend/src/nodes/http-request/meta.json new file mode 100644 index 0000000..911c688 --- /dev/null +++ b/backend/src/nodes/http-request/meta.json @@ -0,0 +1,22 @@ +{ + "name": "HTTP Request", + "type": "http-request", + "icon": "globe", + "description": "Send an HTTP request to an external API", + "category": "Network", + "version": "1.0.0", + "configSchema": [ + { "key": "url", "type": "text", "label": "URL", "required": true }, + { "key": "method", "type": "select", "label": "Method", "options": ["GET", "POST", "PUT", "DELETE", "PATCH"], "default": "GET" }, + { "key": "headers", "type": "keyValue", "label": "Headers" }, + { "key": "body", "type": "json", "label": "Body", "showIf": { "method": ["POST", "PUT", "PATCH"] } }, + { "key": "timeout", "type": "number", "label": "Timeout (ms)", "default": 5000 } + ], + "inputs": [ + { "key": "input", "label": "Input" } + ], + "outputs": [ + { "key": "output", "label": "Output" }, + { "key": "error", "label": "Error" } + ] +} diff --git a/backend/src/nodes/http-request/runner.js b/backend/src/nodes/http-request/runner.js new file mode 100644 index 0000000..f499fb3 --- /dev/null +++ b/backend/src/nodes/http-request/runner.js @@ -0,0 +1,81 @@ +const axios = require('axios'); +const logger = require('../../utils/logger'); + +/** + * HTTP Request Node Runner + * Executes an HTTP request based on the node configuration + */ +async function run(nodeConfig, inputData) { + try { + logger.debug('HTTP Request node executing', { + url: nodeConfig.url, + method: nodeConfig.method + }); + + // Build request config + const requestConfig = { + url: nodeConfig.url, + method: nodeConfig.method || 'GET', + timeout: nodeConfig.timeout || 5000 + }; + + // Add headers if provided + if (nodeConfig.headers && Object.keys(nodeConfig.headers).length > 0) { + requestConfig.headers = nodeConfig.headers; + } + + // Add request body for POST, PUT, PATCH methods + if (['POST', 'PUT', 'PATCH'].includes(requestConfig.method) && nodeConfig.body) { + try { + // If body is a string that looks like JSON, parse it + if (typeof nodeConfig.body === 'string') { + requestConfig.data = 
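+          /* Parsing the string here lets axios treat the body as an object
+             and apply its JSON serialisation defaults; the catch below falls
+             back to sending the raw string unchanged. */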
JSON.parse(nodeConfig.body); + } else { + requestConfig.data = nodeConfig.body; + } + } catch (error) { + // If parsing fails, use as is + requestConfig.data = nodeConfig.body; + } + } + + // Execute the HTTP request + const response = await axios(requestConfig); + + // Return the response data + return { + output: { + status: response.status, + statusText: response.statusText, + headers: response.headers, + data: response.data + } + }; + } catch (error) { + logger.error('HTTP Request node error', { + error: error.message, + url: nodeConfig.url + }); + + // Format axios error response + const errorResponse = { + message: error.message, + code: error.code + }; + + // Add response data if available + if (error.response) { + errorResponse.status = error.response.status; + errorResponse.statusText = error.response.statusText; + errorResponse.headers = error.response.headers; + errorResponse.data = error.response.data; + } + + // Return error through the error output + return { + error: errorResponse + }; + } +} + +module.exports = { run }; diff --git a/backend/src/nodes/logger/meta.json b/backend/src/nodes/logger/meta.json new file mode 100644 index 0000000..cc34def --- /dev/null +++ b/backend/src/nodes/logger/meta.json @@ -0,0 +1,19 @@ +{ + "name": "Logger", + "type": "logger", + "icon": "file-text", + "description": "Log data to the console and workflow logs", + "category": "Utility", + "version": "1.0.0", + "configSchema": [ + { "key": "level", "type": "select", "label": "Log Level", "options": ["debug", "info", "warn", "error"], "default": "info" }, + { "key": "message", "type": "text", "label": "Message", "required": true }, + { "key": "logInputData", "type": "boolean", "label": "Log Input Data", "default": true } + ], + "inputs": [ + { "key": "input", "label": "Input" } + ], + "outputs": [ + { "key": "output", "label": "Output" } + ] +} diff --git a/backend/src/nodes/logger/runner.js b/backend/src/nodes/logger/runner.js new file mode 100644 index 0000000..ee37b56 --- /dev/null +++ b/backend/src/nodes/logger/runner.js @@ -0,0 +1,55 @@ +const logger = require('../../utils/logger'); + +/** + * Logger Node Runner + * Logs data to the console and workflow logs + */ +async function run(nodeConfig, inputData) { + try { + const level = nodeConfig.level || 'info'; + const message = nodeConfig.message || 'Logger node executed'; + const logInputData = nodeConfig.logInputData !== false; // Default to true + + // Prepare log data + const logData = { + message, + timestamp: new Date().toISOString() + }; + + // Add input data if configured + if (logInputData && inputData && inputData.input) { + logData.data = inputData.input; + } + + // Log with the appropriate level + switch (level) { + case 'debug': + logger.debug(message, logData); + break; + case 'warn': + logger.warn(message, logData); + break; + case 'error': + logger.error(message, logData); + break; + case 'info': + default: + logger.info(message, logData); + break; + } + + // Pass through the input data to the output + return { + output: inputData.input || {} + }; + } catch (error) { + logger.error('Logger node error', { error: error.message }); + + // Even if there's an error, try to pass through the input data + return { + output: inputData.input || {} + }; + } +} + +module.exports = { run }; diff --git a/backend/src/nodes/webhook/meta.json b/backend/src/nodes/webhook/meta.json new file mode 100644 index 0000000..8f912c7 --- /dev/null +++ b/backend/src/nodes/webhook/meta.json @@ -0,0 +1,17 @@ +{ + "name": "Webhook", + "type": "webhook", + 
"icon": "link", + "description": "Trigger a workflow via a webhook endpoint", + "category": "Trigger", + "version": "1.0.0", + "configSchema": [ + { "key": "path", "type": "text", "label": "Path", "required": true, "description": "The webhook path (e.g., /my-webhook)" }, + { "key": "method", "type": "select", "label": "Method", "options": ["GET", "POST", "PUT", "DELETE", "ANY"], "default": "POST" }, + { "key": "description", "type": "text", "label": "Description", "multiline": true } + ], + "inputs": [], + "outputs": [ + { "key": "output", "label": "Output" } + ] +} diff --git a/backend/src/nodes/webhook/runner.js b/backend/src/nodes/webhook/runner.js new file mode 100644 index 0000000..db0e2f2 --- /dev/null +++ b/backend/src/nodes/webhook/runner.js @@ -0,0 +1,50 @@ +const logger = require('../../utils/logger'); + +/** + * Webhook Node Runner + * This node doesn't have a traditional "run" function since it's triggered by HTTP requests. + * Instead, it provides functions to register and handle webhook endpoints. + */ + +/** + * Generate a unique webhook URL for a workflow node + * @param {string} workflowId - The workflow ID + * @param {string} nodeId - The node ID + * @returns {string} The webhook path + */ +function generateWebhookPath(workflowId, nodeId) { + return `/webhook/${workflowId}/${nodeId}`; +} + +/** + * Handle an incoming webhook request + * @param {Object} req - Express request object + * @param {Object} nodeConfig - Node configuration + * @returns {Object} Data to pass to the next node + */ +function handleRequest(req, nodeConfig) { + logger.debug('Webhook node triggered', { + path: req.path, + method: req.method + }); + + // Prepare the output data + const output = { + method: req.method, + path: req.path, + query: req.query || {}, + params: req.params || {}, + headers: req.headers || {}, + body: req.body || {} + }; + + // Return the output data + return { + output + }; +} + +module.exports = { + generateWebhookPath, + handleRequest +}; diff --git a/backend/src/routes/auth.js b/backend/src/routes/auth.js new file mode 100644 index 0000000..d61c282 --- /dev/null +++ b/backend/src/routes/auth.js @@ -0,0 +1,14 @@ +const express = require('express'); +const router = express.Router(); +const { register, login, getProfile, logout } = require('../controllers/auth'); +const { authenticate } = require('../middleware/auth'); + +// Public routes +router.post('/register', register); +router.post('/login', login); +router.post('/logout', logout); + +// Protected routes +router.get('/me', authenticate, getProfile); + +module.exports = router; diff --git a/backend/src/routes/nodes.js b/backend/src/routes/nodes.js new file mode 100644 index 0000000..e337c7d --- /dev/null +++ b/backend/src/routes/nodes.js @@ -0,0 +1,15 @@ +const express = require('express'); +const router = express.Router(); +const { getAllNodeTypes, getNodeType } = require('../controllers/node'); +const { authenticate } = require('../middleware/auth'); + +// All node routes require authentication +router.use(authenticate); + +// Get all node types +router.get('/', getAllNodeTypes); + +// Get specific node type +router.get('/:type', getNodeType); + +module.exports = router; diff --git a/backend/src/routes/users.js b/backend/src/routes/users.js new file mode 100644 index 0000000..1d7c747 --- /dev/null +++ b/backend/src/routes/users.js @@ -0,0 +1,15 @@ +const express = require('express'); +const router = express.Router(); +const { getProfile, updateProfile } = require('../controllers/user'); +const { authenticate } = 
require('../middleware/auth'); + +// All user routes require authentication +router.use(authenticate); + +// Get current user profile +router.get('/me', getProfile); + +// Update user profile +router.put('/me', updateProfile); + +module.exports = router; diff --git a/backend/src/routes/workflows.js b/backend/src/routes/workflows.js new file mode 100644 index 0000000..5e06425 --- /dev/null +++ b/backend/src/routes/workflows.js @@ -0,0 +1,26 @@ +const express = require('express'); +const router = express.Router(); +const { + create, + getAll, + getById, + update, + remove, + execute +} = require('../controllers/workflow'); +const { authenticate } = require('../middleware/auth'); + +// All workflow routes require authentication +router.use(authenticate); + +// CRUD operations +router.post('/', create); +router.get('/', getAll); +router.get('/:id', getById); +router.put('/:id', update); +router.delete('/:id', remove); + +// Workflow execution +router.post('/:id/execute', execute); + +module.exports = router; diff --git a/backend/src/services/nodeRegistry.js b/backend/src/services/nodeRegistry.js new file mode 100644 index 0000000..884051d --- /dev/null +++ b/backend/src/services/nodeRegistry.js @@ -0,0 +1,116 @@ +const fs = require('fs'); +const path = require('path'); +const logger = require('../utils/logger'); + +class NodeRegistry { + constructor() { + this.nodes = new Map(); + this.nodesPath = path.join(__dirname, '../nodes'); + } + + /** + * Initialize the node registry by loading all available nodes + */ + async initialize() { + try { + logger.info('Initializing node registry'); + + // Get all node type directories + const nodeTypes = fs.readdirSync(this.nodesPath, { withFileTypes: true }) + .filter(dirent => dirent.isDirectory()) + .map(dirent => dirent.name); + + // Load each node type + for (const nodeType of nodeTypes) { + await this.loadNodeType(nodeType); + } + + logger.info(`Node registry initialized with ${this.nodes.size} node types`); + return true; + } catch (error) { + logger.error('Failed to initialize node registry', { error: error.message }); + return false; + } + } + + /** + * Load a specific node type + * @param {string} nodeType - The node type to load + */ + async loadNodeType(nodeType) { + try { + const nodePath = path.join(this.nodesPath, nodeType); + + // Load meta.json + const metaPath = path.join(nodePath, 'meta.json'); + if (!fs.existsSync(metaPath)) { + logger.warn(`Node type ${nodeType} missing meta.json, skipping`); + return; + } + + const meta = JSON.parse(fs.readFileSync(metaPath, 'utf8')); + + // Load runner.js + const runnerPath = path.join(nodePath, 'runner.js'); + if (!fs.existsSync(runnerPath)) { + logger.warn(`Node type ${nodeType} missing runner.js, skipping`); + return; + } + + const runner = require(runnerPath); + + // Register the node type + this.nodes.set(nodeType, { + meta, + runner + }); + + logger.debug(`Loaded node type: ${nodeType}`); + } catch (error) { + logger.error(`Failed to load node type: ${nodeType}`, { error: error.message }); + } + } + + /** + * Get all registered node types + * @returns {Array} Array of node type metadata + */ + getAllNodeTypes() { + return Array.from(this.nodes.values()).map(node => node.meta); + } + + /** + * Get a specific node type by type + * @param {string} nodeType - The node type to get + * @returns {Object|null} Node type or null if not found + */ + getNodeType(nodeType) { + return this.nodes.get(nodeType) || null; + } + + /** + * Execute a node + * @param {string} nodeType - The node type to execute + * 
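    + * @example
    + * // Illustrative: invoke a registered node directly (the workflow executor does this internally)
    + * // const result = await nodeRegistry.executeNode('logger', { message: 'ping' }, { input: {} });
    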
@param {Object} nodeConfig - Node configuration + * @param {Object} inputData - Input data for the node + * @returns {Promise} Node execution result + */ + async executeNode(nodeType, nodeConfig, inputData) { + const node = this.getNodeType(nodeType); + + if (!node) { + throw new Error(`Node type not found: ${nodeType}`); + } + + if (!node.runner.run) { + throw new Error(`Node type ${nodeType} does not have a run method`); + } + + return await node.runner.run(nodeConfig, inputData); + } +} + +// Create and export a singleton instance +const nodeRegistry = new NodeRegistry(); + +module.exports = nodeRegistry; diff --git a/backend/src/services/workflowExecutor.js b/backend/src/services/workflowExecutor.js new file mode 100644 index 0000000..3b16dd7 --- /dev/null +++ b/backend/src/services/workflowExecutor.js @@ -0,0 +1,306 @@ +const Bull = require('bull'); +const { v4: uuidv4 } = require('uuid'); +const { db } = require('../config/db'); +const nodeRegistry = require('./nodeRegistry'); +const logger = require('../utils/logger'); + +class WorkflowExecutor { + constructor() { + // Initialize the workflow queue + this.workflowQueue = new Bull('workflow-execution', process.env.REDIS_URL); + + // Initialize the execution logs map + this.executionLogs = new Map(); + + // Set up queue processing + this.setupQueueProcessor(); + } + + /** + * Set up the queue processor + */ + setupQueueProcessor() { + this.workflowQueue.process(async (job) => { + const { workflowId, executionId, triggerNodeId, triggerData } = job.data; + return await this.processWorkflow(workflowId, executionId, triggerNodeId, triggerData); + }); + + // Handle completed jobs + this.workflowQueue.on('completed', (job, result) => { + logger.info(`Workflow execution completed: ${job.data.executionId}`, { + workflowId: job.data.workflowId, + success: result.success + }); + }); + + // Handle failed jobs + this.workflowQueue.on('failed', (job, error) => { + logger.error(`Workflow execution failed: ${job.data.executionId}`, { + workflowId: job.data.workflowId, + error: error.message + }); + }); + } + + /** + * Execute a workflow + * @param {string} workflowId - The workflow ID + * @param {string} triggerNodeId - The ID of the trigger node + * @param {Object} triggerData - Data from the trigger + * @returns {Object} Execution details + */ + async executeWorkflow(workflowId, triggerNodeId, triggerData) { + // Generate a unique execution ID + const executionId = uuidv4(); + + // Initialize execution logs + this.executionLogs.set(executionId, []); + + // Add the job to the queue + await this.workflowQueue.add({ + workflowId, + executionId, + triggerNodeId, + triggerData + }); + + logger.info(`Workflow execution queued: ${executionId}`, { workflowId }); + + return { + executionId, + status: 'queued', + timestamp: new Date().toISOString() + }; + } + + /** + * Process a workflow execution + * @param {string} workflowId - The workflow ID + * @param {string} executionId - The execution ID + * @param {string} triggerNodeId - The ID of the trigger node + * @param {Object} triggerData - Data from the trigger + * @returns {Object} Execution result + */ + async processWorkflow(workflowId, executionId, triggerNodeId, triggerData) { + try { + // Log execution start + this.logExecution(executionId, 'info', 'Workflow execution started', { + workflowId, + triggerNodeId + }); + + // Get the workflow from the database + const workflow = await db('workflows') + .where({ id: workflowId }) + .first(); + + if (!workflow) { + throw new Error(`Workflow not found: 
${workflowId}`); + } + + // Parse JSON fields + const nodes = JSON.parse(workflow.nodes); + const connections = JSON.parse(workflow.connections); + + // Find the trigger node + const triggerNode = nodes.find(node => node.id === triggerNodeId); + if (!triggerNode) { + throw new Error(`Trigger node not found: ${triggerNodeId}`); + } + + // Execute the workflow starting from the trigger node + const result = await this.executeNode( + executionId, + triggerNode, + nodes, + connections, + triggerData + ); + + // Log execution completion + this.logExecution(executionId, 'info', 'Workflow execution completed', { + workflowId, + success: true + }); + + // Store execution logs in the database + await this.saveExecutionLogs(workflowId, executionId); + + return { + success: true, + executionId, + result + }; + } catch (error) { + // Log execution error + this.logExecution(executionId, 'error', 'Workflow execution failed', { + workflowId, + error: error.message + }); + + // Store execution logs in the database + await this.saveExecutionLogs(workflowId, executionId); + + return { + success: false, + executionId, + error: error.message + }; + } + } + + /** + * Execute a node and follow connections to next nodes + * @param {string} executionId - The execution ID + * @param {Object} node - The node to execute + * @param {Array} allNodes - All nodes in the workflow + * @param {Array} connections - All connections in the workflow + * @param {Object} inputData - Input data for the node + * @returns {Object} Node execution results + */ + async executeNode(executionId, node, allNodes, connections, inputData) { + try { + // Log node execution start + this.logExecution(executionId, 'debug', `Executing node: ${node.id}`, { + nodeType: node.type, + nodeId: node.id + }); + + // Execute the node + const nodeResult = await nodeRegistry.executeNode( + node.type, + node.config || {}, + { input: inputData } + ); + + // Log node execution result + this.logExecution(executionId, 'debug', `Node execution result: ${node.id}`, { + nodeId: node.id, + outputs: Object.keys(nodeResult) + }); + + // Find connections from this node + const nodeConnections = connections.filter(conn => conn.from === node.id); + + // Execute connected nodes + const nextResults = {}; + + for (const conn of nodeConnections) { + const nextNode = allNodes.find(n => n.id === conn.to); + + if (nextNode) { + // Get the output data from the current node + const outputData = nodeResult.output || {}; + + // Execute the next node + nextResults[nextNode.id] = await this.executeNode( + executionId, + nextNode, + allNodes, + connections, + outputData + ); + } + } + + // Return combined results + return { + nodeId: node.id, + nodeType: node.type, + result: nodeResult, + nextNodes: nextResults + }; + } catch (error) { + // Log node execution error + this.logExecution(executionId, 'error', `Node execution error: ${node.id}`, { + nodeId: node.id, + error: error.message + }); + + throw error; + } + } + + /** + * Log an execution event + * @param {string} executionId - The execution ID + * @param {string} level - Log level (debug, info, warn, error) + * @param {string} message - Log message + * @param {Object} data - Additional log data + */ + logExecution(executionId, level, message, data = {}) { + // Create log entry + const logEntry = { + timestamp: new Date().toISOString(), + level, + message, + data + }; + + // Add to execution logs + if (this.executionLogs.has(executionId)) { + this.executionLogs.get(executionId).push(logEntry); + } else { + 
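    +      // No log array yet for this execution (first event), so create one
    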
this.executionLogs.set(executionId, [logEntry]); + } + + // Also log to system logger + logger[level](message, { executionId, ...data }); + } + + /** + * Save execution logs to the database + * @param {string} workflowId - The workflow ID + * @param {string} executionId - The execution ID + */ + async saveExecutionLogs(workflowId, executionId) { + try { + // Get logs for this execution + const logs = this.executionLogs.get(executionId) || []; + + // Insert into database + await db('workflow_logs').insert({ + id: executionId, + workflow_id: workflowId, + logs: JSON.stringify(logs), + created_at: new Date() + }); + + // Clear logs from memory + this.executionLogs.delete(executionId); + } catch (error) { + logger.error('Failed to save execution logs', { + executionId, + error: error.message + }); + } + } + + /** + * Get execution logs + * @param {string} executionId - The execution ID + * @returns {Array} Execution logs + */ + async getExecutionLogs(executionId) { + // Check if logs are in memory + if (this.executionLogs.has(executionId)) { + return this.executionLogs.get(executionId); + } + + // Get logs from database + const logRecord = await db('workflow_logs') + .where({ id: executionId }) + .first(); + + if (!logRecord) { + return []; + } + + return JSON.parse(logRecord.logs); + } +} + +// Create and export a singleton instance +const workflowExecutor = new WorkflowExecutor(); + +module.exports = workflowExecutor; diff --git a/backend/src/utils/logger.js b/backend/src/utils/logger.js new file mode 100644 index 0000000..298a8fd --- /dev/null +++ b/backend/src/utils/logger.js @@ -0,0 +1,34 @@ +const winston = require('winston'); + +// Define log format +const logFormat = winston.format.combine( + winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), + winston.format.errors({ stack: true }), + winston.format.splat(), + winston.format.json() +); + +// Create the logger instance +const logger = winston.createLogger({ + level: process.env.NODE_ENV === 'production' ? 
'info' : 'debug', + format: logFormat, + defaultMeta: { service: 'flowforge-backend' }, + transports: [ + // Write all logs with level 'error' and below to error.log + new winston.transports.File({ filename: 'logs/error.log', level: 'error' }), + // Write all logs to combined.log + new winston.transports.File({ filename: 'logs/combined.log' }), + ], +}); + +// If we're not in production, also log to the console +if (process.env.NODE_ENV !== 'production') { + logger.add(new winston.transports.Console({ + format: winston.format.combine( + winston.format.colorize(), + winston.format.simple() + ), + })); +} + +module.exports = logger; diff --git a/backend/test-bcrypt.js b/backend/test-bcrypt.js new file mode 100644 index 0000000..e7a1894 --- /dev/null +++ b/backend/test-bcrypt.js @@ -0,0 +1,24 @@ +const bcrypt = require('bcrypt'); + +async function testBcrypt() { + const password = 'FlowForge123!'; + + // Generate a new hash + const salt = await bcrypt.genSalt(10); + const hash = await bcrypt.hash(password, salt); + + console.log('Generated hash:', hash); + console.log('Hash length:', hash.length); + + // Test the hash we're using in the database + const dbHash = '$2b$10$3euPcmQFCiblsZeEu5s7p.9wVdLajnYhAbcjkru4KkUGBIm3WVYjK'; + + // Compare the password with both hashes + const isValidNew = await bcrypt.compare(password, hash); + const isValidDB = await bcrypt.compare(password, dbHash); + + console.log('Is valid with new hash:', isValidNew); + console.log('Is valid with DB hash:', isValidDB); +} + +testBcrypt().catch(console.error); diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 0000000..be82f74 --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,68 @@ +version: '3.8' + +services: + frontend: + build: + context: ./frontend + dockerfile: Dockerfile.dev + volumes: + - ./frontend:/app + - /app/node_modules + ports: + - "3000:3000" + environment: + - NODE_ENV=development + - REACT_APP_API_URL=http://localhost:4000 + networks: + - flowforge-network + + backend: + build: + context: ./backend + dockerfile: Dockerfile.dev + volumes: + - ./backend:/app + - /app/node_modules + ports: + - "4000:4000" + environment: + - NODE_ENV=development + - PORT=4000 + - DATABASE_URL=postgres://postgres:postgres@postgres:5432/flowforge + - REDIS_URL=redis://redis:6379 + - JWT_SECRET=dev_secret_change_in_production + depends_on: + - postgres + - redis + networks: + - flowforge-network + + postgres: + image: postgres:14-alpine + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - POSTGRES_DB=flowforge + ports: + - "5432:5432" + volumes: + - postgres-dev-data:/var/lib/postgresql/data + networks: + - flowforge-network + + redis: + image: redis:7-alpine + ports: + - "6379:6379" + volumes: + - redis-dev-data:/data + networks: + - flowforge-network + +networks: + flowforge-network: + driver: bridge + +volumes: + postgres-dev-data: + redis-dev-data: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..03722a3 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,73 @@ +version: '3.8' + +services: + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + restart: unless-stopped + depends_on: + - backend + networks: + - flowforge-network + + backend: + build: + context: ./backend + dockerfile: Dockerfile + restart: unless-stopped + env_file: + - .env + depends_on: + - postgres + - redis + networks: + - flowforge-network + + postgres: + image: postgres:14-alpine + restart: unless-stopped + env_file: + - .env + volumes: + 
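    +      # Named volume (declared at the bottom of this file), so database files survive container rebuilds
    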
- postgres-data:/var/lib/postgresql/data + networks: + - flowforge-network + + redis: + image: redis:7-alpine + restart: unless-stopped + volumes: + - redis-data:/data + networks: + - flowforge-network + + nginx: + image: nginx:alpine + restart: unless-stopped + ports: + - "80:80" + - "443:443" + volumes: + - ./nginx/conf:/etc/nginx/conf.d + - ./nginx/certbot/conf:/etc/letsencrypt + - ./nginx/certbot/www:/var/www/certbot + depends_on: + - frontend + - backend + networks: + - flowforge-network + + certbot: + image: certbot/certbot + volumes: + - ./nginx/certbot/conf:/etc/letsencrypt + - ./nginx/certbot/www:/var/www/certbot + +networks: + flowforge-network: + driver: bridge + +volumes: + postgres-data: + redis-data: diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..9f62f7b --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,29 @@ +# Build stage +FROM node:18-alpine as build + +WORKDIR /app + +# Copy package files and install dependencies +COPY package*.json ./ +RUN npm ci + +# Copy source code +COPY . . + +# Build the application +RUN npm run build + +# Production stage +FROM nginx:alpine + +# Copy built assets from the build stage +COPY --from=build /app/build /usr/share/nginx/html + +# Copy nginx configuration +COPY nginx/nginx.conf /etc/nginx/conf.d/default.conf + +# Expose port 80 +EXPOSE 80 + +# Start nginx +CMD ["nginx", "-g", "daemon off;"] diff --git a/frontend/Dockerfile.dev b/frontend/Dockerfile.dev new file mode 100644 index 0000000..6832d56 --- /dev/null +++ b/frontend/Dockerfile.dev @@ -0,0 +1,18 @@ +FROM node:18-alpine + +WORKDIR /app + +# Copy package files +COPY package*.json ./ + +# Install dependencies +RUN npm install + +# Copy source code (this will be overridden by volume mount in dev) +COPY . . + +# Expose port +EXPOSE 3000 + +# Start development server with hot reloading +CMD ["npm", "start"] diff --git a/frontend/integration-guide.md b/frontend/integration-guide.md new file mode 100644 index 0000000..73a718a --- /dev/null +++ b/frontend/integration-guide.md @@ -0,0 +1,203 @@ +# FlowForge Frontend Integration Guide + +This guide provides instructions for integrating the new components we've created into the existing WorkflowEditor.js file. 
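    +
    +The snippets below assume your `WorkflowEditor.js` already defines the usual editor state. Here is a minimal sketch of those assumptions (illustrative only; the names are taken from the snippets in this guide and may differ in your file):
    +
    +```javascript
    +import { useState } from 'react';
    +import { useParams } from 'react-router-dom';
    +
    +// Inside the WorkflowEditor component body:
    +const { id } = useParams();                      // current workflow id from the route
    +const [workflow, setWorkflow] = useState(null);  // workflow metadata (name, description)
    +const [nodes, setNodes] = useState([]);          // React Flow nodes
    +const [edges, setEdges] = useState([]);          // React Flow edges
    +const [selectedNode, setSelectedNode] = useState(null);
    +const [isLoading, setIsLoading] = useState(false);
    +```
    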
+ +## Step 1: Import New Components + +Add these imports at the top of your `WorkflowEditor.js` file: + +```javascript +import Modal from '../components/common/Modal'; +import WorkflowEditorTabs from '../components/workflow/WorkflowEditorTabs'; +import WorkflowEditorActions from '../components/workflow/WorkflowEditorActions'; +import NodeTester from '../components/workflow/NodeTester'; +import ExecutionResults from '../components/execution/ExecutionResults'; +``` + +## Step 2: Add New State Variables + +Add these state variables inside your WorkflowEditor component: + +```javascript +const [showNodeTester, setShowNodeTester] = useState(false); +const [showExecutionResults, setShowExecutionResults] = useState(false); +const [latestExecutionId, setLatestExecutionId] = useState(null); +const [currentVersion, setCurrentVersion] = useState(1); +const [showTabs, setShowTabs] = useState(true); +``` + +## Step 3: Add New Methods + +Add these methods inside your WorkflowEditor component: + +```javascript +// Duplicate a node +const handleDuplicateNode = (node) => { + const newNode = { + ...node, + id: `${node.id}-copy-${Date.now()}`, + position: { + x: node.position.x + 50, + y: node.position.y + 50 + } + }; + + setNodes((nds) => nds.concat(newNode)); +}; + +// Delete a node +const handleDeleteNode = (node) => { + setNodes((nds) => nds.filter((n) => n.id !== node.id)); + setEdges((eds) => eds.filter((e) => e.source !== node.id && e.target !== node.id)); + setSelectedNode(null); +}; + +// Handle workflow execution +const handleExecuteWorkflow = (executionId) => { + setLatestExecutionId(executionId); +}; + +// Handle version restoration +const handleRestoreVersion = async (version) => { + try { + setIsLoading(true); + const response = await api.get(`/api/workflows/${id}/versions/${version}`); + const workflowData = response.data.workflow; + + // Update workflow data + setWorkflow({ + ...workflow, + name: workflowData.name, + description: workflowData.description + }); + + // Convert backend nodes/connections to React Flow format + const flowNodes = workflowData.nodes.map(node => ({ + id: node.id, + type: 'customNode', + position: { x: node.position_x, y: node.position_y }, + data: { + label: node.name, + nodeType: node.type, + config: node.config || {} + } + })); + + const flowEdges = workflowData.connections.map(conn => ({ + id: conn.id, + source: conn.source_node_id, + target: conn.target_node_id, + sourceHandle: conn.source_handle, + targetHandle: conn.target_handle + })); + + setNodes(flowNodes); + setEdges(flowEdges); + setCurrentVersion(version); + toast.success(`Restored workflow to version ${version}`); + } catch (error) { + console.error('Error restoring version:', error); + toast.error('Failed to restore workflow version'); + } finally { + setIsLoading(false); + } +}; +``` + +## Step 4: Update the Return Statement + +Replace the existing buttons in the workflow header with the WorkflowEditorActions component: + +```jsx +
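    +// Illustrative reconstruction: exact props depend on your WorkflowEditorActions API;
    +// onExecute receives the new execution id (see handleExecuteWorkflow in Step 3)
    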
    +<WorkflowEditorActions
    +  workflowId={id}
    +  onExecute={handleExecuteWorkflow}
    +/>
    
+``` + +## Step 5: Add the Tabs Component + +Add the WorkflowEditorTabs component at the bottom of your component, just before the closing div of the main container: + +```jsx +{/* Workflow Tabs */} +{id && id !== 'new' && showTabs && ( +
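    +  /* Illustrative: wires the version helpers defined in Step 3; adjust to your component's API */
    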
    +  <WorkflowEditorTabs
    +    workflowId={id}
    +    currentVersion={currentVersion}
    +    onRestoreVersion={handleRestoreVersion}
    +  />
    
+)} +``` + +## Step 6: Add Toggle Button for Tabs + +Add a button to toggle the tabs visibility in the workflow header: + +```jsx + +``` + +## Step 7: Fetch Current Version on Load + +Update the fetchData method to get the current version: + +```javascript +// If editing existing workflow, fetch it +if (id && id !== 'new') { + const workflowResponse = await api.get(`/api/workflows/${id}`); + const workflowData = workflowResponse.data.workflow; + + setWorkflow({ + id: workflowData.id, + name: workflowData.name, + description: workflowData.description + }); + + // Set current version + setCurrentVersion(workflowData.version || 1); + + // Rest of the existing code... +} +``` + +## Testing Your Integration + +After making these changes, you should be able to: + +1. Test individual nodes with the NodeTester component +2. View execution history in the tabs +3. Schedule workflows with the CronScheduler component +4. Manage workflow versions with the VersionHistory component +5. View and copy webhook URLs with the WebhookManager component + +If you encounter any issues, check the browser console for errors and verify that all the components are properly imported and integrated. diff --git a/frontend/nginx/nginx.conf b/frontend/nginx/nginx.conf new file mode 100644 index 0000000..10ad3f0 --- /dev/null +++ b/frontend/nginx/nginx.conf @@ -0,0 +1,45 @@ +server { + listen 80; + server_name localhost; + + # Root directory for static files + root /usr/share/nginx/html; + index index.html; + + # Compression settings + gzip on; + gzip_vary on; + gzip_min_length 10240; + gzip_proxied expired no-cache no-store private auth; + gzip_types text/plain text/css text/xml text/javascript application/javascript application/x-javascript application/xml; + gzip_disable "MSIE [1-6]\."; + + # Security headers + add_header X-Frame-Options "SAMEORIGIN"; + add_header X-XSS-Protection "1; mode=block"; + add_header X-Content-Type-Options "nosniff"; + + # Handle React Router paths + location / { + try_files $uri $uri/ /index.html; + } + + # Cache static assets + location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ { + expires 30d; + add_header Cache-Control "public, no-transform"; + } + + # API proxy + location /api/ { + proxy_pass http://backend:4000; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..5182b05 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,46 @@ +{ + "name": "flowforge-frontend", + "version": "1.0.0", + "private": true, + "dependencies": { + "@headlessui/react": "^1.7.15", + "@heroicons/react": "^2.0.18", + "axios": "^1.4.0", + "jwt-decode": "^3.1.2", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "reactflow": "^11.7.0", + "react-router-dom": "^6.14.1", + "react-scripts": "5.0.1", + "react-toastify": "^9.1.3", + "tailwindcss": "^3.3.2" + }, + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": "react-scripts test", + "eject": "react-scripts eject" + }, + "eslintConfig": { + "extends": [ + "react-app", + "react-app/jest" + ] + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + 
"last 1 safari version" + ] + }, + "devDependencies": { + "autoprefixer": "^10.4.14", + "postcss": "^8.4.24" + } +} diff --git a/frontend/public/index.html b/frontend/public/index.html new file mode 100644 index 0000000..e3c33d9 --- /dev/null +++ b/frontend/public/index.html @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + FlowForge + + + +
+ + diff --git a/frontend/public/manifest.json b/frontend/public/manifest.json new file mode 100644 index 0000000..df0c015 --- /dev/null +++ b/frontend/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "FlowForge", + "name": "FlowForge Automation Platform", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#0ea5e9", + "background_color": "#f9fafb" +} diff --git a/frontend/src/App.js b/frontend/src/App.js new file mode 100644 index 0000000..f459c1f --- /dev/null +++ b/frontend/src/App.js @@ -0,0 +1,67 @@ +import React from 'react'; +import { Routes, Route, Navigate } from 'react-router-dom'; +import { useAuth } from './hooks/useAuth'; + +// Layouts +import MainLayout from './components/layouts/MainLayout'; +import AuthLayout from './components/layouts/AuthLayout'; + +// Pages +import Dashboard from './pages/Dashboard'; +import WorkflowEditor from './pages/WorkflowEditor'; +import WorkflowList from './pages/WorkflowList'; +import Login from './pages/Login'; +import Register from './pages/Register'; +import NotFound from './pages/NotFound'; +import Profile from './pages/Profile'; +import TestPage from './pages/TestPage'; +import TemplatesPage from './pages/TemplatesPage'; + +// Protected route component +const ProtectedRoute = ({ children }) => { + const { isAuthenticated, isLoading } = useAuth(); + + if (isLoading) { + return
Loading...
; + } + + if (!isAuthenticated) { + return ; + } + + return children; +}; + +function App() { + return ( + + {/* Auth routes */} + }> + } /> + } /> + + + {/* Protected routes */} + + + + }> + } /> + } /> + } /> + } /> + } /> + } /> + + + {/* Test route - accessible without authentication */} + } /> + + {/* 404 route */} + } /> + + ); +} + +export default App; diff --git a/frontend/src/components/common/Modal.js b/frontend/src/components/common/Modal.js new file mode 100644 index 0000000..869d9e0 --- /dev/null +++ b/frontend/src/components/common/Modal.js @@ -0,0 +1,92 @@ +import React, { Fragment } from 'react'; +import { Dialog, Transition } from '@headlessui/react'; +import { XMarkIcon } from '@heroicons/react/24/outline'; + +const Modal = ({ + isOpen, + onClose, + title, + children, + size = 'md', + showCloseButton = true, + footer = null +}) => { + // Size classes + const sizeClasses = { + sm: 'sm:max-w-lg', + md: 'sm:max-w-xl', + lg: 'sm:max-w-3xl', + xl: 'sm:max-w-5xl', + full: 'sm:max-w-full sm:h-screen' + }; + + return ( + + +
+ + + + + {/* This element is to trick the browser into centering the modal contents. */} + + + +
+ {/* Header */} + {title && ( +
+ + {title} + + {showCloseButton && ( + + )} +
+ )} + + {/* Body */} +
+ {children} +
+ + {/* Footer */} + {footer && ( +
+ {footer} +
+ )} +
+
+
+
+
+ ); +}; + +export default Modal; diff --git a/frontend/src/components/execution/ExecutionHistory.js b/frontend/src/components/execution/ExecutionHistory.js new file mode 100644 index 0000000..92a8bf3 --- /dev/null +++ b/frontend/src/components/execution/ExecutionHistory.js @@ -0,0 +1,304 @@ +import React, { useState, useEffect } from 'react'; +import { toast } from 'react-toastify'; +import workflowService from '../../services/workflow'; +import Modal from '../common/Modal'; +import ExecutionResults from './ExecutionResults'; +import { + ClockIcon, + CheckCircleIcon, + ExclamationCircleIcon, + ArrowPathIcon, + EyeIcon +} from '@heroicons/react/24/outline'; + +const ExecutionHistory = ({ workflowId }) => { + const [executions, setExecutions] = useState([]); + const [loading, setLoading] = useState(true); + const [selectedExecution, setSelectedExecution] = useState(null); + const [showExecutionModal, setShowExecutionModal] = useState(false); + const [pagination, setPagination] = useState({ + page: 1, + limit: 10, + total: 0 + }); + + // Fetch execution history + useEffect(() => { + const fetchExecutionHistory = async () => { + try { + setLoading(true); + const response = await workflowService.getExecutionHistory(workflowId, { + limit: pagination.limit, + offset: (pagination.page - 1) * pagination.limit + }).catch(err => { + // If executions not found, handle gracefully (new workflow) + if (err.response && err.response.status === 404) { + console.log('No execution history found - this is likely a new workflow'); + return { data: { executions: [], total: 0 } }; + } + throw err; + }); + + setExecutions(response.data.executions || []); + setPagination(prev => ({ + ...prev, + total: response.data.total || 0 + })); + } catch (error) { + console.error('Error fetching execution history:', error); + // Don't show error toast for 404s as they're expected for new workflows + if (!error.response || error.response.status !== 404) { + toast.error('Failed to load execution history'); + } + } finally { + setLoading(false); + } + }; + + if (workflowId) { + fetchExecutionHistory(); + } + }, [workflowId, pagination.page, pagination.limit]); + + // Format timestamp + const formatTimestamp = (timestamp) => { + if (!timestamp) return ''; + const date = new Date(timestamp); + return date.toLocaleString(); + }; + + // Format duration + const formatDuration = (startTime, endTime) => { + if (!startTime || !endTime) return ''; + const start = new Date(startTime); + const end = new Date(endTime); + const durationMs = end - start; + + if (durationMs < 1000) { + return `${durationMs}ms`; + } else if (durationMs < 60000) { + return `${(durationMs / 1000).toFixed(2)}s`; + } else { + const minutes = Math.floor(durationMs / 60000); + const seconds = ((durationMs % 60000) / 1000).toFixed(0); + return `${minutes}m ${seconds}s`; + } + }; + + // Get status icon + const getStatusIcon = (status) => { + switch (status) { + case 'success': + return ; + case 'error': + return ; + case 'running': + return ; + default: + return ; + } + }; + + // View execution details + const viewExecution = (execution) => { + setSelectedExecution(execution); + setShowExecutionModal(true); + }; + + // Handle page change + const handlePageChange = (newPage) => { + if (newPage > 0 && newPage <= Math.ceil(pagination.total / pagination.limit)) { + setPagination(prev => ({ + ...prev, + page: newPage + })); + } + }; + + return ( +
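    +    // Renders: panel header, loading skeleton, the executions list with status icons and pagination (or an empty state), and a details modal
    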
+
+

+ Execution History +

+

+ View past executions of this workflow +

+
+ + {loading ? ( +
+
+ {[...Array(3)].map((_, i) => ( +
+ ))} +
+
+ ) : executions.length > 0 ? ( + <> +
    + {executions.map((execution) => ( +
  • +
    +
    +
    + {getStatusIcon(execution.status)} +
    +
    +
    + Execution #{execution.id} +
    +
    + Started: {formatTimestamp(execution.started_at)} +
    + {execution.completed_at && ( +
    + Duration: {formatDuration(execution.started_at, execution.completed_at)} +
    + )} +
    +
    +
    + +
    +
    +
  • + ))} +
+ + {/* Pagination */} + {pagination.total > pagination.limit && ( +
+
+ + +
+
+
+

+ Showing {(pagination.page - 1) * pagination.limit + 1} to{' '} + + {Math.min(pagination.page * pagination.limit, pagination.total)} + {' '} + of {pagination.total} results +

+
+
+ +
+
+
+ )} + + ) : ( +
+ +

No executions yet

+

+ This workflow hasn't been executed yet. +

+
+ )} + + {/* Execution details modal */} + {selectedExecution && ( + setShowExecutionModal(false)} + size="lg" + > + setShowExecutionModal(false)} + /> + + )} +
+ ); +}; + +export default ExecutionHistory; diff --git a/frontend/src/components/execution/ExecutionResults.js b/frontend/src/components/execution/ExecutionResults.js new file mode 100644 index 0000000..e9eb36e --- /dev/null +++ b/frontend/src/components/execution/ExecutionResults.js @@ -0,0 +1,262 @@ +import React, { useState, useEffect } from 'react'; +import { toast } from 'react-toastify'; +import api from '../../services/api'; +import { XMarkIcon, ChevronDownIcon, ChevronUpIcon } from '@heroicons/react/24/outline'; + +const ExecutionResults = ({ workflowId, executionId, onClose }) => { + const [execution, setExecution] = useState(null); + const [logs, setLogs] = useState([]); + const [loading, setLoading] = useState(true); + const [expandedNodes, setExpandedNodes] = useState({}); + + useEffect(() => { + const fetchExecutionResults = async () => { + try { + setLoading(true); + + // Fetch execution details + const executionResponse = await api.get(`/api/workflows/${workflowId}/executions/${executionId}`) + .catch(err => { + // If execution not found, handle gracefully + if (err.response && err.response.status === 404) { + console.log('Execution not found or still initializing'); + return { data: { execution: { status: 'initializing' } } }; + } + throw err; + }); + + setExecution(executionResponse.data.execution); + + // Only fetch logs if execution exists and is not initializing + if (executionResponse.data.execution && executionResponse.data.execution.status !== 'initializing') { + const logsResponse = await api.get(`/api/workflows/${workflowId}/executions/${executionId}/logs`) + .catch(err => { + // If logs not found, handle gracefully + if (err.response && err.response.status === 404) { + return { data: { logs: [] } }; + } + throw err; + }); + + setLogs(logsResponse.data.logs || []); + } + } catch (error) { + console.error('Error fetching execution results:', error); + toast.error('Failed to load execution results'); + } finally { + setLoading(false); + } + }; + + if (workflowId && executionId) { + fetchExecutionResults(); + } + }, [workflowId, executionId]); + + // Toggle expanded state for a node + const toggleNodeExpanded = (nodeId) => { + setExpandedNodes(prev => ({ + ...prev, + [nodeId]: !prev[nodeId] + })); + }; + + // Format timestamp + const formatTimestamp = (timestamp) => { + if (!timestamp) return ''; + const date = new Date(timestamp); + return date.toLocaleString(); + }; + + // Format duration + const formatDuration = (startTime, endTime) => { + if (!startTime || !endTime) return ''; + const start = new Date(startTime); + const end = new Date(endTime); + const durationMs = end - start; + + if (durationMs < 1000) { + return `${durationMs}ms`; + } else if (durationMs < 60000) { + return `${(durationMs / 1000).toFixed(2)}s`; + } else { + const minutes = Math.floor(durationMs / 60000); + const seconds = ((durationMs % 60000) / 1000).toFixed(0); + return `${minutes}m ${seconds}s`; + } + }; + + // Get status badge + const getStatusBadge = (status) => { + switch (status) { + case 'success': + return ( + + Success + + ); + case 'error': + return ( + + Error + + ); + case 'running': + return ( + + Running + + ); + case 'initializing': + return ( + + Initializing + + ); + default: + return ( + + {status} + + ); + } + }; + + return ( +
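    +    // Renders: header with a close button, an execution summary card, then expandable per-node logs
    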
+ {/* Header */} +
+

+ Execution Results +

+ +
+ + {/* Content */} +
+ {loading ? ( +
+
+
+ ) : ( +
+ {/* Execution summary */} + {execution && ( +
+
+
+
Status
+
{getStatusBadge(execution.status)}
+
+
+
Started At
+
{formatTimestamp(execution.started_at)}
+
+
+
Completed At
+
{formatTimestamp(execution.completed_at)}
+
+
+
Duration
+
+ {formatDuration(execution.started_at, execution.completed_at)} +
+
+ {execution.error && ( +
+
Error
+
+ {execution.error} +
+
+ )} +
+
+ )} + + {/* Node execution logs */} +
+

Node Execution Logs

+ + {logs.length > 0 ? ( +
+ {logs.map((log) => ( +
+
toggleNodeExpanded(log.id)} + > +
+
{log.node_name}
+
({log.node_type})
+
{getStatusBadge(log.status)}
+
+
+
+ {formatDuration(log.started_at, log.completed_at)} +
+ {expandedNodes[log.id] ? ( + + ) : ( + + )} +
+
+ + {expandedNodes[log.id] && ( +
+
+
+
Input
+
+                                {JSON.stringify(log.input_data, null, 2)}
+                              
+
+
+
Output
+
+                                {JSON.stringify(log.output_data, null, 2)}
+                              
+
+
+ + {log.error && ( +
+
Error
+
+                                {log.error}
+                              
+
+ )} +
+ )} +
+ ))} +
+ ) : ( +

No execution logs available

+ )} +
+
+ )} +
+ + {/* Footer */} +
+ +
+
+ ); +}; + +export default ExecutionResults; diff --git a/frontend/src/components/layouts/AuthLayout.js b/frontend/src/components/layouts/AuthLayout.js new file mode 100644 index 0000000..c40a332 --- /dev/null +++ b/frontend/src/components/layouts/AuthLayout.js @@ -0,0 +1,37 @@ +import React from 'react'; +import { Outlet, Navigate } from 'react-router-dom'; +import { useAuth } from '../../hooks/useAuth'; + +const AuthLayout = () => { + const { isAuthenticated, isLoading } = useAuth(); + + // Show loading indicator while checking authentication + if (isLoading) { + return ( +
+
+
    + ); + } + + // Redirect to dashboard if already authenticated + if (isAuthenticated) { + return <Navigate to="/" />; + } + + return ( +
    
+
+
+

FlowForge

+

+ Your personal automation platform +

+
+ +
+
+ ); +}; + +export default AuthLayout; diff --git a/frontend/src/components/layouts/MainLayout.js b/frontend/src/components/layouts/MainLayout.js new file mode 100644 index 0000000..c24da7e --- /dev/null +++ b/frontend/src/components/layouts/MainLayout.js @@ -0,0 +1,187 @@ +import React, { useState } from 'react'; +import { Outlet, Link, useLocation, useNavigate } from 'react-router-dom'; +import { useAuth } from '../../hooks/useAuth'; + +// Icons +import { + Bars3Icon, + XMarkIcon, + HomeIcon, + ArrowPathIcon, + DocumentDuplicateIcon, + UserIcon, + ArrowRightOnRectangleIcon +} from '@heroicons/react/24/outline'; + +const MainLayout = () => { + const [sidebarOpen, setSidebarOpen] = useState(false); + const { currentUser, logout } = useAuth(); + const location = useLocation(); + const navigate = useNavigate(); + + // Navigation items + const navigation = [ + { name: 'Dashboard', href: '/', icon: HomeIcon }, + { name: 'Workflows', href: '/workflows', icon: ArrowPathIcon }, + { name: 'Templates', href: '/templates', icon: DocumentDuplicateIcon }, + ]; + + // Check if a navigation item is active + const isActive = (path) => { + if (path === '/') { + return location.pathname === '/'; + } + return location.pathname.startsWith(path); + }; + + return ( +
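    +    // Layout: mobile sidebar overlay, fixed desktop sidebar, and the routed page content via Outlet
    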
+ {/* Mobile sidebar */} +
+
setSidebarOpen(false)}>
+
+
+ +
+
+

FlowForge

+
+
+ +
+
+
+
+

{currentUser?.email}

+ +
+
+
+
+
+ + {/* Desktop sidebar */} +
+
+
+
+

FlowForge

+
+
+ +
+
+
+
+

{currentUser?.email}

+ +
+
+
+
+
+
+ + {/* Main content */} +
+
+ +
+
+

+ {location.pathname === '/' && 'Dashboard'} + {location.pathname === '/workflows' && 'Workflows'} + {location.pathname.startsWith('/workflows/new') && 'Create Workflow'} + {location.pathname.match(/^\/workflows\/[^/]+$/) && 'Edit Workflow'} + {location.pathname === '/templates' && 'Automation Templates'} + {location.pathname === '/profile' && 'Profile'} +

+
+
+ +
+
+
+ +
+
+
+ +
+
+
+
+
+ ); +}; + +export default MainLayout; diff --git a/frontend/src/components/scheduling/CronScheduler.js b/frontend/src/components/scheduling/CronScheduler.js new file mode 100644 index 0000000..a8afa90 --- /dev/null +++ b/frontend/src/components/scheduling/CronScheduler.js @@ -0,0 +1,353 @@ +import React, { useState, useEffect } from 'react'; +import { toast } from 'react-toastify'; +import api from '../../services/api'; +import { + CalendarIcon, + ClockIcon, + PlusIcon, + TrashIcon +} from '@heroicons/react/24/outline'; + +const CronScheduler = ({ workflowId }) => { + const [schedules, setSchedules] = useState([]); + const [loading, setLoading] = useState(true); + const [showAddForm, setShowAddForm] = useState(false); + const [newSchedule, setNewSchedule] = useState({ + cron: '0 0 * * *', // Default: daily at midnight + enabled: true, + name: '' + }); + + // Predefined cron expressions + const cronPresets = [ + { label: 'Every minute', value: '* * * * *' }, + { label: 'Every hour', value: '0 * * * *' }, + { label: 'Every day at midnight', value: '0 0 * * *' }, + { label: 'Every Monday at 9:00 AM', value: '0 9 * * 1' }, + { label: 'Every month on the 1st at midnight', value: '0 0 1 * *' } + ]; + + // Fetch schedules + useEffect(() => { + const fetchSchedules = async () => { + try { + setLoading(true); + const response = await api.get(`/api/workflows/${workflowId}/schedules`) + .catch(err => { + // If schedules not found, handle gracefully (new workflow) + if (err.response && err.response.status === 404) { + console.log('No schedules found - this is likely a new workflow'); + return { data: { schedules: [] } }; + } + throw err; + }); + + setSchedules(response.data.schedules || []); + } catch (error) { + console.error('Error fetching schedules:', error); + // Don't show error toast for 404s as they're expected for new workflows + if (!error.response || error.response.status !== 404) { + toast.error('Failed to load workflow schedules'); + } + } finally { + setLoading(false); + } + }; + + if (workflowId) { + fetchSchedules(); + } + }, [workflowId]); + + // Add new schedule + const handleAddSchedule = async (e) => { + e.preventDefault(); + + if (!newSchedule.name.trim()) { + toast.error('Schedule name is required'); + return; + } + + try { + const response = await api.post(`/api/workflows/${workflowId}/schedules`, newSchedule); + setSchedules([...schedules, response.data.schedule]); + setNewSchedule({ + cron: '0 0 * * *', + enabled: true, + name: '' + }); + setShowAddForm(false); + toast.success('Schedule added successfully'); + } catch (error) { + console.error('Error adding schedule:', error); + toast.error('Failed to add schedule'); + } + }; + + // Delete schedule + const handleDeleteSchedule = async (scheduleId) => { + try { + await api.delete(`/api/workflows/${workflowId}/schedules/${scheduleId}`); + setSchedules(schedules.filter(s => s.id !== scheduleId)); + toast.success('Schedule deleted successfully'); + } catch (error) { + console.error('Error deleting schedule:', error); + toast.error('Failed to delete schedule'); + } + }; + + // Toggle schedule enabled/disabled + const handleToggleSchedule = async (schedule) => { + try { + const updatedSchedule = { ...schedule, enabled: !schedule.enabled }; + await api.put(`/api/workflows/${workflowId}/schedules/${schedule.id}`, updatedSchedule); + setSchedules(schedules.map(s => s.id === schedule.id ? { ...s, enabled: !s.enabled } : s)); + toast.success(`Schedule ${updatedSchedule.enabled ? 
'enabled' : 'disabled'} successfully`); + } catch (error) { + console.error('Error updating schedule:', error); + toast.error('Failed to update schedule'); + } + }; + + // Get human-readable description of cron expression + const getCronDescription = (cron) => { + const preset = cronPresets.find(p => p.value === cron); + if (preset) { + return preset.label; + } + + // Basic descriptions for common patterns + const parts = cron.split(' '); + if (parts.length !== 5) return cron; + + const [minute, hour, dayOfMonth, month, dayOfWeek] = parts; + + if (minute === '*' && hour === '*' && dayOfMonth === '*' && month === '*' && dayOfWeek === '*') { + return 'Every minute'; + } + + if (minute === '0' && hour === '*' && dayOfMonth === '*' && month === '*' && dayOfWeek === '*') { + return 'Every hour'; + } + + if (minute === '0' && hour === '0' && dayOfMonth === '*' && month === '*' && dayOfWeek === '*') { + return 'Every day at midnight'; + } + + return cron; + }; + + // Format next run time + const formatNextRun = (nextRun) => { + if (!nextRun) return 'Not scheduled'; + + const date = new Date(nextRun); + return date.toLocaleString(); + }; + + return ( +
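    +    // Renders: header with an "Add Schedule" toggle, the new-schedule form, and the schedule list (or an empty state)
    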
+
+
+

+ Scheduled Executions +

+

+ Set up automatic workflow execution on a schedule +

+
+ +
+ + {/* Add schedule form */} + {showAddForm && ( +
+
+
+
+ +
    + <input type="text" value={newSchedule.name} onChange={(e) => setNewSchedule({ ...newSchedule, name: e.target.value })}
    + className="shadow-sm focus:ring-primary-500 focus:border-primary-500 block w-full sm:text-sm border-gray-300 rounded-md"
    + placeholder="Daily Backup"
    + required
    + />
    +
    
+
+ +
+ +
    + <input type="text" value={newSchedule.cron} onChange={(e) => setNewSchedule({ ...newSchedule, cron: e.target.value })}
    + className="shadow-sm focus:ring-primary-500 focus:border-primary-500 block w-full sm:text-sm border-gray-300 rounded-md"
    + placeholder="0 0 * * *"
    + required
    + />
    +
    
+
+ +
+ +
+ {cronPresets.map((preset) => ( + + ))} +
+

+ Format: minute hour day-of-month month day-of-week +
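    + {/* Example: "30 2 * * 1-5" fires at 02:30, Monday through Friday */}
    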

+
+ +
+
+
    + <input type="checkbox" checked={newSchedule.enabled} onChange={(e) => setNewSchedule({ ...newSchedule, enabled: e.target.checked })}
    + className="focus:ring-primary-500 h-4 w-4 text-primary-600 border-gray-300 rounded"
    + />
    +
    
+
+ +

Enable this schedule immediately after creation

+
+
+
+
+ +
+ + +
+
+
+ )} + + {/* Schedules list */} + {loading ? ( +
+
+ {[...Array(2)].map((_, i) => ( +
+ ))} +
+
+ ) : schedules.length > 0 ? ( +
    + {schedules.map((schedule) => ( +
  • +
    +
    +
    + +
    +
    +
    +
    {schedule.name}
    + + {schedule.enabled ? 'Enabled' : 'Disabled'} + +
    +
    + {getCronDescription(schedule.cron)} +
    +
    + + Next run: {formatNextRun(schedule.next_run)} +
    +
    +
    +
    + + +
    +
    +
  • + ))} +
+ ) : ( +
+ +

No schedules

+

+ Create a schedule to run this workflow automatically. +

+ {!showAddForm && ( +
+ +
+ )} +
+ )} +
+ ); +}; + +export default CronScheduler; diff --git a/frontend/src/components/workflow/CustomNode.js b/frontend/src/components/workflow/CustomNode.js new file mode 100644 index 0000000..43081e3 --- /dev/null +++ b/frontend/src/components/workflow/CustomNode.js @@ -0,0 +1,108 @@ +import React, { memo } from 'react'; +import { Handle, Position } from 'reactflow'; + +const getNodeColor = (type) => { + switch (type) { + case 'webhook': + return 'border-blue-500'; + case 'http-request': + return 'border-green-500'; + case 'function': + return 'border-purple-500'; + case 'delay': + return 'border-amber-500'; + case 'email': + return 'border-red-500'; + case 'logger': + return 'border-gray-500'; + default: + return 'border-gray-300'; + } +}; + +const getNodeIcon = (type) => { + switch (type) { + case 'webhook': + return ( + + + + ); + case 'http-request': + return ( + + + + ); + case 'function': + return ( + + + + ); + case 'delay': + return ( + + + + ); + case 'email': + return ( + + + + ); + case 'logger': + return ( + + + + ); + default: + return ( + + + + ); + } +}; + +const CustomNode = ({ data, selected }) => { + const nodeColor = getNodeColor(data.nodeType); + const nodeIcon = getNodeIcon(data.nodeType); + + return ( +
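    +    // Node card: type-colored border, icon, label, and the node type as a caption, between the input and output handles
    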
+ {/* Input handle */} + + + {/* Node content */} +
+
+ {nodeIcon} +
+
+
+ {data.label} +
+
+ {data.nodeType} +
+
+
+ + {/* Output handle */} + +
+ ); +}; + +export default memo(CustomNode); diff --git a/frontend/src/components/workflow/NodeConfigPanel.js b/frontend/src/components/workflow/NodeConfigPanel.js new file mode 100644 index 0000000..acc1a70 --- /dev/null +++ b/frontend/src/components/workflow/NodeConfigPanel.js @@ -0,0 +1,275 @@ +import React, { useState, useEffect } from 'react'; +import { XMarkIcon } from '@heroicons/react/24/outline'; +import api from '../../services/api'; + +const NodeConfigPanel = ({ node, onConfigUpdate, onClose }) => { + const [nodeMeta, setNodeMeta] = useState(null); + const [config, setConfig] = useState({}); + const [nodeName, setNodeName] = useState(''); + const [loading, setLoading] = useState(true); + + useEffect(() => { + const fetchNodeMeta = async () => { + try { + setLoading(true); + const response = await api.get(`/api/nodes/${node.data.nodeType}/meta`); + setNodeMeta(response.data); + + // Initialize config with existing values or defaults + const initialConfig = { ...node.data.config }; + setConfig(initialConfig); + + // Set node name + setNodeName(node.data.label); + } catch (error) { + console.error('Error fetching node metadata:', error); + } finally { + setLoading(false); + } + }; + + if (node) { + fetchNodeMeta(); + } + }, [node]); + + // Handle config field change + const handleConfigChange = (key, value) => { + const updatedConfig = { ...config, [key]: value }; + setConfig(updatedConfig); + onConfigUpdate(node.id, updatedConfig); + }; + + // Handle node name change + const handleNameChange = (e) => { + const newName = e.target.value; + setNodeName(newName); + + // Update node data in parent component + const updatedNode = { + ...node, + data: { + ...node.data, + label: newName + } + }; + + // This will trigger a re-render of the node with the new name + onConfigUpdate(node.id, node.data.config, updatedNode); + }; + + // Render form field based on schema type + const renderFormField = (key, schema) => { + const value = config[key] !== undefined ? config[key] : (schema.default || ''); + + switch (schema.type) { + case 'string': + if (schema.enum) { + return ( + + ); + } + return ( + handleConfigChange(key, e.target.value)} + placeholder={schema.description || ''} + className="mt-1 block w-full border-gray-300 rounded-md shadow-sm focus:ring-primary-500 focus:border-primary-500 sm:text-sm" + /> + ); + + case 'number': + case 'integer': + return ( + handleConfigChange(key, parseFloat(e.target.value))} + placeholder={schema.description || ''} + min={schema.minimum} + max={schema.maximum} + step={schema.type === 'integer' ? 1 : 'any'} + className="mt-1 block w-full border-gray-300 rounded-md shadow-sm focus:ring-primary-500 focus:border-primary-500 sm:text-sm" + /> + ); + + case 'boolean': + return ( + handleConfigChange(key, e.target.checked)} + className="h-4 w-4 text-primary-600 focus:ring-primary-500 border-gray-300 rounded" + /> + ); + + case 'object': + return ( +