Best Practices for Business-as-Code
Production-ready best practices for building robust, scalable, and secure Business-as-Code systems
Production-ready patterns and practices for building autonomous businesses that are robust, scalable, secure, and maintainable.
Code Organization
Project Structure
Organize your Business-as-Code project for clarity and maintainability:
my-business/
├── src/
│ ├── business/ # Business entity definitions
│ │ ├── organization.ts
│ │ ├── products.ts
│ │ └── customers.ts
│ ├── workflows/ # Business workflows
│ │ ├── order-processing.ts
│ │ ├── customer-onboarding.ts
│ │ └── inventory-management.ts
│ ├── agents/ # Autonomous agents
│ │ ├── sales-agent.ts
│ │ ├── support-agent.ts
│ │ └── operations-agent.ts
│ ├── functions/ # Reusable business functions
│ │ ├── pricing.ts
│ │ ├── notifications.ts
│ │ └── analytics.ts
│ ├── integrations/ # External system integrations
│ │ ├── stripe.ts
│ │ ├── sendgrid.ts
│ │ └── shopify.ts
│ ├── schema/ # Data models and types
│ │ ├── types.ts
│ │ └── validations.ts
│ └── utils/ # Utility functions
│ ├── logger.ts
│ ├── retry.ts
│ └── cache.ts
├── tests/ # Test files
│ ├── unit/
│ ├── integration/
│ └── e2e/
├── config/ # Configuration
│ ├── development.ts
│ ├── staging.ts
│ └── production.ts
├── docs/ # Documentation
└── scripts/ # Build and deployment scripts
Module Organization
Group related functionality into cohesive modules:
// src/business/products.ts
import $, { db } from 'sdk.do'
/**
 * Product CRUD helpers built on the sdk.do entity ($) and db clients.
 * NOTE(review): despite the original "with validation" comment, create()
 * performs no input validation — it only stamps metadata before persisting.
 */
export class ProductModule {
// Create product, stamping $type plus creation/production dates
async create(data: ProductInput) {
const product = await $.Product.create({
...data,
$type: 'Product',
dateCreated: new Date(),
productionDate: new Date(),
})
return product
}
// Replace the product's offers with a single USD offer valid for 30 days
async updatePrice(productId: string, newPrice: number) {
return db.Products.update(productId, {
offers: [
{
$type: 'Offer',
price: newPrice,
priceCurrency: 'USD',
priceValidUntil: this.calculatePriceExpiry(),
},
],
})
}
// Link the product to a category via a belongsTo relationship
async addToCategory(productId: string, categoryId: string) {
return db.relate(productId, $.belongsTo, categoryId)
}
// Offers expire 30 days from "now" (used by updatePrice)
private calculatePriceExpiry() {
const expiry = new Date()
expiry.setDate(expiry.getDate() + 30)
return expiry
}
}
Function Composition
Build complex operations from simple, composable functions:
// src/functions/pricing.ts
import { ai } from 'sdk.do'
// Small, focused functions
/** Returns the cost marked up by the given margin (e.g. margin 0.5 → 1.5× cost). */
export const calculateBasePrice = (cost: number, margin: number): number => cost * (1 + margin)
/** Reduces a price by a fractional discount (e.g. 0.25 → 25% off). */
export const applyDiscount = (price: number, discount: number): number => price * (1 - discount)
/** Rounds a price to the nearest multiple of `nearest` (e.g. 0.99 price points). */
export const roundToNearest = (price: number, nearest: number): number => {
  const steps = Math.round(price / nearest)
  return steps * nearest
}
// Compose into higher-level function
/**
 * Computes a customer-facing price: cost marked up by `margin`, reduced by an
 * AI-chosen discount for the customer tier, then rounded to a .99-style point.
 * NOTE(review): if `ai.decide` rejects, the error propagates to the caller —
 * there is no fallback here (see withFallback for that pattern).
 */
export const calculateFinalPrice = async (cost: number, margin: number, customerTier: string) => {
// Base calculation
const basePrice = calculateBasePrice(cost, margin)
// AI-driven discount; `percentage` is presumably a fraction (0.1 = 10%) — TODO confirm contract
const discount = await ai.decide('customer-discount', {
tier: customerTier,
basePrice,
})
// Apply discount and round to the nearest .99 price point
const discountedPrice = applyDiscount(basePrice, discount.percentage)
return roundToNearest(discountedPrice, 0.99)
}
Workflow Organization
Structure workflows as discrete, testable units:
// src/workflows/order-processing.ts
import $, { on, send, db } from 'sdk.do'
export class OrderProcessingWorkflow {
async initialize() {
// Register event handlers
on($.Order.created, this.handleOrderCreated.bind(this))
on($.Payment.completed, this.handlePaymentCompleted.bind(this))
on($.Order.fulfilled, this.handleOrderFulfilled.bind(this))
}
private async handleOrderCreated(order: Order) {
// Validate order
const validation = await this.validateOrder(order)
if (!validation.valid) {
await send($.Order.cancel, { order, reason: validation.reason })
return
}
// Process payment
await send($.Payment.process, {
order,
amount: order.totalPrice,
method: order.paymentMethod,
})
}
private async handlePaymentCompleted(payment: Payment) {
const order = await db.related(payment, $.forOrder, $.Order)
// Update inventory
await send($.Inventory.reserve, {
items: order.orderedItem,
})
// Schedule fulfillment
await send($.Order.fulfill, { order })
}
private async handleOrderFulfilled(order: Order) {
// Send confirmation
await send($.Email.send, {
to: order.customer.email,
template: 'order-shipped',
data: { order },
})
// Update analytics
await send($.Analytics.track, {
event: 'order_completed',
properties: { orderId: order.$id, revenue: order.totalPrice },
})
}
private async validateOrder(order: Order): Promise<ValidationResult> {
// Validate inventory
for (const item of order.orderedItem) {
const available = await db.query($.Product, {
$id: item.orderedItem.$id,
'quantityAvailable:gte': item.orderQuantity,
})
if (!available) {
return {
valid: false,
reason: `Insufficient inventory for ${item.orderedItem.name}`,
}
}
}
return { valid: true }
}
}
Agent Management
Organize agents with clear responsibilities:
// src/agents/sales-agent.ts
import $, { ai, db, send } from 'sdk.do'
export class SalesAgent {
private context: AgentContext
constructor(config: AgentConfig) {
this.context = {
businessId: config.businessId,
personality: config.personality || 'professional',
knowledgeBase: config.knowledgeBase,
}
}
// Handle customer inquiry
async handleInquiry(inquiry: CustomerInquiry): Promise<Response> {
// Understand intent
const intent = await ai.classify({
text: inquiry.message,
categories: ['product_question', 'pricing', 'support', 'sales'],
})
// Route to appropriate handler
switch (intent.category) {
case 'product_question':
return this.answerProductQuestion(inquiry)
case 'pricing':
return this.providePricing(inquiry)
case 'sales':
return this.handleSalesOpportunity(inquiry)
default:
return this.handoffToSupport(inquiry)
}
}
private async answerProductQuestion(inquiry: CustomerInquiry) {
// Retrieve relevant products
const products = await db.search($.Product, {
query: inquiry.message,
limit: 3,
})
// Generate response
const response = await ai.generate({
model: 'gpt-5',
prompt: `Answer customer question about products`,
context: {
question: inquiry.message,
products,
personality: this.context.personality,
},
})
return {
message: response.text,
suggestedProducts: products,
followUpActions: response.actions,
}
}
private async handleSalesOpportunity(inquiry: CustomerInquiry) {
// Create lead
const lead = await $.Lead.create({
customer: inquiry.customer,
source: 'chat',
inquiry: inquiry.message,
assignedTo: 'sales-agent',
})
// Qualify lead
const qualification = await ai.decide('lead-qualification', {
lead,
customer: inquiry.customer,
})
if (qualification.score > 0.7) {
// High-quality lead - engage immediately
await send($.Meeting.schedule, {
lead,
type: 'discovery_call',
})
}
return {
message: qualification.response,
nextSteps: qualification.actions,
}
}
}
Error Handling
Error Types and Patterns
Define clear error types for different failure scenarios:
// src/utils/errors.ts
/**
 * Base class for all domain errors.
 *
 * @param message    Human-readable description.
 * @param code       Stable machine-readable error code (consumed by retry logic).
 * @param statusCode HTTP status surfaced to API callers (default 500).
 * @param retryable  Whether the failed operation may safely be attempted again.
 * @param metadata   Structured context for logging/monitoring.
 */
export class BusinessError extends Error {
  constructor(
    message: string,
    public code: string,
    public statusCode: number = 500,
    public retryable: boolean = false,
    public metadata?: Record<string, any>
  ) {
    super(message)
    this.name = this.constructor.name
    // captureStackTrace is a V8-only extension; guard it so these classes
    // also work on non-V8 runtimes (JavaScriptCore, SpiderMonkey, etc.).
    const v8Error = Error as ErrorConstructor & { captureStackTrace?: (target: object, ctor?: unknown) => void }
    v8Error.captureStackTrace?.(this, this.constructor)
  }
}
// Specific error types
/** 400 — input failed validation; never retryable. */
export class ValidationError extends BusinessError {
  constructor(message: string, metadata?: Record<string, any>) {
    super(message, 'VALIDATION_ERROR', 400, false, metadata)
  }
}
/** 404 — a referenced entity does not exist; never retryable. */
export class ResourceNotFoundError extends BusinessError {
  constructor(resource: string, id: string) {
    super(`${resource} with id ${id} not found`, 'RESOURCE_NOT_FOUND', 404, false, { resource, id })
  }
}
/** 502 — an upstream dependency failed; flagged retryable for withRetry. */
export class ExternalServiceError extends BusinessError {
  constructor(service: string, originalError: Error) {
    super(`External service ${service} failed: ${originalError.message}`, 'EXTERNAL_SERVICE_ERROR', 502, true, {
      service,
      originalError: originalError.message,
    })
  }
}
/** 429 — caller exceeded a rate limit; retryable after `retryAfter` seconds. */
export class RateLimitError extends BusinessError {
constructor(retryAfter: number) {
// retryAfter (seconds) is kept in metadata so callers can honor the backoff hint
super('Rate limit exceeded', 'RATE_LIMIT_EXCEEDED', 429, true, { retryAfter })
}
}
Retry Strategies
Implement intelligent retry logic:
// src/utils/retry.ts
/** Tuning knobs for withRetry; all optional (defaults noted below). */
interface RetryOptions {
// Total attempts including the first call (default 3)
maxAttempts?: number
// Delay in ms before the first retry (default 1000)
baseDelay?: number
// Upper bound on any single delay, in ms (default 30000)
maxDelay?: number
// Exponential growth factor applied per attempt (default 2)
backoffMultiplier?: number
// BusinessError codes eligible for retry
retryableErrors?: string[]
// Observer invoked before each backoff sleep
onRetry?: (error: Error, attempt: number) => void
}
/**
 * Runs `operation`, retrying retryable BusinessErrors with exponential
 * backoff plus jitter.
 *
 * @param operation Async thunk to execute.
 * @param options   See RetryOptions; defaults: 3 attempts, 1s base, 30s cap, 2x growth.
 * @returns The first successful result.
 * @throws The last error when it is not retryable or attempts are exhausted.
 */
export const withRetry = async <T>(operation: () => Promise<T>, options: RetryOptions = {}): Promise<T> => {
  const {
    maxAttempts = 3,
    baseDelay = 1000,
    maxDelay = 30000,
    backoffMultiplier = 2,
    retryableErrors = ['EXTERNAL_SERVICE_ERROR', 'RATE_LIMIT_EXCEEDED'],
    onRetry,
  } = options
  let lastError: Error
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await operation()
    } catch (error) {
      lastError = error as Error
      // Retry only errors that opt in (retryable flag) AND whose code is allow-listed
      const isRetryable = error instanceof BusinessError && error.retryable && retryableErrors.includes(error.code)
      if (!isRetryable || attempt === maxAttempts) {
        throw error
      }
      // Exponential backoff, capped at maxDelay
      const delay = Math.min(baseDelay * Math.pow(backoffMultiplier, attempt - 1), maxDelay)
      // Add up to 10% jitter to prevent thundering-herd retries
      const jitter = delay * 0.1 * Math.random()
      const totalDelay = delay + jitter
      // Pass the narrowed Error: the raw catch variable is `unknown` under strict mode
      onRetry?.(lastError, attempt)
      await new Promise((resolve) => setTimeout(resolve, totalDelay))
    }
  }
  // Unreachable: the loop always returns or throws, but TS cannot prove it
  throw lastError!
}
// Usage example
/**
 * POSTs JSON to an external endpoint, retrying up to 3 times on failure.
 * Non-2xx responses are converted to ExternalServiceError (retryable).
 * NOTE(review): no Content-Type header is set — confirm the receiving API
 * accepts the default; the fetch itself also has no timeout.
 */
export const callExternalAPI = async (endpoint: string, data: any) => {
return withRetry(
async () => {
const response = await fetch(endpoint, {
method: 'POST',
body: JSON.stringify(data),
})
if (!response.ok) {
throw new ExternalServiceError(endpoint, new Error(response.statusText))
}
return response.json()
},
{
maxAttempts: 3,
// Log each retry for observability
onRetry: (error, attempt) => {
console.log(`Retry attempt ${attempt} after error: ${error.message}`)
},
}
)
}
Fallback Mechanisms
Provide graceful degradation:
// src/utils/fallback.ts
import { ai } from 'sdk.do'
/**
 * Runs `primary` with a timeout; on any failure (including timeout) runs
 * `fallback` instead.
 *
 * @param primary    Preferred operation.
 * @param fallback   Degraded-mode operation used when primary fails or times out.
 * @param options.timeout    Max ms to wait for primary (default 5000).
 * @param options.onFallback Observer invoked with the primary's error before falling back.
 * @returns Primary's result, or fallback's result on failure.
 */
export const withFallback = async <T>(
  primary: () => Promise<T>,
  fallback: () => Promise<T>,
  options: {
    timeout?: number
    onFallback?: (error: Error) => void
  } = {}
): Promise<T> => {
  const { timeout = 5000, onFallback } = options
  let timer: ReturnType<typeof setTimeout> | undefined
  try {
    // Race primary against a timeout rejection
    return await Promise.race([
      primary(),
      new Promise<never>((_, reject) => {
        timer = setTimeout(() => reject(new Error('Operation timeout')), timeout)
      }),
    ])
  } catch (error) {
    onFallback?.(error as Error)
    return fallback()
  } finally {
    // Fix: cancel the timeout timer so it doesn't keep the event loop alive
    // (or fire later) after the race has already settled.
    if (timer !== undefined) clearTimeout(timer)
  }
}
// Example: AI with fallback to rule-based system
export const classifyCustomerIntent = async (message: string) => {
return withFallback(
// Primary: AI classification
async () => {
return ai.classify({
text: message,
categories: ['question', 'complaint', 'sales', 'support'],
})
},
// Fallback: Rule-based classification
async () => {
const lowerMessage = message.toLowerCase()
if (lowerMessage.includes('buy') || lowerMessage.includes('purchase')) {
return { category: 'sales', confidence: 0.6 }
}
if (lowerMessage.includes('problem') || lowerMessage.includes('issue')) {
return { category: 'complaint', confidence: 0.6 }
}
if (lowerMessage.includes('how') || lowerMessage.includes('?')) {
return { category: 'question', confidence: 0.6 }
}
return { category: 'support', confidence: 0.5 }
},
{
onFallback: (error) => {
console.warn('AI classification failed, using rule-based fallback:', error)
},
}
)
}
Error Reporting
Centralized error tracking:
// src/utils/error-reporter.ts
interface ErrorContext {
userId?: string
businessId?: string
operation?: string
metadata?: Record<string, any>
}
class ErrorReporter {
private static instance: ErrorReporter
static getInstance(): ErrorReporter {
if (!ErrorReporter.instance) {
ErrorReporter.instance = new ErrorReporter()
}
return ErrorReporter.instance
}
async report(error: Error, context: ErrorContext = {}) {
// Log to console
console.error('Error occurred:', {
error: {
name: error.name,
message: error.message,
stack: error.stack,
},
context,
timestamp: new Date().toISOString(),
})
// Send to monitoring service
await this.sendToMonitoring(error, context)
// Store in database for analysis
await this.storeError(error, context)
// Alert if critical
if (this.isCritical(error)) {
await this.sendAlert(error, context)
}
}
private async sendToMonitoring(error: Error, context: ErrorContext) {
// Send to Sentry, Datadog, etc.
try {
await fetch(process.env.MONITORING_ENDPOINT!, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
error: {
message: error.message,
stack: error.stack,
type: error.name,
},
context,
timestamp: Date.now(),
}),
})
} catch (monitoringError) {
// Don't fail the application if monitoring fails
console.error('Failed to send error to monitoring:', monitoringError)
}
}
private async storeError(error: Error, context: ErrorContext) {
const { db } = await import('sdk.do')
try {
await db.create('$.ErrorLog', {
$type: 'ErrorLog',
errorType: error.name,
message: error.message,
stack: error.stack,
context,
timestamp: new Date(),
})
} catch (dbError) {
console.error('Failed to store error in database:', dbError)
}
}
private isCritical(error: Error): boolean {
return error instanceof BusinessError && (error.code === 'DATABASE_ERROR' || error.code === 'PAYMENT_ERROR' || error.statusCode >= 500)
}
private async sendAlert(error: Error, context: ErrorContext) {
const { send } = await import('sdk.do')
await send('$.Alert.send', {
severity: 'critical',
title: `Critical error: ${error.name}`,
message: error.message,
context,
})
}
}
export const reportError = ErrorReporter.getInstance().report.bind(ErrorReporter.getInstance())
Recovery Workflows
Implement automatic recovery:
// src/workflows/recovery.ts
import $, { on, send, db } from 'sdk.do'
export class RecoveryWorkflow {
async initialize() {
// Monitor failed operations
on($.Operation.failed, this.handleFailedOperation.bind(this))
}
private async handleFailedOperation(operation: Operation) {
const recoveryStrategy = this.determineRecoveryStrategy(operation)
switch (recoveryStrategy) {
case 'retry':
await this.retryOperation(operation)
break
case 'compensate':
await this.compensateOperation(operation)
break
case 'manual':
await this.escalateToHuman(operation)
break
}
}
private determineRecoveryStrategy(operation: Operation): RecoveryStrategy {
if (operation.retryCount < 3 && operation.error?.retryable) {
return 'retry'
}
if (operation.compensatable) {
return 'compensate'
}
return 'manual'
}
private async retryOperation(operation: Operation) {
await send($.Operation.retry, {
operationId: operation.$id,
attempt: operation.retryCount + 1,
})
}
private async compensateOperation(operation: Operation) {
// Execute compensating transaction
const compensationSteps = await this.getCompensationSteps(operation)
for (const step of compensationSteps) {
await send(step.action, step.params)
}
// Mark as compensated
await db.update('$.Operation', operation.$id, {
status: 'compensated',
compensatedAt: new Date(),
})
}
private async escalateToHuman(operation: Operation) {
await send('$.Task.create', {
$type: 'Task',
title: `Manual intervention required: ${operation.name}`,
description: `Operation ${operation.$id} failed and requires manual review`,
priority: 'high',
assignedTo: 'operations-team',
metadata: {
operationId: operation.$id,
error: operation.error,
},
})
}
}
Testing Strategies
Unit Testing Functions
Test individual functions in isolation:
// tests/unit/pricing.test.ts
import { describe, it, expect, vi } from 'vitest'
import { calculateFinalPrice, calculateBasePrice } from '@/functions/pricing'
import { ai } from 'sdk.do'
describe('Pricing Functions', () => {
describe('calculateBasePrice', () => {
it('should calculate base price with margin', () => {
expect(calculateBasePrice(100, 0.5)).toBe(150)
})
it('should handle zero margin', () => {
expect(calculateBasePrice(100, 0)).toBe(100)
})
})
describe('calculateFinalPrice', () => {
it('should calculate final price with AI discount', async () => {
// Mock AI decision
vi.spyOn(ai, 'decide').mockResolvedValue({
percentage: 0.1,
})
const finalPrice = await calculateFinalPrice(100, 0.5, 'premium')
// Base: 150, Discount 10%: 135, Rounded: 134.99
expect(finalPrice).toBe(134.99)
expect(ai.decide).toHaveBeenCalledWith('customer-discount', {
tier: 'premium',
basePrice: 150,
})
})
it('should handle AI failure gracefully', async () => {
vi.spyOn(ai, 'decide').mockRejectedValue(new Error('AI service down'))
await expect(calculateFinalPrice(100, 0.5, 'premium')).rejects.toThrow('AI service down')
})
})
})
Integration Testing Workflows
Test workflow interactions:
// tests/integration/order-processing.test.ts
import { describe, it, expect, beforeEach, afterEach } from 'vitest'
import { OrderProcessingWorkflow } from '@/workflows/order-processing'
import $, { db, send } from 'sdk.do'
describe('Order Processing Workflow', () => {
let workflow: OrderProcessingWorkflow
let testOrder: Order
beforeEach(async () => {
workflow = new OrderProcessingWorkflow()
await workflow.initialize()
// Create test data
testOrder = await $.Order.create({
customer: await $.Customer.create({ email: '[email protected]' }),
orderedItem: [
{
$type: 'OrderItem',
orderedItem: await $.Product.create({ name: 'Test Product', quantityAvailable: 10 }),
orderQuantity: 2,
},
],
totalPrice: 100,
paymentMethod: 'credit_card',
})
})
afterEach(async () => {
// Cleanup test data
await db.delete('$.Order', testOrder.$id)
})
it('should process order successfully', async () => {
// Trigger order created event
await send('$.Order.created', testOrder)
// Wait for processing
await new Promise((resolve) => setTimeout(resolve, 1000))
// Verify payment was processed
const payments = await db.related(testOrder, '$.hasPayment', '$.Payment')
expect(payments).toHaveLength(1)
expect(payments[0].status).toBe('completed')
// Verify inventory was reserved
const product = testOrder.orderedItem[0].orderedItem
const updatedProduct = await db.get('$.Product', product.$id)
expect(updatedProduct.quantityAvailable).toBe(8)
})
it('should cancel order if validation fails', async () => {
// Create order with insufficient inventory
const invalidOrder = await $.Order.create({
customer: await $.Customer.create({ email: '[email protected]' }),
orderedItem: [
{
$type: 'OrderItem',
orderedItem: await $.Product.create({ name: 'Low Stock', quantityAvailable: 1 }),
orderQuantity: 10,
},
],
totalPrice: 100,
})
await send('$.Order.created', invalidOrder)
await new Promise((resolve) => setTimeout(resolve, 1000))
const order = await db.get('$.Order', invalidOrder.$id)
expect(order.status).toBe('cancelled')
})
})
E2E Testing Agents
Test complete agent interactions:
// tests/e2e/sales-agent.test.ts
import { describe, it, expect, beforeAll, afterAll } from 'vitest'
import { SalesAgent } from '@/agents/sales-agent'
import { db } from 'sdk.do'
describe('Sales Agent E2E', () => {
let agent: SalesAgent
let testBusiness: Business
beforeAll(async () => {
// Setup test environment
testBusiness = await db.create('$.Organization', {
name: 'Test Business',
})
agent = new SalesAgent({
businessId: testBusiness.$id,
personality: 'professional',
})
})
afterAll(async () => {
// Cleanup
await db.delete('$.Organization', testBusiness.$id)
})
it('should handle product inquiry end-to-end', async () => {
const inquiry = {
customer: await db.create('$.Customer', {
email: '[email protected]',
}),
message: 'Tell me about your premium subscription',
channel: 'chat',
}
const response = await agent.handleInquiry(inquiry)
expect(response.message).toBeTruthy()
expect(response.suggestedProducts).toBeInstanceOf(Array)
expect(response.followUpActions).toBeDefined()
})
it('should create and qualify leads', async () => {
const inquiry = {
customer: await db.create('$.Customer', {
email: '[email protected]',
company: 'Enterprise Corp',
}),
message: 'We need enterprise solution for 100+ users',
channel: 'chat',
}
const response = await agent.handleInquiry(inquiry)
// Verify lead was created
const leads = await db.query('$.Lead', {
'customer.$id': inquiry.customer.$id,
})
expect(leads).toHaveLength(1)
expect(leads[0].source).toBe('chat')
// Verify meeting was scheduled for high-quality lead
const meetings = await db.related(leads[0], '$.hasMeeting', '$.Meeting')
expect(meetings.length).toBeGreaterThan(0)
})
})
Mock Data Patterns
Create reusable test fixtures:
// tests/fixtures/mock-data.ts
import $ from 'sdk.do'
/** Builds a plain Customer fixture; pass `overrides` to customize any field. */
export const mockCustomer = (overrides = {}) => {
  const defaults = {
    $type: 'Customer',
    email: '[email protected]',
    name: 'Mock Customer',
    dateCreated: new Date(),
  }
  return { ...defaults, ...overrides }
}
/** Builds a plain Product fixture with one USD offer; `overrides` wins on conflicts. */
export const mockProduct = (overrides = {}) => {
  const defaults = {
    $type: 'Product',
    name: 'Mock Product',
    description: 'A test product',
    offers: [
      {
        $type: 'Offer',
        price: 99.99,
        priceCurrency: 'USD',
      },
    ],
    quantityAvailable: 100,
  }
  return { ...defaults, ...overrides }
}
export const mockOrder = (overrides = {}) => ({
$type: 'Order',
customer: mockCustomer(),
orderedItem: [
{
$type: 'OrderItem',
orderedItem: mockProduct(),
orderQuantity: 1,
},
],
totalPrice: 99.99,
orderStatus: 'pending',
...overrides,
})
// Test data builder
export class TestDataBuilder {
private data: any = {}
customer(overrides = {}) {
this.data.customer = mockCustomer(overrides)
return this
}
product(overrides = {}) {
if (!this.data.products) this.data.products = []
this.data.products.push(mockProduct(overrides))
return this
}
order(overrides = {}) {
this.data.order = mockOrder({
customer: this.data.customer,
orderedItem: this.data.products?.map((p) => ({
$type: 'OrderItem',
orderedItem: p,
orderQuantity: 1,
})),
...overrides,
})
return this
}
build() {
return this.data
}
}
// Usage
const testData = new TestDataBuilder()
.customer({ email: '[email protected]' })
.product({ name: 'Premium Widget', quantityAvailable: 5 })
.product({ name: 'Basic Widget', quantityAvailable: 50 })
.order({ totalPrice: 199.98 })
.build()
Test Automation
Continuous testing in CI/CD:
# .github/workflows/test.yml
name: Test Suite
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup pnpm
uses: pnpm/action-setup@v2
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'pnpm'
- name: Install dependencies
run: pnpm install
- name: Run unit tests
run: pnpm test:unit --coverage
- name: Run integration tests
run: pnpm test:integration
env:
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
- name: Run E2E tests
run: pnpm test:e2e
env:
API_KEY: ${{ secrets.TEST_API_KEY }}
- name: Upload coverage
uses: codecov/codecov-action@v3
with:
files: ./coverage/coverage-final.json
quality:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup
uses: pnpm/action-setup@v2
- name: Lint
run: pnpm lint
- name: Type check
run: pnpm typecheck
- name: Check formatting
run: pnpm format:check
Performance Optimization
Function Optimization
Write efficient business functions:
// Inefficient: Multiple database calls
async function getOrderDetails(orderId: string) {
const order = await db.get('$.Order', orderId)
const customer = await db.get('$.Customer', order.customer.$id)
const products = await Promise.all(order.orderedItem.map((item) => db.get('$.Product', item.orderedItem.$id)))
return { order, customer, products }
}
// Efficient: Single query with includes
async function getOrderDetailsOptimized(orderId: string) {
const order = await db.get('$.Order', orderId, {
include: [
'customer',
'orderedItem.orderedItem', // Include nested products
],
})
return {
order,
customer: order.customer,
products: order.orderedItem.map((item) => item.orderedItem),
}
}
// Efficient: Batch operations
async function updateProductPrices(updates: Array<{ id: string; price: number }>) {
// Inefficient: Loop with individual updates
// for (const update of updates) {
// await db.update('$.Product', update.id, { price: update.price })
// }
// Efficient: Batch update
await db.batchUpdate(
'$.Product',
updates.map((u) => ({
$id: u.id,
offers: [{ $type: 'Offer', price: u.price, priceCurrency: 'USD' }],
}))
)
}
Workflow Efficiency
Optimize workflow execution:
// src/workflows/optimized-processing.ts
import $, { on, send, db } from 'sdk.do'
export class OptimizedWorkflow {
async initialize() {
on($.Order.created, this.handleOrderCreated.bind(this))
}
private async handleOrderCreated(order: Order) {
// Inefficient: Sequential operations
// await send($.Payment.process, { order })
// await send($.Inventory.reserve, { order })
// await send($.Email.send, { to: order.customer.email })
// Efficient: Parallel independent operations
await Promise.all([
send($.Payment.process, { order }),
send($.Inventory.reserve, { order }),
send($.Email.send, {
to: order.customer.email,
template: 'order-confirmation',
}),
send($.Analytics.track, {
event: 'order_created',
properties: { orderId: order.$id },
}),
])
}
// Use streaming for large data sets
private async processLargeInventoryUpdate(productIds: string[]) {
// Inefficient: Load all products into memory
// const products = await db.query('$.Product', { $id: { $in: productIds } })
// for (const product of products) { ... }
// Efficient: Stream processing
const stream = db.stream('$.Product', { $id: { $in: productIds } })
for await (const product of stream) {
await this.updateProduct(product)
// Memory is freed after each iteration
}
}
}
Agent Resource Management
Manage agent resources efficiently:
// src/agents/optimized-agent.ts
import { ai, db } from 'sdk.do'
export class OptimizedAgent {
private cache = new Map<string, CachedResponse>()
private requestQueue: RequestQueue
constructor() {
// Implement request batching
this.requestQueue = new RequestQueue({
maxBatchSize: 10,
maxWaitMs: 100,
})
}
async processInquiry(inquiry: string): Promise<Response> {
// Check cache first
const cached = this.cache.get(inquiry)
if (cached && !this.isCacheExpired(cached)) {
return cached.response
}
// Batch AI requests
const response = await this.requestQueue.add(async (batch) => {
// Process multiple inquiries in single AI call
return ai.generate({
model: 'gpt-5',
prompt: 'Answer customer inquiries',
context: { inquiries: batch },
})
})
// Cache result
this.cache.set(inquiry, {
response,
timestamp: Date.now(),
ttl: 3600000, // 1 hour
})
return response
}
// Implement context pruning for long conversations
private async pruneContext(context: Message[]): Promise<Message[]> {
if (context.length <= 10) return context
// Keep first message (system context) and last 9 messages
return [context[0], ...context.slice(-9)]
}
// Lazy load knowledge base
private knowledgeBase: KnowledgeBase | null = null
private async getKnowledgeBase(): Promise<KnowledgeBase> {
if (!this.knowledgeBase) {
this.knowledgeBase = await db.get('$.KnowledgeBase', this.config.knowledgeBaseId)
}
return this.knowledgeBase
}
}
Caching Strategies
Implement multi-level caching:
// src/utils/cache.ts
interface CacheOptions {
ttl?: number
prefix?: string
}
class CacheManager {
private memoryCache = new Map<string, CachedValue>()
private redis: RedisClient // Distributed cache
constructor(redisClient: RedisClient) {
this.redis = redisClient
}
// L1: Memory cache (fastest, instance-local)
private async getFromMemory(key: string): Promise<any | null> {
const cached = this.memoryCache.get(key)
if (!cached) return null
if (Date.now() > cached.expiry) {
this.memoryCache.delete(key)
return null
}
return cached.value
}
// L2: Redis cache (fast, distributed)
private async getFromRedis(key: string): Promise<any | null> {
const cached = await this.redis.get(key)
if (!cached) return null
const parsed = JSON.parse(cached)
// Populate L1 cache
this.memoryCache.set(key, {
value: parsed.value,
expiry: parsed.expiry,
})
return parsed.value
}
// Get with automatic fallback through cache levels
async get<T>(key: string, fetcher: () => Promise<T>, options: CacheOptions = {}): Promise<T> {
const fullKey = options.prefix ? `${options.prefix}:${key}` : key
// Try L1 cache
let value = await this.getFromMemory(fullKey)
if (value !== null) return value
// Try L2 cache
value = await this.getFromRedis(fullKey)
if (value !== null) return value
// Fetch from source
value = await fetcher()
// Populate all cache levels
await this.set(fullKey, value, options)
return value
}
async set(key: string, value: any, options: CacheOptions = {}) {
const ttl = options.ttl || 3600000 // Default 1 hour
const expiry = Date.now() + ttl
// Set L1 cache
this.memoryCache.set(key, { value, expiry })
// Set L2 cache
await this.redis.setex(key, Math.floor(ttl / 1000), JSON.stringify({ value, expiry }))
}
async invalidate(pattern: string) {
// Clear from memory
for (const key of this.memoryCache.keys()) {
if (key.match(pattern)) {
this.memoryCache.delete(key)
}
}
// Clear from Redis
const keys = await this.redis.keys(pattern)
if (keys.length > 0) {
await this.redis.del(...keys)
}
}
}
// Usage example
const cache = new CacheManager(redisClient)
export const getProductWithCache = async (productId: string) => {
return cache.get(
productId,
async () => {
// This only runs if cache miss
return db.get('$.Product', productId)
},
{
ttl: 300000, // 5 minutes
prefix: 'product',
}
)
}
// Invalidate cache on updates
export const updateProduct = async (productId: string, data: Partial<Product>) => {
const updated = await db.update('$.Product', productId, data)
// Invalidate related caches
await cache.invalidate(`product:${productId}`)
await cache.invalidate('product-list:*')
return updated
}
Database Optimization
Optimize database queries:
// src/utils/database-optimization.ts
import { db } from 'sdk.do'
// Use indexes effectively
export const setupIndexes = async () => {
await db.createIndex('$.Order', ['customer.$id', 'orderDate'])
await db.createIndex('$.Product', ['category', 'price'])
await db.createIndex('$.Customer', ['email']) // Unique index
}
// Efficient pagination
export const paginateOrders = async (page: number, pageSize: number = 20) => {
// Inefficient: Load all and slice
// const all = await db.query('$.Order', {})
// return all.slice(page * pageSize, (page + 1) * pageSize)
// Efficient: Database-level pagination
return db.query('$.Order', {
$sort: { orderDate: -1 },
$skip: page * pageSize,
$limit: pageSize,
})
}
// Use projections to reduce data transfer
export const getOrderList = async () => {
// Inefficient: Fetch all fields
// const orders = await db.query('$.Order', {})
// Efficient: Fetch only needed fields
return db.query('$.Order', {
$select: ['$id', 'orderNumber', 'totalPrice', 'orderDate', 'orderStatus'],
})
}
// Aggregate in database
export const getOrderStatistics = async (customerId: string) => {
// Inefficient: Fetch all orders and calculate in code
// const orders = await db.query('$.Order', { 'customer.$id': customerId })
// const total = orders.reduce((sum, o) => sum + o.totalPrice, 0)
// Efficient: Database aggregation
const [stats] = await db.aggregate('$.Order', [
{ $match: { 'customer.$id': customerId } },
{
$group: {
_id: '$customer.$id',
totalOrders: { $sum: 1 },
totalRevenue: { $sum: '$totalPrice' },
averageOrderValue: { $avg: '$totalPrice' },
},
},
])
return stats
}
// Implement read replicas for heavy read workloads
export const getProductCatalog = async () => {
// Use read replica for queries that don't need strong consistency
return db.query(
'$.Product',
{},
{
readPreference: 'secondary',
}
)
}
Rate Limiting
Protect resources with rate limiting:
// src/utils/rate-limiter.ts
/** Configuration for RateLimiter: allow `maxRequests` per `windowMs`, bucketed by key. */
interface RateLimitConfig {
  maxRequests: number
  windowMs: number
  keyGenerator?: (context: any) => string
}
/**
 * In-memory sliding-window rate limiter. Tracks request timestamps per key
 * and throws RateLimitError (with a retry-after hint, in seconds) once the
 * window is full.
 */
export class RateLimiter {
  private requests = new Map<string, number[]>()
  constructor(private config: RateLimitConfig) {}
  /** Records one request for the context's key; throws RateLimitError when over the limit. */
  async checkLimit(context: any): Promise<boolean> {
    const key = this.keyFor(context)
    const now = Date.now()
    const cutoff = now - this.config.windowMs
    // Keep only timestamps still inside the window
    const recent = (this.requests.get(key) || []).filter((time) => time > cutoff)
    if (recent.length >= this.config.maxRequests) {
      // The oldest surviving timestamp determines when capacity frees up
      throw new RateLimitError(Math.ceil((recent[0] + this.config.windowMs - now) / 1000))
    }
    recent.push(now)
    this.requests.set(key, recent)
    return true
  }
  /** Clears all recorded requests for the context's key. */
  async reset(context: any) {
    this.requests.delete(this.keyFor(context))
  }
  /** Resolves the bucket key; falls back to a single global bucket. */
  private keyFor(context: any): string {
    return this.config.keyGenerator?.(context) || 'global'
  }
}
// Usage: Rate limit by customer
// One shared limiter: 100 requests per customer per minute.
const customerRateLimiter = new RateLimiter({
maxRequests: 100,
windowMs: 60000, // 1 minute
keyGenerator: (ctx) => ctx.customerId,
})
// Entry point for customer traffic; throws RateLimitError when the
// customer's budget for the current window is exhausted.
export const handleCustomerRequest = async (customerId: string, request: any) => {
await customerRateLimiter.checkLimit({ customerId })
// Process request
// NOTE(review): processRequest is not defined in this file — confirm the
// project-level handler exists where this example is used.
return processRequest(request)
}
// Usage: Rate limit AI operations
// Model calls are expensive: cap each operation type at 10/minute.
const aiRateLimiter = new RateLimiter({
maxRequests: 10,
windowMs: 60000,
keyGenerator: (ctx) => `ai:${ctx.operation}`,
})
// Gate model calls behind the per-operation limiter before spending tokens.
export const generateAIResponse = async (operation: string, prompt: string) => {
await aiRateLimiter.checkLimit({ operation })
// NOTE(review): `ai` is not imported here — confirm the sdk.do ai client
// is in scope where this example is used.
return ai.generate({ model: 'gpt-5', prompt })
}Security Considerations
Authentication and Authorization
Implement secure authentication:
// src/security/auth.ts
import { db } from 'sdk.do'
import bcrypt from 'bcrypt'
import jwt from 'jsonwebtoken'
/**
 * Email/password authentication issuing JWT access + refresh tokens.
 *
 * Fixes over the previous revision:
 *  - register() now rejects duplicate emails instead of silently creating
 *    a second Person with the same address.
 *  - verifyToken() now rejects refresh/verification tokens presented as
 *    access tokens. All token kinds share this.jwtSecret, so previously a
 *    30-day refresh token (or an emailed verification token) passed
 *    verification and granted full API access.
 */
export class AuthService {
  private jwtSecret = process.env.JWT_SECRET!
  private jwtExpiry = '24h'

  /**
   * Registers a new user: enforces the password policy, rejects duplicate
   * emails, stores a bcrypt hash, and sends a verification email.
   */
  async register(email: string, password: string): Promise<User> {
    // Validate password strength before doing any work
    this.validatePassword(password)
    // Fix: email is the login identifier — a second account with the same
    // address would shadow the first at login time.
    const [existing] = await db.query('$.Person', { email })
    if (existing) {
      throw new ValidationError('Email already registered')
    }
    // Hash password (bcrypt cost 12)
    const hashedPassword = await bcrypt.hash(password, 12)
    // Create user
    const user = await db.create('$.Person', {
      $type: 'Person',
      email,
      hashedPassword,
      emailVerified: false,
      createdAt: new Date(),
    })
    // Send verification email
    await this.sendVerificationEmail(user)
    return user
  }

  /**
   * Authenticates email/password and issues tokens. Every failure mode
   * before email-verification uses the same message so the endpoint does
   * not act as an account-enumeration oracle.
   */
  async login(email: string, password: string): Promise<AuthToken> {
    const [user] = await db.query('$.Person', { email })
    if (!user) {
      throw new ValidationError('Invalid credentials')
    }
    const valid = await bcrypt.compare(password, user.hashedPassword)
    if (!valid) {
      throw new ValidationError('Invalid credentials')
    }
    if (!user.emailVerified) {
      throw new ValidationError('Email not verified')
    }
    const accessToken = this.generateAccessToken(user)
    const refreshToken = this.generateRefreshToken(user)
    // Persist the refresh token so it can be invalidated server-side.
    await db.update('$.Person', user.$id, {
      refreshToken,
      lastLoginAt: new Date(),
    })
    return {
      accessToken,
      refreshToken,
      expiresIn: this.jwtExpiry,
    }
  }

  /** Verifies an *access* token and resolves the authenticated user. */
  async verifyToken(token: string): Promise<User> {
    try {
      const decoded = jwt.verify(token, this.jwtSecret) as { userId: string; type?: string }
      // Fix: refresh/verification tokens carry a `type` claim and must not
      // be accepted as access tokens.
      if (decoded.type) {
        throw new Error('Not an access token')
      }
      const user = await db.get('$.Person', decoded.userId)
      if (!user) {
        throw new Error('User not found')
      }
      return user
    } catch (error) {
      // Collapse all failure modes into one opaque error.
      throw new ValidationError('Invalid token')
    }
  }

  // Access token: short-lived, carries identity + roles for authorization.
  private generateAccessToken(user: User): string {
    return jwt.sign(
      {
        userId: user.$id,
        email: user.email,
        roles: user.roles || [],
      },
      this.jwtSecret,
      { expiresIn: this.jwtExpiry }
    )
  }

  // Refresh token: long-lived, tagged type:'refresh' (rejected by verifyToken).
  private generateRefreshToken(user: User): string {
    return jwt.sign({ userId: user.$id, type: 'refresh' }, this.jwtSecret, { expiresIn: '30d' })
  }

  // Minimal complexity policy: >= 8 chars with upper, lower and digit.
  private validatePassword(password: string) {
    if (password.length < 8) {
      throw new ValidationError('Password must be at least 8 characters')
    }
    if (!/[A-Z]/.test(password)) {
      throw new ValidationError('Password must contain uppercase letter')
    }
    if (!/[a-z]/.test(password)) {
      throw new ValidationError('Password must contain lowercase letter')
    }
    if (!/[0-9]/.test(password)) {
      throw new ValidationError('Password must contain number')
    }
  }

  // Emails a 24h single-purpose token (type:'verification').
  private async sendVerificationEmail(user: User) {
    const token = jwt.sign({ userId: user.$id, type: 'verification' }, this.jwtSecret, { expiresIn: '24h' })
    await send('$.Email.send', {
      to: user.email,
      template: 'email-verification',
      data: {
        verificationLink: `https://app.do/verify?token=${token}`,
      },
    })
  }
}Data Encryption
Encrypt sensitive data:
// src/security/encryption.ts
import crypto from 'crypto'
/**
 * AES-256-GCM symmetric encryption plus one-way SHA-256 hashing.
 *
 * Fix over the previous revision: the master key is validated by *byte*
 * length. AES-256 requires exactly 32 key bytes, but the old check counted
 * string characters, so a 32-character key containing multi-byte UTF-8
 * passed validation and then failed (or mis-keyed) inside createCipheriv.
 * Behavior for valid ASCII 32-char keys is unchanged.
 */
export class EncryptionService {
  private algorithm = 'aes-256-gcm'
  private keyLength = 32 // AES-256 key size in bytes
  private ivLength = 16 // IV size in bytes (GCM accepts 12-16; 16 kept for compatibility)

  constructor(private masterKey: string) {
    // Validate the UTF-8 *byte* length, not the character count.
    if (Buffer.byteLength(masterKey, 'utf8') !== this.keyLength) {
      throw new Error('Master key must be exactly 32 bytes')
    }
  }

  /**
   * Encrypts plaintext with a fresh random IV.
   * @returns hex-encoded ciphertext, IV and GCM auth tag — all three are
   *          required for decryption; the auth tag detects tampering.
   */
  encrypt(plaintext: string): EncryptedData {
    // A fresh IV per message is mandatory for GCM security.
    const iv = crypto.randomBytes(this.ivLength)
    const cipher = crypto.createCipheriv(this.algorithm, Buffer.from(this.masterKey), iv)
    let encrypted = cipher.update(plaintext, 'utf8', 'hex')
    encrypted += cipher.final('hex')
    const authTag = cipher.getAuthTag()
    return {
      encrypted,
      iv: iv.toString('hex'),
      authTag: authTag.toString('hex'),
    }
  }

  /**
   * Decrypts data produced by encrypt().
   * @throws if the auth tag does not verify (wrong key or tampered data)
   */
  decrypt(encryptedData: EncryptedData): string {
    const decipher = crypto.createDecipheriv(this.algorithm, Buffer.from(this.masterKey), Buffer.from(encryptedData.iv, 'hex'))
    // GCM authentication: final() throws if the ciphertext was altered.
    decipher.setAuthTag(Buffer.from(encryptedData.authTag, 'hex'))
    let decrypted = decipher.update(encryptedData.encrypted, 'hex', 'utf8')
    decrypted += decipher.final('utf8')
    return decrypted
  }

  // Hash data (one-way, unsalted SHA-256) — for equality lookups only.
  hash(data: string): string {
    return crypto.createHash('sha256').update(data).digest('hex')
  }
}
// Usage: Encrypt sensitive customer data
const encryption = new EncryptionService(process.env.MASTER_KEY!)
// Persists a customer with PII fields encrypted at rest. The ssnHash
// column allows exact-match lookups without decrypting.
// NOTE(review): storing card numbers — even encrypted — is generally
// prohibited under PCI DSS; prefer a tokenization provider.
// NOTE(review): an unsalted SHA-256 of an SSN is brute-forceable (the SSN
// keyspace is tiny); consider an HMAC with a dedicated key.
export const createCustomerWithSensitiveData = async (data: CustomerData) => {
// Encrypt SSN
const encryptedSSN = data.ssn ? encryption.encrypt(data.ssn) : null
// Encrypt credit card
const encryptedCC = data.creditCard ? encryption.encrypt(data.creditCard) : null
return db.create('$.Customer', {
...data,
ssn: encryptedSSN,
creditCard: encryptedCC,
ssnHash: data.ssn ? encryption.hash(data.ssn) : null, // For lookup without decryption
})
}API Key Management
Securely manage API keys:
// src/security/api-keys.ts
import { db } from 'sdk.do'
import crypto from 'crypto'
/**
 * API key lifecycle: generation, validation, revocation and rotation.
 * Only the SHA-256 hash of a key is persisted; the plaintext key is
 * returned exactly once, from generateKey()/rotateKey().
 *
 * Fix over the previous revision: validateKey() now rejects revoked keys.
 * revokeKey() stamped revokedAt, but validation never read it, so revoked
 * (and rotated-away) keys kept working indefinitely.
 */
export class APIKeyService {
  /**
   * Creates a key with the given permissions.
   * @param expiresIn optional lifetime in milliseconds
   */
  async generateKey(name: string, permissions: string[], expiresIn?: number): Promise<APIKeyResult> {
    // 32 random bytes -> 64 hex chars, prefixed so keys are recognizable.
    const key = `sk_${crypto.randomBytes(32).toString('hex')}`
    // Store only a hash: a database leak must not leak usable keys.
    const hashedKey = crypto.createHash('sha256').update(key).digest('hex')
    // Calculate expiry
    const expiresAt = expiresIn ? new Date(Date.now() + expiresIn) : null
    // Store key metadata
    await db.create('$.APIKey', {
      $type: 'APIKey',
      name,
      hashedKey,
      permissions,
      expiresAt,
      createdAt: new Date(),
      lastUsedAt: null,
      usageCount: 0,
    })
    // Return plain key (only shown once)
    return {
      key,
      name,
      permissions,
      expiresAt,
    }
  }

  /** Resolves a plaintext key to its metadata, or null when invalid. */
  async validateKey(key: string): Promise<APIKeyData | null> {
    const hashedKey = crypto.createHash('sha256').update(key).digest('hex')
    const [apiKey] = await db.query('$.APIKey', { hashedKey })
    if (!apiKey) {
      return null
    }
    // Fix: revoked keys must never validate (see revokeKey / rotateKey).
    if (apiKey.revokedAt) {
      return null
    }
    // Check expiry
    if (apiKey.expiresAt && new Date() > apiKey.expiresAt) {
      return null
    }
    // Track usage for audit/analytics.
    await db.update('$.APIKey', apiKey.$id, {
      lastUsedAt: new Date(),
      usageCount: apiKey.usageCount + 1,
    })
    return {
      $id: apiKey.$id,
      permissions: apiKey.permissions,
      name: apiKey.name,
    }
  }

  /** Marks a key revoked; validateKey() rejects it from then on. */
  async revokeKey(keyId: string) {
    await db.update('$.APIKey', keyId, {
      revokedAt: new Date(),
    })
  }

  /**
   * Issues a replacement key with the same name, permissions and remaining
   * TTL, then revokes the old key.
   */
  async rotateKey(oldKeyId: string): Promise<APIKeyResult> {
    const oldKey = await db.get('$.APIKey', oldKeyId)
    const newKey = await this.generateKey(oldKey.name, oldKey.permissions, oldKey.expiresAt ? oldKey.expiresAt.getTime() - Date.now() : undefined)
    await this.revokeKey(oldKeyId)
    return newKey
  }
}
// Middleware: Verify API key
// Middleware factory: extracts a bearer API key, validates it and checks
// that it carries every required permission before attaching it to the
// request context.
export const requireAPIKey = (requiredPermissions: string[] = []) => {
return async (context: Context) => {
const authHeader = context.req.header('Authorization')
if (!authHeader?.startsWith('Bearer ')) {
throw new ValidationError('Missing API key')
}
// Strip the 'Bearer ' prefix (7 characters).
const key = authHeader.substring(7)
const apiKeyService = new APIKeyService()
const apiKey = await apiKeyService.validateKey(key)
if (!apiKey) {
throw new ValidationError('Invalid API key')
}
// Check permissions
for (const permission of requiredPermissions) {
if (!apiKey.permissions.includes(permission)) {
throw new ValidationError(`Missing permission: ${permission}`)
}
}
// Add to context
context.set('apiKey', apiKey)
}
}RBAC Patterns
Role-based access control:
// src/security/rbac.ts
import { db } from 'sdk.do'
// Fine-grained permissions named "<resource>:<action>". ADMIN_ALL is a
// wildcard grant that RBACService checks explicitly.
export enum Permission {
// Product permissions
PRODUCT_CREATE = 'product:create',
PRODUCT_READ = 'product:read',
PRODUCT_UPDATE = 'product:update',
PRODUCT_DELETE = 'product:delete',
// Order permissions
ORDER_CREATE = 'order:create',
ORDER_READ = 'order:read',
ORDER_UPDATE = 'order:update',
ORDER_CANCEL = 'order:cancel',
// Customer permissions
CUSTOMER_READ = 'customer:read',
CUSTOMER_UPDATE = 'customer:update',
// Admin permissions
ADMIN_ALL = 'admin:*',
}
// Static role -> permission table. admin short-circuits via ADMIN_ALL;
// every other role enumerates its grants explicitly.
export const Roles = {
admin: [Permission.ADMIN_ALL],
manager: [
Permission.PRODUCT_CREATE,
Permission.PRODUCT_READ,
Permission.PRODUCT_UPDATE,
Permission.ORDER_READ,
Permission.ORDER_UPDATE,
Permission.CUSTOMER_READ,
],
sales: [Permission.PRODUCT_READ, Permission.ORDER_CREATE, Permission.ORDER_READ, Permission.CUSTOMER_READ, Permission.CUSTOMER_UPDATE],
support: [Permission.PRODUCT_READ, Permission.ORDER_READ, Permission.CUSTOMER_READ],
customer: [Permission.ORDER_CREATE, Permission.ORDER_READ],
}
/**
 * Role-based access control backed by the static Roles table.
 *
 * Fix over the previous revision: checkPermission() no longer throws a
 * TypeError when the userId does not resolve to a Person — an unknown
 * user now simply has no permissions.
 */
export class RBACService {
  /** True when the user holds the permission via any assigned role. */
  async checkPermission(userId: string, permission: Permission): Promise<boolean> {
    const user = await db.get('$.Person', userId)
    // Unknown user, or user with no roles -> no access.
    if (!user || !user.roles || user.roles.length === 0) {
      return false
    }
    // Admin has all permissions
    if (user.roles.includes('admin')) {
      return true
    }
    // Check each role's grant list (ADMIN_ALL acts as a wildcard).
    for (const role of user.roles) {
      const permissions = Roles[role] || []
      if (permissions.includes(Permission.ADMIN_ALL)) {
        return true
      }
      if (permissions.includes(permission)) {
        return true
      }
    }
    return false
  }

  /** Adds a role to the user; no-op when already assigned. */
  async assignRole(userId: string, role: string) {
    const user = await db.get('$.Person', userId)
    const roles = user.roles || []
    if (!roles.includes(role)) {
      await db.update('$.Person', userId, {
        roles: [...roles, role],
      })
    }
  }

  /** Removes a role from the user; no-op when not assigned. */
  async removeRole(userId: string, role: string) {
    const user = await db.get('$.Person', userId)
    const roles = user.roles || []
    await db.update('$.Person', userId, {
      roles: roles.filter((r) => r !== role),
    })
  }
}
// Middleware: Require permission
/**
 * Middleware factory: ensures the authenticated user on the context holds
 * the given permission; otherwise throws ValidationError.
 */
export const requirePermission = (permission: Permission) => {
  return async (context: Context) => {
    const currentUser = context.get('user')
    if (!currentUser) {
      throw new ValidationError('Authentication required')
    }
    const allowed = await new RBACService().checkPermission(currentUser.$id, permission)
    if (!allowed) {
      throw new ValidationError(`Missing permission: ${permission}`)
    }
  }
}
// Usage in routes
import { Hono } from 'hono'
const app = new Hono()
// Only holders of product:create may create products.
app.post('/products', requirePermission(Permission.PRODUCT_CREATE), async (c) => {
// Create product
const product = await $.Product.create(await c.req.json())
return c.json(product)
})
// Deletion is restricted to product:delete holders.
app.delete('/products/:id', requirePermission(Permission.PRODUCT_DELETE), async (c) => {
await db.delete('$.Product', c.req.param('id'))
return c.json({ success: true })
})Audit Logging
Track all security-relevant events:
// src/security/audit.ts
import { db } from 'sdk.do'
// Canonical names ("<subject>.<verb>") for security-relevant events
// recorded by AuditLogger.
export enum AuditAction {
USER_LOGIN = 'user.login',
USER_LOGOUT = 'user.logout',
USER_REGISTER = 'user.register',
PASSWORD_CHANGE = 'password.change',
ROLE_ASSIGNED = 'role.assigned',
PERMISSION_GRANTED = 'permission.granted',
DATA_ACCESS = 'data.access',
DATA_MODIFY = 'data.modify',
DATA_DELETE = 'data.delete',
API_KEY_CREATED = 'api_key.created',
API_KEY_REVOKED = 'api_key.revoked',
}
/**
 * Persists security-relevant events as $.AuditLog records.
 */
export class AuditLogger {
  /**
   * Writes one audit entry.
   * @param actor  who acted (user, system process, or API key)
   * @param target what was acted on, when applicable
   * @param metadata free-form details; ipAddress/userAgent are promoted to
   *                 top-level fields for querying
   */
  async log(
    action: AuditAction,
    actor: { type: 'user' | 'system' | 'api_key'; id: string },
    target?: { type: string; id: string },
    metadata?: Record<string, any>
  ) {
    const entry = {
      $type: 'AuditLog',
      action,
      actor,
      target,
      metadata,
      timestamp: new Date(),
      ipAddress: metadata?.ipAddress,
      userAgent: metadata?.userAgent,
    }
    await db.create('$.AuditLog', entry)
  }

  /** Queries audit entries matching the filters, newest first. */
  async query(filters: AuditLogFilters) {
    const criteria = {
      ...filters,
      $sort: { timestamp: -1 },
    }
    return db.query('$.AuditLog', criteria)
  }
}
// Usage
const audit = new AuditLogger()

/**
 * AuthService.login() wrapped with audit logging of both outcomes.
 *
 * Fix over the previous revision: login() returns token data
 * ({ accessToken, refreshToken, expiresIn }) and has no `user` property,
 * so the old `result.user.$id` threw a TypeError on every *successful*
 * login. The audit actor is now identified by email on both paths.
 */
export const loginWithAudit = async (email: string, password: string, metadata: any) => {
  const authService = new AuthService()
  try {
    const result = await authService.login(email, password)
    // Log successful login
    await audit.log(AuditAction.USER_LOGIN, { type: 'user', id: email }, undefined, {
      success: true,
      ipAddress: metadata.ipAddress,
      userAgent: metadata.userAgent,
    })
    return result
  } catch (error) {
    // Log failed login attempt (same actor key as the success path)
    await audit.log(AuditAction.USER_LOGIN, { type: 'user', id: email }, undefined, {
      success: false,
      error: error.message,
      ipAddress: metadata.ipAddress,
      userAgent: metadata.userAgent,
    })
    throw error
  }
}Compliance
Ensure regulatory compliance:
// src/security/compliance.ts
import { db } from 'sdk.do'
// GDPR: Data subject rights
// Implements the GDPR data-subject rights supported by this platform:
// access (export), erasure (delete/anonymize) and portability.
export class GDPRService {
// Right to access
// Bundles the user's profile with related orders and reviews.
async exportUserData(userId: string): Promise<UserDataExport> {
const user = await db.get('$.Person', userId)
const orders = await db.related(user, '$.makesOrder', '$.Order')
const reviews = await db.related(user, '$.author', '$.Review')
return {
profile: user,
orders,
reviews,
exportedAt: new Date(),
}
}
// Right to erasure
// Orders are anonymized (retained for accounting); reviews and the Person
// record are hard-deleted; the erasure itself is audit-logged.
async deleteUserData(userId: string) {
const user = await db.get('$.Person', userId)
// Anonymize orders (keep for business records)
const orders = await db.related(user, '$.makesOrder', '$.Order')
for (const order of orders) {
await db.update('$.Order', order.$id, {
customer: null,
shippingAddress: '[REDACTED]',
billingAddress: '[REDACTED]',
})
}
// Delete reviews
const reviews = await db.related(user, '$.author', '$.Review')
for (const review of reviews) {
await db.delete('$.Review', review.$id)
}
// Delete user
await db.delete('$.Person', userId)
// Log deletion
// NOTE(review): `audit` is the AuditLogger instance from the audit module
// — confirm it is imported where this class is used.
await audit.log(AuditAction.DATA_DELETE, { type: 'user', id: userId }, { type: 'Person', id: userId }, { reason: 'gdpr_erasure' })
}
// Right to portability
// Same payload as exportUserData, serialized as pretty-printed JSON.
async exportDataPortable(userId: string): Promise<string> {
const data = await this.exportUserData(userId)
// Export as JSON
return JSON.stringify(data, null, 2)
}
}
// SOC 2: Security controls
export class SOC2Compliance {
  /**
   * Access review: flags dormant accounts (no login for > 90 days) and all
   * admin grants for human verification.
   */
  async performAccessReview(): Promise<AccessReviewReport> {
    const users = await db.query('$.Person', {})
    const report: AccessReviewReport = {
      reviewDate: new Date(),
      findings: [],
    }
    for (const user of users) {
      // Check last login
      if (user.lastLoginAt) {
        // NOTE(review): assumes lastLoginAt is hydrated as a Date — confirm
        // the sdk.do driver does not return ISO strings here.
        const daysSinceLogin = (Date.now() - user.lastLoginAt.getTime()) / (1000 * 60 * 60 * 24)
        if (daysSinceLogin > 90) {
          report.findings.push({
            type: 'inactive_user',
            userId: user.$id,
            message: `User has not logged in for ${Math.floor(daysSinceLogin)} days`,
          })
        }
      }
      // Check role assignment
      if (user.roles?.includes('admin')) {
        report.findings.push({
          type: 'admin_access',
          userId: user.$id,
          message: 'User has admin access - verify need',
        })
      }
    }
    return report
  }

  /**
   * Deletes audit logs and archives-then-deletes orders older than the
   * 7-year retention period.
   *
   * Fix over the previous revision: archiveOrder() was called here but
   * never defined on the class (a compile error); implemented below.
   */
  async enforceRetentionPolicy() {
    const retentionPeriod = 7 * 365 * 24 * 60 * 60 * 1000 // 7 years
    const cutoffDate = new Date(Date.now() - retentionPeriod)
    // Delete old audit logs
    await db.deleteMany('$.AuditLog', {
      'timestamp:lt': cutoffDate,
    })
    // Archive old orders
    const oldOrders = await db.query('$.Order', {
      'orderDate:lt': cutoffDate,
    })
    for (const order of oldOrders) {
      // Archive first so a mid-loop failure never loses the record.
      await this.archiveOrder(order)
      await db.delete('$.Order', order.$id)
    }
  }

  // Copies an order into cold storage before it is removed from the
  // primary collection.
  private async archiveOrder(order: any) {
    await db.create('$.ArchivedOrder', {
      $type: 'ArchivedOrder',
      ...order,
      archivedAt: new Date(),
    })
  }
}
// HIPAA: Healthcare compliance
// Stores and serves Protected Health Information: PHI is encrypted at rest
// with a dedicated key, and every read is recorded both on the record and
// in the audit log.
export class HIPAACompliance {
// Encrypt PHI (Protected Health Information)
async storePHI(patientId: string, phi: HealthData) {
// PHI gets its own master key, separate from general app secrets.
const encryption = new EncryptionService(process.env.PHI_KEY!)
return db.create('$.HealthRecord', {
$type: 'HealthRecord',
patient: patientId,
data: encryption.encrypt(JSON.stringify(phi)),
createdAt: new Date(),
accessLog: [],
})
}
// Log all PHI access
// Appends to the record's access log, writes an audit entry, then decrypts
// and returns the PHI payload.
async accessPHI(recordId: string, userId: string, purpose: string) {
const record = await db.get('$.HealthRecord', recordId)
// Log access
// NOTE(review): read-modify-write on accessLog can drop entries under
// concurrent access — prefer an atomic array-push if the datastore has one.
await db.update('$.HealthRecord', recordId, {
accessLog: [
...record.accessLog,
{
userId,
purpose,
timestamp: new Date(),
},
],
})
// Audit log
await audit.log(AuditAction.DATA_ACCESS, { type: 'user', id: userId }, { type: 'HealthRecord', id: recordId }, { purpose })
// Decrypt and return
const encryption = new EncryptionService(process.env.PHI_KEY!)
return JSON.parse(encryption.decrypt(record.data))
}
}Monitoring & Observability
Metrics and KPIs
Track business and system metrics:
// src/monitoring/metrics.ts
import { db, send } from 'sdk.do'
// Collects business/system metrics, mirrors each sample to an external
// monitoring endpoint and persists it as a $.Metric record.
export class MetricsCollector {
// Last-seen sample per metric key; counters derive their next value from it.
private metrics: Map<string, Metric> = new Map()
// Business metrics
// Point-in-time value (gauge), e.g. the revenue of one order.
async trackBusinessMetric(name: string, value: number, tags: Record<string, string> = {}) {
await this.recordMetric({
name,
value,
type: 'gauge',
tags,
timestamp: Date.now(),
})
}
// Monotonic count. NOTE(review): counter state lives only in this
// process's Map, so counts reset on restart and are per-instance —
// confirm that is acceptable for these KPIs.
async incrementCounter(name: string, tags: Record<string, string> = {}) {
const existing = this.metrics.get(this.getMetricKey(name, tags)) || { value: 0 }
await this.recordMetric({
name,
value: existing.value + 1,
type: 'counter',
tags,
timestamp: Date.now(),
})
}
// Duration sample in milliseconds (histogram).
async recordDuration(name: string, durationMs: number, tags: Record<string, string> = {}) {
await this.recordMetric({
name,
value: durationMs,
type: 'histogram',
tags,
timestamp: Date.now(),
})
}
private async recordMetric(metric: Metric) {
const key = this.getMetricKey(metric.name, metric.tags)
this.metrics.set(key, metric)
// Send to monitoring service
await this.sendToMonitoring(metric)
// Store in database for analysis
await db.create('$.Metric', {
$type: 'Metric',
...metric,
})
}
// Canonical key: name plus sorted tag pairs, so tag order is irrelevant.
private getMetricKey(name: string, tags: Record<string, string>): string {
const tagString = Object.entries(tags)
.sort(([a], [b]) => a.localeCompare(b))
.map(([k, v]) => `${k}:${v}`)
.join(',')
return `${name}{${tagString}}`
}
private async sendToMonitoring(metric: Metric) {
// Send to Datadog, Prometheus, etc.
// NOTE(review): no try/catch here — a monitoring outage fails the business
// operation that recorded the metric; consider making this best-effort.
await fetch(process.env.METRICS_ENDPOINT!, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(metric),
})
}
}
// Usage: Track business KPIs
const metrics = new MetricsCollector()
// Records revenue (gauge) and order count (counter) for one order.
export const trackOrderMetrics = async (order: Order) => {
await metrics.trackBusinessMetric('order.revenue', order.totalPrice, {
currency: 'USD',
// NOTE(review): assumes order.customer is a populated object (not a bare
// id reference) — confirm callers hydrate the relation.
customer_tier: order.customer.tier,
})
await metrics.incrementCounter('order.count', {
status: order.orderStatus,
})
}
// Usage: Track system performance
/**
 * Runs fn() and records its wall-clock duration as a histogram metric
 * tagged with success/error status. Failures are rethrown unchanged.
 */
export const trackFunctionPerformance = async <T>(name: string, fn: () => Promise<T>): Promise<T> => {
  const startedAt = Date.now()
  let status = 'success'
  try {
    return await fn()
  } catch (err) {
    status = 'error'
    throw err
  } finally {
    // One recording site for both outcomes.
    await metrics.recordDuration(`function.${name}.duration`, Date.now() - startedAt, { status })
  }
}Logging Patterns
Structured logging for debugging:
// src/monitoring/logger.ts
// Log severity levels, ordered least to most severe; Logger filters by
// comparing positions in this declaration order.
export enum LogLevel {
  DEBUG = 'debug',
  INFO = 'info',
  WARN = 'warn',
  ERROR = 'error',
}

// Ambient fields attached to every log line (request/user/business ids...).
interface LogContext {
  userId?: string
  businessId?: string
  requestId?: string
  operation?: string
  [key: string]: any
}

/**
 * Structured JSON logger: each entry is one JSON line on stdout and is
 * forwarded best-effort to the external logging endpoint.
 */
class Logger {
  private context: LogContext = {}

  constructor(private level: LogLevel = LogLevel.INFO) {}

  /** Merge fields into the ambient context for all subsequent logs. */
  setContext(context: LogContext) {
    this.context = { ...this.context, ...context }
  }

  /** Drop all ambient context fields. */
  clearContext() {
    this.context = {}
  }

  debug(message: string, data?: any) {
    this.log(LogLevel.DEBUG, message, data)
  }

  info(message: string, data?: any) {
    this.log(LogLevel.INFO, message, data)
  }

  warn(message: string, data?: any) {
    this.log(LogLevel.WARN, message, data)
  }

  /** Error entries carry a serialized {name, message, stack} snapshot. */
  error(message: string, error?: Error, data?: any) {
    const errorInfo = error
      ? {
          name: error.name,
          message: error.message,
          stack: error.stack,
        }
      : undefined
    this.log(LogLevel.ERROR, message, { ...data, error: errorInfo })
  }

  private log(level: LogLevel, message: string, data?: any) {
    if (!this.shouldLog(level)) {
      return
    }
    const entry = {
      level,
      message,
      timestamp: new Date().toISOString(),
      context: this.context,
      data,
    }
    // Console output
    console.log(JSON.stringify(entry))
    // Fire-and-forget: shipping logs must never block or fail callers.
    this.sendToLoggingService(entry)
  }

  // A level passes when it is at or above the configured threshold in the
  // LogLevel declaration order.
  private shouldLog(level: LogLevel): boolean {
    const order = Object.values(LogLevel)
    return order.indexOf(level) >= order.indexOf(this.level)
  }

  private async sendToLoggingService(logEntry: any) {
    // Send to Elasticsearch, Loki, CloudWatch, etc.
    try {
      await fetch(process.env.LOGGING_ENDPOINT!, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(logEntry),
      })
    } catch (error) {
      // Don't fail application if logging fails
      console.error('Failed to send log:', error)
    }
  }
}

export const logger = new Logger((process.env.LOG_LEVEL as LogLevel) || LogLevel.INFO)
// Usage
// Attach ambient ids once; subsequent entries carry them automatically.
logger.setContext({ businessId: 'biz-123', userId: 'user-456' })
logger.info('Processing order', { orderId: 'order-789' })
// NOTE(review): `error` below is a placeholder for a caught exception —
// this snippet does not compile standalone.
logger.error('Failed to process payment', error, { orderId: 'order-789' })Alerting Strategies
Proactive alerting:
// src/monitoring/alerting.ts
import { db, send } from 'sdk.do'
// Alert severity ladder; channel routing is keyed off this in
// AlertManager.getChannelsForSeverity.
export enum AlertSeverity {
INFO = 'info',
WARNING = 'warning',
ERROR = 'error',
CRITICAL = 'critical',
}
export class AlertManager {
private alertThresholds = new Map<string, ThresholdConfig>()
constructor() {
this.setupDefaultThresholds()
}
private setupDefaultThresholds() {
// Error rate threshold
this.alertThresholds.set('error_rate', {
warning: 0.01, // 1%
critical: 0.05, // 5%
windowMs: 300000, // 5 minutes
})
// Response time threshold
this.alertThresholds.set('response_time_p95', {
warning: 1000, // 1s
critical: 5000, // 5s
windowMs: 300000,
})
// Order failure rate
this.alertThresholds.set('order_failure_rate', {
warning: 0.02, // 2%
critical: 0.1, // 10%
windowMs: 600000, // 10 minutes
})
}
async checkThreshold(metricName: string, value: number) {
const config = this.alertThresholds.get(metricName)
if (!config) return
let severity: AlertSeverity | null = null
if (value >= config.critical) {
severity = AlertSeverity.CRITICAL
} else if (value >= config.warning) {
severity = AlertSeverity.WARNING
}
if (severity) {
await this.sendAlert({
severity,
metric: metricName,
value,
threshold: severity === AlertSeverity.CRITICAL ? config.critical : config.warning,
message: `${metricName} is ${value}, threshold: ${severity === AlertSeverity.CRITICAL ? config.critical : config.warning}`,
})
}
}
async sendAlert(alert: Alert) {
// Send to appropriate channels based on severity
const channels = this.getChannelsForSeverity(alert.severity)
for (const channel of channels) {
switch (channel) {
case 'slack':
await this.sendToSlack(alert)
break
case 'pagerduty':
await this.sendToPagerDuty(alert)
break
case 'email':
await this.sendToEmail(alert)
break
}
}
// Store alert
await db.create('$.Alert', {
$type: 'Alert',
...alert,
timestamp: new Date(),
})
}
private getChannelsForSeverity(severity: AlertSeverity): string[] {
switch (severity) {
case AlertSeverity.CRITICAL:
return ['pagerduty', 'slack', 'email']
case AlertSeverity.ERROR:
return ['slack', 'email']
case AlertSeverity.WARNING:
return ['slack']
default:
return []
}
}
private async sendToSlack(alert: Alert) {
await fetch(process.env.SLACK_WEBHOOK_URL!, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
text: `[${alert.severity.toUpperCase()}] ${alert.message}`,
attachments: [
{
color: this.getSeverityColor(alert.severity),
fields: [
{ title: 'Metric', value: alert.metric, short: true },
{ title: 'Value', value: alert.value.toString(), short: true },
{ title: 'Threshold', value: alert.threshold.toString(), short: true },
],
},
],
}),
})
}
private getSeverityColor(severity: AlertSeverity): string {
switch (severity) {
case AlertSeverity.CRITICAL:
return 'danger'
case AlertSeverity.ERROR:
return 'warning'
case AlertSeverity.WARNING:
return 'warning'
default:
return 'good'
}
}
}
// Usage: Monitor metrics and alert
const alertManager = new AlertManager()

/**
 * Computes the 4xx/5xx error rate over the last 5 minutes and feeds it to
 * the alert thresholds.
 *
 * Fix over the previous revision: with zero requests in the window the
 * aggregation returns no row, so `metrics` was undefined and the division
 * threw / produced NaN. Zero traffic is now treated as a 0% error rate.
 */
export const monitorErrorRate = async () => {
  const window = 300000 // 5 minutes
  const cutoff = new Date(Date.now() - window)
  const [metrics] = await db.aggregate('$.Request', [
    { $match: { timestamp: { $gte: cutoff } } },
    {
      $group: {
        _id: null,
        total: { $sum: 1 },
        errors: {
          // statusCode >= 400 counts as an error response
          $sum: { $cond: [{ $gte: ['$statusCode', 400] }, 1, 0] },
        },
      },
    },
  ])
  const errorRate = metrics && metrics.total > 0 ? metrics.errors / metrics.total : 0
  await alertManager.checkThreshold('error_rate', errorRate)
}Distributed Tracing
Track requests across services:
// src/monitoring/tracing.ts
/**
 * Minimal distributed-tracing client: spans carry ids, tags, logs and a
 * computed duration, and are shipped to an external tracing backend.
 *
 * Fix over the previous revision: span export failures are now swallowed.
 * finishSpan() is awaited inside business workflows, so a down or
 * misconfigured tracing backend previously failed the traced operation.
 */
export class Tracer {
  /**
   * Opens a span. A root span mints a fresh trace id; a child span joins
   * its parent's trace.
   */
  async startSpan(name: string, parentSpanId?: string): Promise<Span> {
    const span: Span = {
      spanId: this.generateSpanId(),
      traceId: parentSpanId ? this.getTraceId(parentSpanId) : this.generateTraceId(),
      parentSpanId,
      name,
      startTime: Date.now(),
      tags: {},
      logs: [],
    }
    return span
  }

  /** Closes the span, computes its duration and exports it (best-effort). */
  async finishSpan(span: Span) {
    span.endTime = Date.now()
    span.duration = span.endTime - span.startTime
    // Send to tracing backend
    await this.sendToTracingBackend(span)
  }

  addTag(span: Span, key: string, value: any) {
    span.tags[key] = value
  }

  addLog(span: Span, message: string, data?: any) {
    span.logs.push({
      timestamp: Date.now(),
      message,
      data,
    })
  }

  private generateSpanId(): string {
    return Math.random().toString(36).substring(2, 15)
  }

  private generateTraceId(): string {
    return Math.random().toString(36).substring(2, 15)
  }

  private getTraceId(spanId: string): string {
    // In production, look up trace ID from span ID
    return spanId
  }

  private async sendToTracingBackend(span: Span) {
    // Send to Jaeger, Zipkin, DataDog APM, etc.
    try {
      await fetch(process.env.TRACING_ENDPOINT!, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(span),
      })
    } catch (error) {
      // Tracing must never take the traced operation down.
      console.error('Failed to export span:', error)
    }
  }
}
// Usage: Trace workflow execution
const tracer = new Tracer()
// Wraps order processing in a root span with child spans for payment and
// inventory; errors are tagged on the root span and spans are always
// closed in finally blocks.
export const processOrderWithTracing = async (order: Order) => {
const span = await tracer.startSpan('process_order')
tracer.addTag(span, 'order.id', order.$id)
tracer.addTag(span, 'order.total', order.totalPrice)
try {
// Payment processing
const paymentSpan = await tracer.startSpan('process_payment', span.spanId)
try {
await processPayment(order)
tracer.addTag(paymentSpan, 'payment.status', 'success')
} finally {
await tracer.finishSpan(paymentSpan)
}
// Inventory reservation
const inventorySpan = await tracer.startSpan('reserve_inventory', span.spanId)
try {
await reserveInventory(order)
tracer.addTag(inventorySpan, 'inventory.status', 'reserved')
} finally {
await tracer.finishSpan(inventorySpan)
}
tracer.addLog(span, 'Order processed successfully')
} catch (error) {
tracer.addTag(span, 'error', true)
tracer.addLog(span, 'Error processing order', { error: error.message })
throw error
} finally {
await tracer.finishSpan(span)
}
}Error Tracking
Centralized error tracking and analysis:
// src/monitoring/error-tracking.ts
// Captures errors, fingerprints them for grouping, mirrors them to an
// external error-tracking service, persists them, and escalates critical
// ones through the alert manager.
// NOTE(review): this section uses `crypto`, `db`, `alertManager` and
// `AlertSeverity` without importing them — confirm imports at the real
// module boundary.
export class ErrorTracker {
// Records one error occurrence with context and a grouping fingerprint.
async captureError(error: Error, context: ErrorContext = {}) {
const errorReport: ErrorReport = {
error: {
name: error.name,
message: error.message,
stack: error.stack,
},
context: {
...context,
timestamp: new Date(),
environment: process.env.NODE_ENV,
},
fingerprint: this.generateFingerprint(error),
}
// Send to error tracking service
await this.sendToErrorTracking(errorReport)
// Store for analysis
await db.create('$.ErrorReport', {
$type: 'ErrorReport',
...errorReport,
})
// Check if error requires immediate attention
if (this.isCriticalError(error)) {
await alertManager.sendAlert({
severity: AlertSeverity.CRITICAL,
message: `Critical error: ${error.message}`,
metric: 'error',
value: 1,
threshold: 0,
})
}
}
// Stable id for "the same" error: name + message + top stack frames.
private generateFingerprint(error: Error): string {
// Group similar errors together
const key = `${error.name}:${error.message}:${this.getStackSignature(error)}`
return crypto.createHash('md5').update(key).digest('hex')
}
private getStackSignature(error: Error): string {
if (!error.stack) return ''
// Extract first few stack frames
const frames = error.stack.split('\n').slice(0, 3)
return frames.join('|')
}
// NOTE(review): DatabaseError/PaymentError are project classes — confirm
// they are in scope where this module is compiled.
private isCriticalError(error: Error): boolean {
return error instanceof DatabaseError || error instanceof PaymentError || error.message.includes('CRITICAL')
}
private async sendToErrorTracking(errorReport: ErrorReport) {
// Send to Sentry, Rollbar, Bugsnag, etc.
await fetch(process.env.ERROR_TRACKING_ENDPOINT!, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(errorReport),
})
}
// Analyze error trends
// Groups recent reports by fingerprint and ranks them by frequency.
async analyzeErrorTrends(hoursBack: number = 24): Promise<ErrorAnalysis> {
const cutoff = new Date(Date.now() - hoursBack * 60 * 60 * 1000)
const errors = await db.query('$.ErrorReport', {
'context.timestamp:gte': cutoff,
})
// Group by fingerprint
const grouped = new Map<string, ErrorReport[]>()
for (const error of errors) {
const existing = grouped.get(error.fingerprint) || []
grouped.set(error.fingerprint, [...existing, error])
}
// Find trending errors
// NOTE(review): firstSeen/lastSeen assume the query returns reports in
// chronological order — confirm, or sort explicitly.
const trending = Array.from(grouped.entries())
.map(([fingerprint, errors]) => ({
fingerprint,
count: errors.length,
error: errors[0].error,
firstSeen: errors[0].context.timestamp,
lastSeen: errors[errors.length - 1].context.timestamp,
}))
.sort((a, b) => b.count - a.count)
return {
totalErrors: errors.length,
uniqueErrors: grouped.size,
trending: trending.slice(0, 10),
}
}
}
export const errorTracker = new ErrorTracker()Scalability Patterns
Horizontal Scaling
Scale across multiple instances:
// src/scaling/load-balancer.ts
/**
 * In-memory pool of service instances with several selection strategies.
 *
 * Fixes over the previous revision:
 *  - getNextInstance() clamps the rotation index: after removeInstance()
 *    or pruneUnhealthy() shrank the pool, the stale index could point past
 *    the end and the method returned undefined.
 *  - The weighted and least-connections strategies now fail fast on an
 *    empty pool like getNextInstance(), instead of returning undefined or
 *    throwing an opaque reduce() TypeError.
 */
export class LoadBalancer {
  private instances: Instance[] = []
  private currentIndex = 0

  addInstance(instance: Instance) {
    this.instances.push(instance)
  }

  removeInstance(instanceId: string) {
    this.instances = this.instances.filter((i) => i.id !== instanceId)
  }

  // Round-robin load balancing
  getNextInstance(): Instance {
    if (this.instances.length === 0) {
      throw new Error('No instances available')
    }
    // Clamp in case the pool shrank since the last call.
    this.currentIndex = this.currentIndex % this.instances.length
    const instance = this.instances[this.currentIndex]
    this.currentIndex = (this.currentIndex + 1) % this.instances.length
    return instance
  }

  // Weighted random selection (probability proportional to weight)
  getInstanceWeighted(): Instance {
    if (this.instances.length === 0) {
      throw new Error('No instances available')
    }
    const totalWeight = this.instances.reduce((sum, i) => sum + i.weight, 0)
    let random = Math.random() * totalWeight
    for (const instance of this.instances) {
      random -= instance.weight
      if (random <= 0) {
        return instance
      }
    }
    // Floating-point remainder: fall back to the first instance.
    return this.instances[0]
  }

  // Least connections
  getInstanceLeastConnections(): Instance {
    if (this.instances.length === 0) {
      throw new Error('No instances available')
    }
    return this.instances.reduce((min, instance) => (instance.activeConnections < min.activeConnections ? instance : min))
  }

  // Health check: healthy means /health answers 2xx within 5 seconds.
  async checkHealth(instance: Instance): Promise<boolean> {
    try {
      const response = await fetch(`${instance.url}/health`, {
        signal: AbortSignal.timeout(5000),
      })
      return response.ok
    } catch {
      return false
    }
  }

  // Remove unhealthy instances (checked in parallel).
  async pruneUnhealthy() {
    const healthChecks = await Promise.all(
      this.instances.map(async (instance) => ({
        instance,
        healthy: await this.checkHealth(instance),
      }))
    )
    this.instances = healthChecks.filter((check) => check.healthy).map((check) => check.instance)
  }
}Load Balancing
Distribute work efficiently:
// src/scaling/work-distribution.ts
import { db, send } from 'sdk.do'
// Distributes tasks to workers by priority and available capacity; tasks
// that cannot be placed are parked in a 'queued' state.
export class WorkDistributor {
// Distribute tasks across workers
// High-priority tasks get first claim on worker capacity.
async distributeTasks(tasks: Task[], workers: Worker[]) {
// Group tasks by priority
const highPriority = tasks.filter((t) => t.priority === 'high')
const normalPriority = tasks.filter((t) => t.priority === 'normal')
const lowPriority = tasks.filter((t) => t.priority === 'low')
// Distribute high priority first
await this.distributeToWorkers(highPriority, workers)
await this.distributeToWorkers(normalPriority, workers)
await this.distributeToWorkers(lowPriority, workers)
}
private async distributeToWorkers(tasks: Task[], workers: Worker[]) {
// Get worker capacities
const capacities = await Promise.all(
workers.map(async (w) => ({
worker: w,
available: w.maxConcurrent - w.activeTasks,
}))
)
// Sort by available capacity
// Most-available first, so load spreads toward idle workers.
capacities.sort((a, b) => b.available - a.available)
// Assign tasks
for (const task of tasks) {
const workerCapacity = capacities.find((c) => c.available > 0)
if (!workerCapacity) {
// No capacity, queue for later
await this.queueTask(task)
continue
}
// Assign task
await send('$.Task.assign', {
taskId: task.$id,
workerId: workerCapacity.worker.id,
})
// Decrement locally so later tasks in this batch see the reservation.
workerCapacity.available--
}
}
// Parks a task until capacity frees up (picked up by the auto-scaler loop).
private async queueTask(task: Task) {
await db.update('$.Task', task.$id, {
status: 'queued',
queuedAt: new Date(),
})
}
}
// Auto-scaling based on queue depth
export class AutoScaler {
// Compare the desired worker count (derived from queue depth) against the
// current fleet size and scale in whichever direction is needed.
async checkAndScale() {
  const queueDepth = await this.getQueueDepth()
  const currentCount = await this.getCurrentWorkerCount()
  const desiredCount = this.calculateTargetWorkers(queueDepth)
  const delta = desiredCount - currentCount
  if (delta > 0) {
    await this.scaleUp(delta)
  } else if (delta < 0) {
    await this.scaleDown(-delta)
  }
}
// Number of tasks currently waiting in the queue.
private async getQueueDepth(): Promise<number> {
  return db.count('$.Task', { status: 'queued' })
}
// Target worker count: one worker per 10 queued tasks, clamped to [2, 20].
private calculateTargetWorkers(queueDepth: number): number {
  const MIN_WORKERS = 2
  const MAX_WORKERS = 20
  const TASKS_PER_WORKER = 10
  const raw = Math.ceil(queueDepth / TASKS_PER_WORKER)
  return Math.min(MAX_WORKERS, Math.max(MIN_WORKERS, raw))
}
// Spin up `count` additional task-processor workers, one create call each.
private async scaleUp(count: number) {
  logger.info(`Scaling up by ${count} workers`)
  for (let created = 0; created < count; created++) {
    await send('$.Worker.create', {
      type: 'task-processor',
      maxConcurrent: 10,
    })
  }
}
// Retire up to `count` workers, preferring the least busy.
// Only fully idle workers are actually terminated, so fewer than `count`
// may be removed in a given pass.
private async scaleDown(count: number) {
  logger.info(`Scaling down by ${count} workers`)
  const candidates = await db.query('$.Worker', {
    $sort: { activeTasks: 1 }, // least busy first
    $limit: count,
  })
  for (const candidate of candidates) {
    // Never kill a worker mid-task; busy candidates are simply skipped.
    if (candidate.activeTasks !== 0) continue
    await send('$.Worker.terminate', { workerId: candidate.$id })
  }
}
}

Queue Management
Implement reliable message queues:
// src/scaling/queue-manager.ts
export class QueueManager {
  // Registry of known queues.
  // NOTE(review): nothing reads this map yet — wire it into per-queue config or remove it.
  private queues = new Map<string, Queue>()

  /**
   * Enqueue a message onto a named queue.
   * Messages with `delayMs` stay invisible to consumers until `visibleAt`.
   * FIX: removed the call to `this.getOrCreateQueue(queueName)` — that method
   * was never defined (the call would throw a TypeError at runtime) and its
   * result was never used.
   */
  async enqueue(queueName: string, message: any, options: QueueOptions = {}) {
    await db.create('$.QueueMessage', {
      $type: 'QueueMessage',
      queue: queueName,
      payload: message,
      priority: options.priority || 0,
      delay: options.delayMs || 0,
      maxRetries: options.maxRetries || 3,
      retryCount: 0,
      status: 'pending',
      enqueueTime: new Date(),
      visibleAt: new Date(Date.now() + (options.delayMs || 0)),
    })
  }

  /**
   * Claim up to `count` visible messages (highest priority first, then FIFO)
   * and mark them as processing with a 5-minute visibility timeout.
   */
  async dequeue(queueName: string, count: number = 1): Promise<QueueMessage[]> {
    const now = new Date()
    const messages = await db.query('$.QueueMessage', {
      queue: queueName,
      status: 'pending',
      'visibleAt:lte': now,
      $sort: { priority: -1, enqueueTime: 1 },
      $limit: count,
    })
    // Mark as processing so other consumers skip these messages.
    for (const message of messages) {
      await db.update('$.QueueMessage', message.$id, {
        status: 'processing',
        processingStartTime: new Date(),
        visibilityTimeout: new Date(Date.now() + 300000), // 5 minutes
      })
    }
    return messages
  }

  // Acknowledge: the message was processed successfully, so delete it.
  async ack(messageId: string) {
    await db.delete('$.QueueMessage', messageId)
  }

  /**
   * Negative acknowledge: return the message to the queue with exponential
   * backoff, or move it to the dead-letter queue once retries are exhausted.
   */
  async nack(messageId: string, error?: Error) {
    const message = await db.get('$.QueueMessage', messageId)
    if (message.retryCount >= message.maxRetries) {
      // Move to dead letter queue
      await this.moveToDeadLetter(message, error)
      return
    }
    // Exponential backoff: 1s, 2s, 4s, ...
    const backoffMs = Math.pow(2, message.retryCount) * 1000
    await db.update('$.QueueMessage', messageId, {
      status: 'pending',
      retryCount: message.retryCount + 1,
      lastError: error?.message,
      visibleAt: new Date(Date.now() + backoffMs),
    })
  }

  // Preserve the failed message (and the reason) for later inspection.
  private async moveToDeadLetter(message: QueueMessage, error?: Error) {
    await db.create('$.DeadLetterMessage', {
      $type: 'DeadLetterMessage',
      originalMessage: message,
      reason: error?.message || 'Max retries exceeded',
      timestamp: new Date(),
    })
    await db.delete('$.QueueMessage', message.$id)
  }

  /**
   * Run `concurrency` long-lived workers against a queue.
   * NOTE: workers poll forever, so the returned promise never resolves.
   */
  async processQueue(queueName: string, processor: (message: any) => Promise<void>, concurrency: number = 5) {
    const workers: Promise<void>[] = []
    for (let i = 0; i < concurrency; i++) {
      workers.push(this.queueWorker(queueName, processor))
    }
    await Promise.all(workers)
  }

  // Single worker loop: dequeue → process → ack; nack on failure; idle-poll when empty.
  private async queueWorker(queueName: string, processor: (message: any) => Promise<void>) {
    while (true) {
      const [message] = await this.dequeue(queueName)
      if (!message) {
        // No messages, wait before polling again
        await new Promise((resolve) => setTimeout(resolve, 1000))
        continue
      }
      try {
        await processor(message.payload)
        await this.ack(message.$id)
      } catch (error) {
        await this.nack(message.$id, error as Error)
        logger.error('Error processing message', error as Error, {
          messageId: message.$id,
          queue: queueName,
        })
      }
    }
  }
}
// Usage
const queueManager = new QueueManager()
// Enqueue work
await queueManager.enqueue('order-processing', {
  orderId: 'order-123',
  action: 'fulfill',
})
// Process queue (starts 10 long-lived workers; this await never resolves
// because workers poll forever — run it as the process's main loop)
await queueManager.processQueue(
  'order-processing',
  async (message) => {
    await fulfillOrder(message.orderId)
  },
  10 // 10 concurrent workers
)

Data Partitioning
Partition data for scalability:
// src/scaling/partitioning.ts
export class DataPartitioner {
// Hash partitioning: map a key to a partition index via the first 32 bits
// of its MD5 digest. (MD5 is used for distribution only, not security.)
partitionByHash(key: string, partitionCount: number): number {
  const digest = crypto.createHash('md5').update(key).digest('hex')
  const bucket = parseInt(digest.slice(0, 8), 16)
  return bucket % partitionCount
}
// Range partitioning: index of the first boundary the value falls below;
// values beyond every boundary land in the final partition (index ranges.length).
partitionByRange(value: number, ranges: number[]): number {
  const idx = ranges.findIndex((boundary) => value < boundary)
  return idx === -1 ? ranges.length : idx
}
// Resolve the shard name for a customer from their address country's region.
async getCustomerPartition(customerId: string): Promise<string> {
  const customer = await db.get('$.Customer', customerId)
  const region = this.getRegionForCountry(customer.addressCountry)
  return `customers-${region}`
}
// Translate a country code into a coarse region bucket; unknown countries → 'other'.
private getRegionForCountry(country: string): string {
  const regions = {
    'US,CA,MX': 'north-america',
    'GB,DE,FR,IT,ES': 'europe',
    'JP,CN,KR,IN': 'asia',
    'BR,AR,CL': 'south-america',
  }
  const match = Object.entries(regions).find(([countries]) => countries.split(',').includes(country))
  return match ? match[1] : 'other'
}
// Fan a query out to every shard in parallel and merge the results.
async queryAcrossShards<T>(type: string, query: any, shardCount: number): Promise<T[]> {
  const perShard: Promise<T[]>[] = Array.from({ length: shardCount }, (_, shard) =>
    db.query(type, query, { shard, shardCount })
  )
  const results = await Promise.all(perShard)
  return results.flat()
}
}

Caching Layers
Multi-tier caching for performance:
Deployment & CI/CD
Deployment Strategies
Choose the right deployment strategy:
// src/deployment/strategies.ts
export class DeploymentManager {
/**
 * Blue-green deployment: stand up the new version on "green", verify it,
 * then cut traffic over. "Blue" is kept for 24h as a rollback target.
 */
async blueGreenDeploy(newVersion: string) {
  logger.info('Starting blue-green deployment', { version: newVersion })
  await this.deployToEnvironment('green', newVersion)
  // Refuse to switch traffic to an unhealthy environment.
  if (!(await this.healthCheck('green'))) {
    throw new Error('Green environment health check failed')
  }
  await this.runSmokeTests('green')
  await this.switchTraffic('blue', 'green')
  logger.info('Blue-green deployment completed')
  // Fire-and-forget cleanup once the 24h rollback window closes.
  // NOTE(review): this timer dies with the process — confirm a durable scheduler isn't needed.
  setTimeout(() => this.cleanupOldEnvironment('blue'), 24 * 60 * 60 * 1000)
}
/**
 * Canary deployment: shift traffic to the new version in stages
 * (5% → 25% → 50% → 100%), monitoring 30 minutes per stage and rolling
 * back if the canary's error rate exceeds production's by more than 50%.
 */
async canaryDeploy(newVersion: string) {
  logger.info('Starting canary deployment', { version: newVersion })
  const STAGE_MONITOR_MS = 30 * 60 * 1000 // 30 minutes
  await this.deployCanary(newVersion, 0.05)
  await this.monitorCanary(STAGE_MONITOR_MS)
  const canaryMetrics = await this.getCanaryMetrics()
  const productionMetrics = await this.getProductionMetrics()
  if (canaryMetrics.errorRate > productionMetrics.errorRate * 1.5) {
    // Canary has 50% more errors than production — abort and roll back.
    await this.rollbackCanary()
    throw new Error('Canary deployment failed: high error rate')
  }
  // Gradually widen the rollout, monitoring between steps.
  for (const fraction of [0.25, 0.5]) {
    await this.deployCanary(newVersion, fraction)
    await this.monitorCanary(STAGE_MONITOR_MS)
  }
  await this.deployCanary(newVersion, 1.0) // 100%
  logger.info('Canary deployment completed')
}
/**
 * Rolling deployment: upgrade instances in 20% batches, waiting for each
 * batch to report healthy (plus a one-minute soak) before continuing.
 */
async rollingDeploy(newVersion: string) {
  logger.info('Starting rolling deployment', { version: newVersion })
  const instances = await this.getInstances()
  const batchSize = Math.ceil(instances.length * 0.2) // 20% at a time
  for (let offset = 0; offset < instances.length; offset += batchSize) {
    const batch = instances.slice(offset, offset + batchSize)
    logger.info(`Deploying to batch ${offset / batchSize + 1}`, {
      instances: batch.map((inst) => inst.id),
    })
    for (const instance of batch) {
      await this.deployToInstance(instance.id, newVersion)
    }
    await this.waitForHealthy(batch)
    // Soak for a minute before touching the next batch.
    await new Promise((resolve) => setTimeout(resolve, 60000))
  }
  logger.info('Rolling deployment completed')
}
// An environment is healthy iff its /health endpoint responds 2xx;
// any network error counts as unhealthy.
private async healthCheck(environment: string): Promise<boolean> {
  try {
    const response = await fetch(`https://${environment}.app.do/health`)
    return response.ok
  } catch {
    return false
  }
}
// Run the critical-path smoke tests in sequence, failing fast on the first failure.
private async runSmokeTests(environment: string) {
  const tests = [
    () => this.testUserLogin(environment),
    () => this.testOrderCreation(environment),
    () => this.testPaymentProcessing(environment),
  ]
  for (const test of tests) {
    if (!(await test())) {
      throw new Error('Smoke tests failed')
    }
  }
}
}

Environment Management
Manage multiple environments:
// src/deployment/environments.ts
// Static per-environment configuration; getEnvironment() selects one at runtime.
export const environments = {
  development: {
    name: 'development',
    url: 'https://dev.app.do',
    database: process.env.DEV_DATABASE_URL,
    aiModel: 'gpt-4', // Cheaper model for dev
    features: {
      experimental: true,
      analytics: false, // keep dev traffic out of analytics
    },
  },
  staging: {
    name: 'staging',
    url: 'https://staging.app.do',
    database: process.env.STAGING_DATABASE_URL,
    aiModel: 'gpt-5', // same model as production so staging mirrors prod behavior
    features: {
      experimental: true,
      analytics: true,
    },
  },
  production: {
    name: 'production',
    url: 'https://app.do',
    database: process.env.PROD_DATABASE_URL,
    aiModel: 'gpt-5',
    features: {
      experimental: false, // no experiments in front of customers
      analytics: true,
    },
  },
}
// Resolve the active environment from NODE_ENV.
// FIX: an unrecognized NODE_ENV (e.g. 'test') previously returned undefined,
// crashing every downstream access such as `env.name`; unknown values now
// fall back to the development configuration.
export const getEnvironment = () => {
  const env = process.env.NODE_ENV || 'development'
  return environments[env] ?? environments.development
}
// Feature flags
export class FeatureFlags {
  /**
   * Resolve whether a feature is enabled: the static environment config wins;
   * otherwise the runtime flag stored for this environment is consulted.
   */
  async isEnabled(feature: string): Promise<boolean> {
    const env = getEnvironment()
    // Check environment config
    const staticValue = env.features[feature]
    if (staticValue !== undefined) {
      return staticValue
    }
    // Check database for runtime flags
    const [flag] = await db.query('$.FeatureFlag', {
      name: feature,
      environment: env.name,
    })
    return flag?.enabled || false
  }

  // Per-user override: force a feature on for a single user.
  // NOTE(review): isEnabled() does not consult these overrides yet — confirm.
  async enableForUser(feature: string, userId: string) {
    await db.create('$.FeatureFlagOverride', {
      $type: 'FeatureFlagOverride',
      feature,
      userId,
      enabled: true,
    })
  }

  // Percentage rollout: enable the flag for a fraction of traffic.
  // NOTE(review): rolloutPercentage is stored but not evaluated by isEnabled() — confirm.
  async enableForPercentage(feature: string, percentage: number) {
    await db.upsert('$.FeatureFlag', {
      name: feature,
      environment: getEnvironment().name,
      enabled: true,
      rolloutPercentage: percentage,
    })
  }
}
// Environment-specific configuration
// Derived settings keyed off the active environment's name.
export const getConfig = () => {
  const env = getEnvironment()
  return {
    database: {
      url: env.database,
      // Production gets a larger connection pool and TLS.
      poolSize: env.name === 'production' ? 20 : 5,
      ssl: env.name === 'production',
    },
    ai: {
      model: env.aiModel,
      // Looser timeout outside production to tolerate slower dev iteration.
      timeout: env.name === 'production' ? 30000 : 60000,
    },
    cache: {
      // Long TTL in production; short in dev/staging so changes show up quickly.
      ttl: env.name === 'production' ? 3600 : 60,
    },
    monitoring: {
      enabled: env.name !== 'development',
      // Sample 10% of traces in production, everything elsewhere.
      sampleRate: env.name === 'production' ? 0.1 : 1.0,
    },
  }
}

Version Control
Implement semantic versioning:
// src/deployment/versioning.ts
export class VersionManager {
// Semantic version of the running app; APP_VERSION is injected by the build,
// with '0.0.0' as the fallback for local runs.
private currentVersion: string
constructor() {
  this.currentVersion = process.env.APP_VERSION || '0.0.0'
}
// Split a 'major.minor.patch' string into numeric components.
// NOTE(review): malformed input yields NaN fields — confirm callers validate upstream.
parseVersion(version: string): SemanticVersion {
  const parts = version.split('.').map(Number)
  return { major: parts[0], minor: parts[1], patch: parts[2] }
}
// Semantic comparison: negative if v1 < v2, zero if equal, positive if v1 > v2.
compare(v1: string, v2: string): number {
  const left = this.parseVersion(v1)
  const right = this.parseVersion(v2)
  if (left.major !== right.major) return left.major - right.major
  if (left.minor !== right.minor) return left.minor - right.minor
  return left.patch - right.patch
}
// A client is compatible iff it shares the server's major version;
// older client minor/patch versions are always allowed.
isCompatible(clientVersion: string, serverVersion: string): boolean {
  const client = this.parseVersion(clientVersion)
  const server = this.parseVersion(serverVersion)
  return client.major === server.major
}
// Produce the next version string (does not mutate currentVersion):
// major resets minor+patch, minor resets patch, patch just increments.
bump(type: 'major' | 'minor' | 'patch'): string {
  const v = this.parseVersion(this.currentVersion)
  if (type === 'major') {
    v.major++
    v.minor = 0
    v.patch = 0
  } else if (type === 'minor') {
    v.minor++
    v.patch = 0
  } else if (type === 'patch') {
    v.patch++
  }
  return `${v.major}.${v.minor}.${v.patch}`
}
/**
 * Verify that every migration strictly after the current version and at or
 * before the target version has been tested; returns false (with a warning)
 * on the first untested migration.
 * FIX: removed dead locals (`currentVersionParsed`, `targetVersionParsed`,
 * `migrationVersion`) that were computed but never read.
 */
async checkMigrationCompatibility(targetVersion: string): Promise<boolean> {
  const migrations = await db.query('$.Migration', {
    $sort: { version: 1 },
  })
  for (const migration of migrations) {
    const afterCurrent = this.compare(migration.version, this.currentVersion) > 0
    const atOrBeforeTarget = this.compare(migration.version, targetVersion) <= 0
    if (afterCurrent && atOrBeforeTarget && !migration.tested) {
      logger.warn('Untested migration', { version: migration.version })
      return false
    }
  }
  return true
}
}

CI/CD Pipelines
Automated testing and deployment:
# .github/workflows/ci-cd.yml
# FIX: the block had lost all YAML indentation (every key sat at column 0),
# which is invalid for a workflow file. The standard GitHub Actions layout
# is restored below; no keys or values were changed.
name: CI/CD Pipeline

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]

env:
  NODE_VERSION: '20'

jobs:
  # Code Quality Checks
  quality:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Lint
        run: pnpm lint
      - name: Type check
        run: pnpm typecheck
      - name: Check formatting
        run: pnpm format:check

  # Unit and Integration Tests
  test:
    runs-on: ubuntu-latest
    needs: quality
    services:
      postgres:
        image: postgres:15
        env:
          POSTGRES_PASSWORD: test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      redis:
        image: redis:7
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
    steps:
      - uses: actions/checkout@v4
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Run unit tests
        run: pnpm test:unit --coverage
      - name: Run integration tests
        run: pnpm test:integration
        env:
          DATABASE_URL: postgres://postgres:test@localhost:5432/test
          REDIS_URL: redis://localhost:6379
      - name: Upload coverage
        uses: codecov/codecov-action@v3
        with:
          files: ./coverage/coverage-final.json

  # E2E Tests
  e2e:
    runs-on: ubuntu-latest
    needs: test
    steps:
      - uses: actions/checkout@v4
      - name: Setup pnpm
        uses: pnpm/action-setup@v2
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_VERSION }}
          cache: 'pnpm'
      - name: Install dependencies
        run: pnpm install --frozen-lockfile
      - name: Build application
        run: pnpm build
      - name: Run E2E tests
        run: pnpm test:e2e
        env:
          API_URL: http://localhost:3000
          API_KEY: ${{ secrets.TEST_API_KEY }}

  # Security Scanning
  security:
    runs-on: ubuntu-latest
    needs: quality
    steps:
      - uses: actions/checkout@v4
      - name: Run Snyk security scan
        uses: snyk/actions/node@master
        env:
          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        with:
          scan-type: 'fs'
          scan-ref: '.'
          format: 'sarif'
          output: 'trivy-results.sarif'
      - name: Upload Trivy results
        uses: github/codeql-action/upload-sarif@v2
        with:
          sarif_file: 'trivy-results.sarif'

  # Build and Push Docker Image
  build:
    runs-on: ubuntu-latest
    needs: [test, e2e, security]
    if: github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ghcr.io/${{ github.repository }}
          tags: |
            type=sha
            type=ref,event=branch
            type=semver,pattern={{version}}
      - name: Build and push
        uses: docker/build-push-action@v4
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  # Deploy to Staging
  deploy-staging:
    runs-on: ubuntu-latest
    needs: build
    if: github.ref == 'refs/heads/main'
    environment:
      name: staging
      url: https://staging.app.do
    steps:
      - uses: actions/checkout@v4
      - name: Deploy to staging
        run: |
          curl -X POST ${{ secrets.STAGING_WEBHOOK_URL }} \
            -H "Authorization: Bearer ${{ secrets.DEPLOY_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d '{
              "image": "ghcr.io/${{ github.repository }}:sha-${{ github.sha }}",
              "environment": "staging"
            }'
      - name: Wait for deployment
        run: sleep 60
      - name: Run smoke tests
        run: pnpm test:smoke
        env:
          API_URL: https://staging.app.do
          API_KEY: ${{ secrets.STAGING_API_KEY }}

  # Deploy to Production
  deploy-production:
    runs-on: ubuntu-latest
    needs: deploy-staging
    if: github.ref == 'refs/heads/main'
    environment:
      name: production
      url: https://app.do
    steps:
      - uses: actions/checkout@v4
      - name: Deploy to production
        run: |
          curl -X POST ${{ secrets.PRODUCTION_WEBHOOK_URL }} \
            -H "Authorization: Bearer ${{ secrets.DEPLOY_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d '{
              "image": "ghcr.io/${{ github.repository }}:sha-${{ github.sha }}",
              "environment": "production",
              "strategy": "canary"
            }'
      - name: Monitor deployment
        run: |
          for i in {1..30}; do
            STATUS=$(curl -s https://app.do/health | jq -r '.status')
            if [ "$STATUS" = "healthy" ]; then
              echo "Deployment healthy"
              exit 0
            fi
            echo "Waiting for healthy status... ($i/30)"
            sleep 60
          done
          echo "Deployment timeout"
          exit 1
      - name: Rollback on failure
        if: failure()
        run: |
          curl -X POST ${{ secrets.PRODUCTION_WEBHOOK_URL }}/rollback \
            -H "Authorization: Bearer ${{ secrets.DEPLOY_TOKEN }}"

  # Post-deployment verification
  verify:
    runs-on: ubuntu-latest
    needs: deploy-production
    if: github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v4
      - name: Run production smoke tests
        run: pnpm test:smoke
        env:
          API_URL: https://app.do
          API_KEY: ${{ secrets.PRODUCTION_API_KEY }}
      - name: Check error rates
        run: |
          ERROR_RATE=$(curl -s https://app.do/metrics/error-rate | jq -r '.rate')
          if (( $(echo "$ERROR_RATE > 0.01" | bc -l) )); then
            echo "Error rate too high: $ERROR_RATE"
            exit 1
          fi
      - name: Notify deployment success
        run: |
          curl -X POST ${{ secrets.SLACK_WEBHOOK_URL }} \
            -H "Content-Type: application/json" \
            -d '{
              "text": "Deployment to production successful",
              "blocks": [
                {
                  "type": "section",
                  "text": {
                    "type": "mrkdwn",
                    "text": ":white_check_mark: Deployment to production successful\n*Commit:* ${{ github.sha }}\n*Author:* ${{ github.actor }}"
                  }
                }
              ]
            }'

Rollback Procedures
Quick rollback when issues arise:
// src/deployment/rollback.ts
export class RollbackManager {
/**
 * Watch a fresh deployment for 15 minutes, polling metrics once a minute,
 * and automatically roll back to the previous version if the error rate
 * (> 5%) or p95 latency (> 5s) crosses its threshold.
 */
async monitorAndRollback(deploymentId: string) {
  const deployment = await db.get('$.Deployment', deploymentId)
  const monitoringDuration = 15 * 60 * 1000
  const deadline = Date.now() + monitoringDuration
  while (Date.now() < deadline) {
    const metrics = await this.getMetrics()
    if (metrics.errorRate > 0.05) {
      logger.error('High error rate detected, initiating rollback', {
        errorRate: metrics.errorRate,
      })
      await this.rollback(deployment.previousVersion)
      return
    }
    if (metrics.p95ResponseTime > 5000) {
      logger.error('High response time detected, initiating rollback', {
        p95ResponseTime: metrics.p95ResponseTime,
      })
      await this.rollback(deployment.previousVersion)
      return
    }
    // Poll once a minute.
    await new Promise((resolve) => setTimeout(resolve, 60000))
  }
  logger.info('Deployment monitoring completed successfully')
}
/**
 * Roll back to a previous version: record the event, switch traffic,
 * verify health, and alert the team. Rethrows if the rollback itself fails
 * (after recording the failure on the rollback event).
 */
async rollback(targetVersion: string) {
  logger.info('Starting rollback', { targetVersion })
  // Record rollback event
  const rollbackEvent = await db.create('$.RollbackEvent', {
    $type: 'RollbackEvent',
    targetVersion,
    reason: 'Manual rollback',
    timestamp: new Date(),
  })
  try {
    // Quick rollback: Switch traffic to previous version
    await this.switchTraffic(targetVersion)
    // Verify rollback
    const healthy = await this.verifyHealth()
    if (!healthy) {
      throw new Error('Rollback failed health check')
    }
    logger.info('Rollback completed successfully')
    // Update rollback event
    await db.update('$.RollbackEvent', rollbackEvent.$id, {
      status: 'completed',
      completedAt: new Date(),
    })
    // Alert team
    await alertManager.sendAlert({
      severity: AlertSeverity.WARNING,
      message: `Rolled back to version ${targetVersion}`,
      metric: 'deployment',
      value: 0,
      threshold: 0,
    })
  } catch (error) {
    // FIX: `error` is `unknown` in a catch clause under strict mode; narrow it
    // before reading `.message` (matches the `error as Error` pattern used elsewhere in this file).
    logger.error('Rollback failed', error as Error)
    await db.update('$.RollbackEvent', rollbackEvent.$id, {
      status: 'failed',
      error: (error as Error).message,
      completedAt: new Date(),
    })
    throw error
  }
}
// Database migration rollback
// Revert a single migration by executing its `down` SQL script and marking
// the record rolled back. Throws when no rollback script exists or the
// script itself fails (after logging).
async rollbackMigration(migrationId: string) {
  const migration = await db.get('$.Migration', migrationId)
  // Refuse irreversible migrations up front.
  if (!migration.down) {
    throw new Error('Migration does not have rollback script')
  }
  logger.info('Rolling back migration', { version: migration.version })
  try {
    // Execute down migration
    await db.executeSql(migration.down)
    // Mark as rolled back only after the SQL succeeds.
    await db.update('$.Migration', migrationId, {
      status: 'rolled_back',
      rolledBackAt: new Date(),
    })
    logger.info('Migration rollback completed')
  } catch (error) {
    logger.error('Migration rollback failed', error)
    throw error
  }
}
// Point the load balancer at the given version.
// Implementation depends on infrastructure — could be updating a load
// balancer, DNS, or a service mesh.
// FIX: the fetch response was ignored, so a failed switch was silently
// treated as success; a non-2xx response now throws so the caller's
// rollback flow records the failure instead of "passing" its health check.
private async switchTraffic(version: string) {
  const response = await fetch(`${process.env.LOAD_BALANCER_API}/switch`, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${process.env.LB_API_KEY}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ version }),
  })
  if (!response.ok) {
    throw new Error(`Traffic switch failed: ${response.status}`)
  }
}
// Health gate: 2xx from the public /health endpoint; any network error counts as unhealthy.
private async verifyHealth(): Promise<boolean> {
  try {
    const response = await fetch('https://app.do/health')
    return response.ok
  } catch {
    return false
  }
}
// Pull the current metrics snapshot from the monitoring service.
// NOTE(review): no error handling — a metrics outage rejects and aborts
// the monitoring loop; confirm that is acceptable.
private async getMetrics() {
  const response = await fetch(`${process.env.METRICS_API}/current`)
  return response.json()
}
}

Summary
This guide covered production best practices across eight critical areas:
- Code Organization: Structure your Business-as-Code projects for maintainability with clear module boundaries and composable functions
- Error Handling: Build resilient systems with proper error types, retry strategies, fallback mechanisms, and recovery workflows
- Testing Strategies: Ensure reliability with comprehensive unit, integration, and E2E tests, plus automated CI/CD testing
- Performance Optimization: Scale efficiently with optimized functions, workflows, caching, and database queries
- Security Considerations: Protect your business with proper authentication, encryption, RBAC, audit logging, and compliance controls
- Monitoring & Observability: Track system health with metrics, logging, alerting, tracing, and error tracking
- Scalability Patterns: Handle growth with horizontal scaling, load balancing, queue management, data partitioning, and multi-tier caching
- Deployment & CI/CD: Deploy reliably with automated pipelines, multiple deployment strategies, environment management, and quick rollback procedures
Next Steps
- Implement progressively: Start with code organization and testing, then add monitoring and security
- Measure everything: Use metrics to guide optimization efforts
- Automate relentlessly: Invest in CI/CD and automated testing
- Plan for failure: Implement error handling and recovery from day one
- Scale proactively: Design for scalability before you need it
For migration strategies, see the Migration Guide