Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 61 additions & 0 deletions .github/workflows/harness-image.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Builds the harness worker Docker image (multi-arch) and pushes it to GHCR.
name: Harness Worker Image

on:
  # Rebuild whenever any input to the image changes on these branches.
  push:
    branches: [main, certification-worker]
    paths:
      - "Dockerfile"
      - "harness/**"
      - "src/**"
      - "package.json"
      - "package-lock.json"
      - ".github/workflows/harness-image.yml"
  # Publish a version-tagged image on release, and allow manual runs.
  release:
    types: [published]
  workflow_dispatch:

# Cancel superseded in-flight runs for the same ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read   # checkout only
      packages: write  # push to GHCR
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # QEMU + Buildx enable the linux/arm64 cross-build below.
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Always tag "latest"; additionally tag with the release tag name
      # when the workflow was triggered by a published release.
      - name: Docker metadata
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/conductor-oss/javascript-sdk/harness-worker
          tags: |
            type=raw,value=latest
            type=raw,value=${{ github.event.release.tag_name }},enable=${{ github.event_name == 'release' }}

      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          target: harness
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
27 changes: 27 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Stage 1: full dependency install + SDK build (shared base for bundling).
FROM node:24-alpine AS build
WORKDIR /package
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build

# Stage 2: bundle the harness entry point into a single CJS file.
# NOTE(review): tsup emits main.cjs (not main.js) for --format cjs when
# package.json declares "type": "module" — confirm the emitted filename
# matches the COPY in the harness stage below.
FROM build AS harness-build
RUN npx tsup harness/main.ts \
  --outDir /app \
  --format cjs \
  --target node24 \
  --no-splitting

# Stage 3: production-only dependencies for the runtime image.
FROM node:24-alpine AS harness-deps
WORKDIR /package
COPY package*.json ./
RUN npm ci --omit=dev

# Stage 4: minimal runtime image, running as an unprivileged user.
FROM node:24-alpine AS harness
RUN adduser -D -u 65532 nonroot
USER nonroot
WORKDIR /app
COPY --from=harness-deps /package/node_modules /app/node_modules
COPY --from=harness-deps /package/package.json /app/package.json
COPY --from=harness-build /app/main.js /app/main.js
ENTRYPOINT ["node", "main.js"]
78 changes: 78 additions & 0 deletions harness/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# JS SDK Docker Harness

Two Docker targets built from the root `Dockerfile`: an **SDK build** and a **long-running worker harness**.

## Worker Harness

A self-feeding worker that runs indefinitely. On startup it registers five simulated tasks (`js_worker_0` through `js_worker_4`) and the `js_simulated_tasks_workflow`, then runs two background services:

- **WorkflowGovernor** — starts a configurable number of `js_simulated_tasks_workflow` instances per second (default 2), indefinitely.
- **SimulatedTaskWorkers** — five task handlers, each with a codename and a default sleep duration. Each worker supports configurable delay types, failure simulation, and output generation via task input parameters. The workflow chains them in sequence: quickpulse (1s) → whisperlink (2s) → shadowfetch (3s) → ironforge (4s) → deepcrawl (5s).

### Building Locally

```bash
docker build --target harness -t js-sdk-harness .
```

### Multiplatform Build and Push

To build for both `linux/amd64` and `linux/arm64` and push to GHCR:

```bash
# One-time: create a buildx builder if you don't have one
docker buildx create --name multiarch --use --bootstrap

# Build and push
docker buildx build \
--platform linux/amd64,linux/arm64 \
--target harness \
-t ghcr.io/conductor-oss/javascript-sdk/harness-worker:latest \
--push .
```

> **Note:** Multi-platform builds require `docker buildx` and a builder that supports cross-compilation. On macOS this works out of the box with Docker Desktop. On Linux you may need to install QEMU user-space emulators:
>
> ```bash
> docker run --privileged --rm tonistiigi/binfmt --install all
> ```

### Running

```bash
docker run -d \
-e CONDUCTOR_SERVER_URL=https://your-cluster.example.com/api \
-e CONDUCTOR_AUTH_KEY=$CONDUCTOR_AUTH_KEY \
-e CONDUCTOR_AUTH_SECRET=$CONDUCTOR_AUTH_SECRET \
-e HARNESS_WORKFLOWS_PER_SEC=4 \
js-sdk-harness
```

You can also run the harness locally without Docker:

```bash
export CONDUCTOR_SERVER_URL=https://your-cluster.example.com/api
export CONDUCTOR_AUTH_KEY=$CONDUCTOR_AUTH_KEY
export CONDUCTOR_AUTH_SECRET=$CONDUCTOR_AUTH_SECRET

npx tsx harness/main.ts
```

Override defaults with environment variables as needed:

```bash
HARNESS_WORKFLOWS_PER_SEC=4 HARNESS_BATCH_SIZE=10 npx tsx harness/main.ts
```

All resource names use a `js_` prefix so multiple SDK harnesses (C#, Python, Go, etc.) can coexist on the same cluster.

### Environment Variables

| Variable | Required | Default | Description |
|---|---|---|---|
| `CONDUCTOR_SERVER_URL` | yes | -- | Conductor API base URL |
| `CONDUCTOR_AUTH_KEY` | no | -- | Orkes auth key |
| `CONDUCTOR_AUTH_SECRET` | no | -- | Orkes auth secret |
| `HARNESS_WORKFLOWS_PER_SEC` | no | 2 | Workflows to start per second |
| `HARNESS_BATCH_SIZE` | no | 20 | Number of tasks each worker polls per batch |
| `HARNESS_POLL_INTERVAL_MS` | no | 100 | Milliseconds between poll cycles |
122 changes: 122 additions & 0 deletions harness/main.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import {
OrkesClients,
ConductorWorkflow,
TaskHandler,
simpleTask,
} from "../src/sdk";
import { MetadataResource } from "../src/open-api/generated";
import type { ConductorWorker } from "../src/sdk/clients/worker/types";
import { SimulatedTaskWorker } from "./simulatedTaskWorker";
import { WorkflowGovernor } from "./workflowGovernor";

// Name of the harness workflow; the "js_" prefix keeps harness resources
// from colliding with other SDK harnesses on the same cluster.
const WORKFLOW_NAME = "js_simulated_tasks_workflow";

// Definitions of the five simulated workers. Each entry becomes a task
// definition, a polling worker, and one sequential step of the workflow.
// sleepSeconds is the worker's default simulated delay.
const SIMULATED_WORKERS: {
  taskName: string;
  codename: string;
  sleepSeconds: number;
}[] = [
  { taskName: "js_worker_0", codename: "quickpulse", sleepSeconds: 1 },
  { taskName: "js_worker_1", codename: "whisperlink", sleepSeconds: 2 },
  { taskName: "js_worker_2", codename: "shadowfetch", sleepSeconds: 3 },
  { taskName: "js_worker_3", codename: "ironforge", sleepSeconds: 4 },
  { taskName: "js_worker_4", codename: "deepcrawl", sleepSeconds: 5 },
];

/**
 * Reads an integer configuration value from the environment.
 *
 * @param key - Environment variable name to read.
 * @param defaultVal - Fallback returned when the variable is unset,
 *   empty, or does not parse as a base-10 integer.
 * @returns The parsed integer, or `defaultVal`.
 */
function envIntOrDefault(key: string, defaultVal: number): number {
  const raw = process.env[key];
  if (!raw) return defaultVal;
  const parsed = parseInt(raw, 10);
  // Number.isNaN avoids the type-coercing global isNaN.
  return Number.isNaN(parsed) ? defaultVal : parsed;
}

/**
 * Registers the simulated task definitions and the chained harness
 * workflow with the Conductor server.
 *
 * @param client - Low-level API client used for task-def registration.
 * @param workflowClient - Workflow client used to register the workflow.
 */
async function registerMetadata(
  client: Awaited<ReturnType<typeof OrkesClients.prototype.getClient>>,
  workflowClient: ReturnType<typeof OrkesClients.prototype.getWorkflowClient>,
): Promise<void> {
  // One task definition per simulated worker.
  await MetadataResource.registerTaskDef({
    client,
    body: SIMULATED_WORKERS.map(({ taskName, codename, sleepSeconds }) => ({
      name: taskName,
      description: `JS SDK harness simulated task (${codename}, default delay ${sleepSeconds}s)`,
      retryCount: 1,
      timeoutSeconds: 300,
      responseTimeoutSeconds: 300,
      totalTimeoutSeconds: 0,
    })),
  });

  // Chain every simulated task in sequence within a single workflow.
  const workflow = new ConductorWorkflow(workflowClient, WORKFLOW_NAME)
    .version(1)
    .description("JS SDK harness simulated task workflow")
    .ownerEmail("js-sdk-harness@conductor.io");

  SIMULATED_WORKERS.forEach(({ codename, taskName }) => {
    workflow.add(simpleTask(codename, taskName, {}));
  });

  await workflow.register(true);

  console.log(
    `Registered workflow ${WORKFLOW_NAME} with ${SIMULATED_WORKERS.length} tasks`,
  );
}

/**
 * Harness entry point: registers metadata, starts the task workers, and
 * launches the workflow governor. Runs until SIGINT/SIGTERM.
 */
async function main(): Promise<void> {
  const clients = await OrkesClients.from();
  const workflowClient = clients.getWorkflowClient();
  const client = clients.getClient();

  await registerMetadata(client, workflowClient);

  // Runtime tuning knobs, overridable via environment variables.
  const workflowsPerSec = envIntOrDefault("HARNESS_WORKFLOWS_PER_SEC", 2);
  const batchSize = envIntOrDefault("HARNESS_BATCH_SIZE", 20);
  const pollIntervalMs = envIntOrDefault("HARNESS_POLL_INTERVAL_MS", 100);

  // Wrap each simulated worker in the ConductorWorker shape that
  // TaskHandler expects.
  const workers: ConductorWorker[] = SIMULATED_WORKERS.map(
    ({ taskName, codename, sleepSeconds }) => {
      const simulated = new SimulatedTaskWorker(
        taskName,
        codename,
        sleepSeconds,
        batchSize,
        pollIntervalMs,
      );
      return {
        taskDefName: simulated.taskName,
        execute: simulated.execute.bind(simulated),
        concurrency: simulated.batchSize,
        pollInterval: simulated.pollInterval,
      };
    },
  );

  const handler = new TaskHandler({
    client,
    workers,
    scanForDecorated: false,
  });
  await handler.startWorkers();

  // Continuously start workflow executions at the configured rate.
  const governor = new WorkflowGovernor(
    workflowClient,
    WORKFLOW_NAME,
    workflowsPerSec,
  );
  governor.start();

  // Graceful shutdown: stop starting workflows, drain workers, exit.
  const shutdown = async (): Promise<void> => {
    console.log("Shutting down...");
    governor.stop();
    await handler.stopWorkers();
    process.exit(0);
  };

  process.on("SIGINT", shutdown);
  process.on("SIGTERM", shutdown);
}

// Top-level launcher: surface any startup failure and exit non-zero so
// the container/orchestrator can detect the crash.
main().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});
Loading
Loading