Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
130 changes: 118 additions & 12 deletions packages/cli/src/deploy-command.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,15 @@
import path from 'node:path';
import {
createTerminalIO,
deploy,
resolveWorkspaceToken,
type DeployMode,
type DeployOptions,
type ModeLaunchHandle
} from '@agentworkforce/deploy';

const DEFAULT_CLOUD_URL = 'https://agentrelay.com';

/**
* Argv parser + dispatcher for `workforce deploy <persona-path> [flags]`.
* Keeps cli.ts itself slim — the file is already a large dispatcher and
Expand Down Expand Up @@ -62,14 +66,34 @@ export async function runLogin(args: readonly string[]): Promise<void> {
process.stdout.write(LOGIN_USAGE);
process.exit(0);
}
process.stderr.write(
'The browser-based workforce login flow is rolling out in stages and is not on by default yet.\n' +
'For now, export your workspace credentials in the shell:\n\n' +
' export WORKFORCE_WORKSPACE_ID=<workspace-id>\n' +
' export WORKFORCE_WORKSPACE_TOKEN=<workspace-token>\n\n' +
'Then re-run `workforce deploy ./your-persona.json`.\n'

const opts = parseLoginArgs(args);
const io = createTerminalIO();
const workspace = opts.workspace
?? process.env.WORKFORCE_WORKSPACE_ID?.trim()
?? (await io.prompt('Workspace ID')).trim();
if (!workspace) {
process.stderr.write('workforce login failed: workspace is required; pass --workspace or set WORKFORCE_WORKSPACE_ID\n');
process.exit(1);
}

const cloudUrl = normalizeCloudUrl(
opts.cloudUrl
?? process.env.WORKFORCE_DEPLOY_CLOUD_URL
?? process.env.WORKFORCE_CLOUD_URL
?? DEFAULT_CLOUD_URL
);
process.exit(1);

try {
await resolveWorkspaceToken({ workspace, cloudUrl, io });
process.stdout.write(`\nlogged in: ${workspace}\n`);
process.exit(0);
} catch (err) {
process.stderr.write(
`\nworkforce login failed: ${err instanceof Error ? err.message : String(err)}\n`
);
process.exit(1);
}
}

const DEPLOY_USAGE = `usage: workforce deploy <persona-path> [flags]
Expand All @@ -83,19 +107,29 @@ Flags:
--bundle-out <dir> Emit the bundle to <dir> and exit (no launch)
--dry-run Validate the persona and exit before any side effects
--cloud-url <url> Override the workforce cloud base URL
--no-prompt Fail instead of prompting for cloud setup
--harness-source <source> Cloud harness source: plan, byok, or oauth
--byok-key <key> API key for --harness-source byok
--on-exists <choice> Existing cloud persona behavior: cancel, update, or destroy
--input <key>=<value> Override a declared persona input (repeatable)
-h, --help Print this message
`;

const LOGIN_USAGE = `usage: workforce login
const LOGIN_USAGE = `usage: workforce login [flags]

Connect this machine to a workforce workspace. The full OAuth flow ships
once the cloud login surface is live; until then, set:
Connect this machine to a workforce workspace using the browser OAuth flow.
The resulting workspace token is stored in the OS keychain when available,
falling back to ~/.agentworkforce/login.json.

export WORKFORCE_WORKSPACE_ID=...
export WORKFORCE_WORKSPACE_TOKEN=...
Flags:
--workspace <name> Workforce workspace; defaults to WORKFORCE_WORKSPACE_ID or prompt
--cloud-url <url> Override the workforce cloud base URL
-h, --help Print this message
`;

const HARNESS_SOURCES = ['plan', 'byok', 'oauth'] as const;
const ON_EXISTS_CHOICES = ['update', 'destroy', 'cancel'] as const;

export function parseDeployArgs(args: readonly string[]): DeployOptions {
let personaPath: string | undefined;
let mode: DeployMode | undefined;
Expand All @@ -106,6 +140,10 @@ export function parseDeployArgs(args: readonly string[]): DeployOptions {
let bundleOut: string | undefined;
let dryRun = false;
let cloudUrl: string | undefined;
let noPrompt = false;
let harnessSource: DeployOptions['harnessSource'];
let byokKey: string | undefined;
let onExists: DeployOptions['onExists'];
const inputs: Record<string, string> = {};

for (let i = 0; i < args.length; i += 1) {
Expand Down Expand Up @@ -133,6 +171,23 @@ export function parseDeployArgs(args: readonly string[]): DeployOptions {
dryRun = true;
} else if (a === '--cloud-url') {
cloudUrl = expectValue('--cloud-url', args[++i]);
} else if (a.startsWith('--cloud-url=')) {
cloudUrl = expectInlineValue('--cloud-url', a.slice('--cloud-url='.length));
} else if (a === '--no-prompt') {
noPrompt = true;
noConnect = true;
} else if (a === '--harness-source') {
harnessSource = expectChoice('--harness-source', expectValue('--harness-source', args[++i]), HARNESS_SOURCES);
} else if (a.startsWith('--harness-source=')) {
harnessSource = expectChoice('--harness-source', expectInlineValue('--harness-source', a.slice('--harness-source='.length)), HARNESS_SOURCES);
} else if (a === '--byok-key') {
byokKey = expectValue('--byok-key', args[++i]);
} else if (a.startsWith('--byok-key=')) {
byokKey = expectInlineValue('--byok-key', a.slice('--byok-key='.length));
} else if (a === '--on-exists') {
onExists = expectChoice('--on-exists', expectValue('--on-exists', args[++i]), ON_EXISTS_CHOICES);
} else if (a.startsWith('--on-exists=')) {
onExists = expectChoice('--on-exists', expectInlineValue('--on-exists', a.slice('--on-exists='.length)), ON_EXISTS_CHOICES);
} else if (a === '--input') {
parseDeployInputValue(expectDeployInputValue(args[++i]), inputs);
} else if (a.startsWith('--input=')) {
Expand Down Expand Up @@ -160,6 +215,10 @@ export function parseDeployArgs(args: readonly string[]): DeployOptions {
...(bundleOut ? { bundleOut } : {}),
...(dryRun ? { dryRun: true } : {}),
...(cloudUrl ? { cloudUrl } : {}),
...(noPrompt ? { noPrompt: true } : {}),
...(harnessSource ? { harnessSource } : {}),
...(byokKey ? { byokKey } : {}),
...(onExists ? { onExists } : {}),
...(Object.keys(inputs).length > 0 ? { inputs } : {})
};
}
Expand Down Expand Up @@ -193,6 +252,53 @@ function expectValue(flag: string, value: string | undefined): string {
return value;
}

/**
 * Validates a value supplied inline via `--flag=value` syntax.
 * Returns the raw (untrimmed) value; dies with a usage error when the
 * value is empty or whitespace-only.
 */
function expectInlineValue(flag: string, value: string): string {
  const hasContent = value.trim().length > 0;
  if (hasContent) {
    return value;
  }
  die(`${flag}: missing value`);
}

/**
 * Validates that `value` is one of the allowed literal choices for `flag`.
 * Returns the matching choice (narrowed to T); dies with a usage error
 * listing the valid options otherwise.
 */
function expectChoice<T extends string>(flag: string, value: string, allowed: readonly T[]): T {
  for (const candidate of allowed) {
    if (candidate === value) {
      return candidate;
    }
  }
  die(`${flag}: expected one of ${allowed.join('|')}; got "${value}"`);
}

/**
 * Parses `workforce login` flags into an options bag.
 *
 * Recognizes `--workspace <v>` / `--workspace=<v>` and `--cloud-url <v>` /
 * `--cloud-url=<v>`; prints usage and exits on `-h`/`--help`; dies on any
 * unknown argument. Keys are omitted from the result when the flag was not
 * supplied (or resolved to an empty string).
 */
function parseLoginArgs(args: readonly string[]): { workspace?: string; cloudUrl?: string } {
  const parsed: { workspace?: string; cloudUrl?: string } = {};

  let i = 0;
  while (i < args.length) {
    const arg = args[i];
    switch (arg) {
      case '-h':
      case '--help':
        process.stdout.write(LOGIN_USAGE);
        process.exit(0);
        break;
      case '--workspace':
        i += 1;
        parsed.workspace = expectValue('--workspace', args[i]);
        break;
      case '--cloud-url':
        i += 1;
        parsed.cloudUrl = expectValue('--cloud-url', args[i]);
        break;
      default:
        if (arg.startsWith('--workspace=')) {
          parsed.workspace = expectInlineValue('--workspace', arg.slice('--workspace='.length));
        } else if (arg.startsWith('--cloud-url=')) {
          parsed.cloudUrl = expectInlineValue('--cloud-url', arg.slice('--cloud-url='.length));
        } else {
          die(`login: unknown argument "${arg}"`);
        }
    }
    i += 1;
  }

  // Spread-with-guard keeps falsy/absent values out of the returned object,
  // matching the shape callers expect for optional properties.
  return {
    ...(parsed.workspace ? { workspace: parsed.workspace } : {}),
    ...(parsed.cloudUrl ? { cloudUrl: parsed.cloudUrl } : {})
  };
}

/**
 * Canonicalizes a cloud base URL: trims surrounding whitespace and strips
 * any run of trailing slashes. Falls back to DEFAULT_CLOUD_URL when the
 * input is blank after trimming.
 */
function normalizeCloudUrl(url: string): string {
  const cleaned = url.trim();
  if (!cleaned) {
    return DEFAULT_CLOUD_URL;
  }
  return cleaned.replace(/\/+$/, '');
}

function die(message: string): never {
process.stderr.write(`${message}\n`);
process.exit(1);
Expand Down
168 changes: 142 additions & 26 deletions packages/deploy/src/deploy.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,39 @@ async function withTempPersona(
};
}

/**
 * Runs `fn` with WORKFORCE_WORKSPACE_ID / WORKFORCE_WORKSPACE_TOKEN pinned
 * to the supplied values (an omitted/undefined value means "unset"), and
 * restores the previous environment afterwards — even when `fn` throws.
 *
 * The original implementation copy-pasted the save/set/restore if/else once
 * per variable; this version drives both variables through one table so the
 * set and restore paths cannot drift out of sync.
 *
 * @param env - Desired workspace/token values; `undefined` means "unset".
 * @param fn  - Async body to run under the temporary environment.
 * @returns Whatever `fn` resolves to.
 */
async function withWorkspaceEnv<T>(
  env: { workspace?: string; token?: string },
  fn: () => Promise<T>
): Promise<T> {
  // One row per env var this helper controls: [variable name, desired value].
  const bindings: ReadonlyArray<readonly [string, string | undefined]> = [
    ['WORKFORCE_WORKSPACE_ID', env.workspace],
    ['WORKFORCE_WORKSPACE_TOKEN', env.token]
  ];

  // Assigning `undefined` to process.env would store the string "undefined",
  // so unsetting must go through `delete`.
  const apply = (name: string, value: string | undefined): void => {
    if (value === undefined) {
      delete process.env[name];
    } else {
      process.env[name] = value;
    }
  };

  const previous = new Map<string, string | undefined>();
  for (const [name, value] of bindings) {
    previous.set(name, process.env[name]);
    apply(name, value);
  }

  try {
    return await fn();
  } finally {
    for (const [name, value] of previous) {
      apply(name, value);
    }
  }
}

test('preflightPersona accepts a valid deploy-shaped persona', async () => {
const { personaPath, cleanup } = await withTempPersona(basePersonaJson());
try {
Expand Down Expand Up @@ -294,42 +327,125 @@ test('deploy --bundle-out emits to the supplied dir and skips launch', async ()
}
});

test('--mode cloud throws a clear "not yet available" error', async () => {
test('--mode cloud skips local integration resolver and hands off to the cloud launcher', async () => {
const { personaPath, cleanup } = await withTempPersona(basePersonaJson());
const io = createBufferedIO();
let launched = false;
try {
await assert.rejects(
deploy(
{ personaPath, mode: 'cloud', io },
{
workspaceAuth: {
async resolveWorkspace() {
return { workspace: 'w', token: 't' };
}
},
integrations: {
async isConnected() {
return true;
},
async connect() {
throw new Error('unreachable');
}
const result = await deploy(
{ personaPath, mode: 'cloud', io },
{
workspaceAuth: {
async resolveWorkspace() {
return { workspace: 'w', token: 't' };
}
},
integrations: {
async isConnected() {
throw new Error('cloud mode should not use local integration resolver');
},
bundle: {
async stage() {
async connect() {
throw new Error('cloud mode should not use local integration resolver');
}
},
bundle: {
async stage(input) {
await mkdir(input.outDir, { recursive: true });
const runner = path.join(input.outDir, 'runner.mjs');
const bundle = path.join(input.outDir, 'agent.bundle.mjs');
const personaCopy = path.join(input.outDir, 'persona.json');
const pkg = path.join(input.outDir, 'package.json');
await Promise.all([
writeFile(runner, '', 'utf8'),
writeFile(bundle, '', 'utf8'),
writeFile(personaCopy, '{}', 'utf8'),
writeFile(pkg, '{}', 'utf8')
]);
return {
runnerPath: runner,
bundlePath: bundle,
personaCopyPath: personaCopy,
packageJsonPath: pkg,
sizeBytes: 0
};
}
},
modes: {
cloud: {
async launch(input) {
launched = true;
assert.equal(input.workspace, 'w');
return {
runnerPath: '/tmp/r',
bundlePath: '/tmp/b',
personaCopyPath: '/tmp/p',
packageJsonPath: '/tmp/k',
sizeBytes: 0
id: 'agent-cloud',
async stop() {
/* no-op */
},
done: Promise.resolve({ code: 0 })
};
}
}
}
),
/--mode cloud is not yet available/
}
);
assert.equal(result.mode, 'cloud');
assert.equal(launched, true);
assert.deepEqual(result.connectedIntegrations, []);
} finally {
await cleanup();
}
});

test('--mode cloud does not require an env workspace token before launching', async () => {
const { personaPath, cleanup } = await withTempPersona(basePersonaJson());
const io = createBufferedIO();
let launched = false;
try {
const result = await withWorkspaceEnv({ workspace: 'w-cloud' }, () => deploy(
{ personaPath, mode: 'cloud', io },
{
bundle: {
async stage(input) {
await mkdir(input.outDir, { recursive: true });
const runner = path.join(input.outDir, 'runner.mjs');
const bundle = path.join(input.outDir, 'agent.bundle.mjs');
const personaCopy = path.join(input.outDir, 'persona.json');
const pkg = path.join(input.outDir, 'package.json');
await Promise.all([
writeFile(runner, '', 'utf8'),
writeFile(bundle, '', 'utf8'),
writeFile(personaCopy, '{}', 'utf8'),
writeFile(pkg, '{}', 'utf8')
]);
return {
runnerPath: runner,
bundlePath: bundle,
personaCopyPath: personaCopy,
packageJsonPath: pkg,
sizeBytes: 0
};
}
},
modes: {
cloud: {
async launch(input) {
launched = true;
assert.equal(input.workspace, 'w-cloud');
assert.equal(input.workspaceToken, undefined);
return {
id: 'agent-cloud',
async stop() {
/* no-op */
},
done: Promise.resolve({ code: 0 })
};
}
}
}
}
));
assert.equal(result.mode, 'cloud');
assert.equal(result.workspace, 'w-cloud');
assert.equal(launched, true);
} finally {
await cleanup();
}
Expand Down
Loading
Loading