From 5771665e03cc0eb58a63edb1dc53974b9be61530 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Wed, 21 Jan 2026 17:36:37 +0000 Subject: [PATCH 01/34] project: support forked_from key --- .changeset/moody-ducks-warn.md | 5 ++ packages/project/src/Project.ts | 1 + packages/project/src/parse/from-fs.ts | 4 ++ packages/project/src/util/config.ts | 5 ++ packages/project/src/util/omit-nil.ts | 6 ++- packages/project/test/parse/from-fs.test.ts | 33 ++++++++++-- packages/project/test/serialize/to-fs.test.ts | 54 +++++++++++++++++++ packages/project/test/util/config.test.ts | 6 +++ 8 files changed, 109 insertions(+), 5 deletions(-) create mode 100644 .changeset/moody-ducks-warn.md diff --git a/.changeset/moody-ducks-warn.md b/.changeset/moody-ducks-warn.md new file mode 100644 index 000000000..7e1a6a94f --- /dev/null +++ b/.changeset/moody-ducks-warn.md @@ -0,0 +1,5 @@ +--- +'@openfn/project': patch +--- + +Support forked_from metadata key in openfn.yaml diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index e1c425ed8..b3aeb1900 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -31,6 +31,7 @@ type UUIDMap = { type CLIMeta = { version?: number; alias?: string; + forked_from?: string; }; export class Project { diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index d1ff194d2..380641b78 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -12,6 +12,7 @@ import { } from '../util/config'; import { omit } from 'lodash-es'; import { Logger } from '@openfn/logger'; +import omitNil from '../util/omit-nil'; export type FromFsConfig = { root: string; @@ -36,6 +37,9 @@ export const parseProject = async (options: FromFsConfig) => { openfn: omit(context.project, ['id']), config: config, workflows: [], + cli: omitNil({ + forked_from: context.project.forked_from, + }), }; // now find all the workflows diff --git a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index c1d80c433..e30937294 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -30,6 +30,11 @@ export const extractConfig = (source: Project, format?: 'yaml' | 'json') => { if (source.name) { project.name = source.name; } + + if (source.cli.forked_from) { + project.forked_from = source.cli.forked_from; + } + const workspace = { ...source.config, }; diff --git a/packages/project/src/util/omit-nil.ts b/packages/project/src/util/omit-nil.ts index 40a8974e8..1e05f887f 100644 --- a/packages/project/src/util/omit-nil.ts +++ b/packages/project/src/util/omit-nil.ts @@ -1,8 +1,10 @@ import { omitBy, isNil } from 'lodash-es'; -export const omitNil = (obj: any, key: string) => { - if (obj[key]) { +export const omitNil = (obj: any, key?: string) => { + if (key && obj[key]) { obj[key] = omitBy(obj[key], isNil); + } else { + return omitBy(obj, isNil); } }; export default omitNil; diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 6a0785f52..87adc29cc 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -22,7 +22,7 @@ function mockFile(path: string, content: string | object) { mock(files); } -test.serial('should include multiple workflows', async (t) => { +test.serial('should include multiple workflows (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); mockFile('/ws/workflows/workflow-1/workflow-1.yaml', { @@ -66,7 
+66,7 @@ test.serial('should include multiple workflows', async (t) => { t.is(wf2.name, 'Workflow 2'); }); -test.serial('should load a workflow expression', async (t) => { +test.serial('should load a workflow expression (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); mockFile('/ws/workflows/my-workflow/my-workflow.yaml', { @@ -104,7 +104,7 @@ test.serial( ); test.serial( - 'should load a workflow from the file system and expand shorthand links', + 'should load a workflow from the file system and expand shorthand links (legacy format)', async (t) => { mockFile('/ws/openfn.yaml', buildConfig()); @@ -139,3 +139,30 @@ test.serial( t.is(typeof wf.steps[1].next.c, 'object'); } ); + +test.serial.only('should track forked_from', async (t) => { + mockFile('/ws/openfn.yaml', { + workspace: buildConfig(), + project: { + uuid: '', + forked_from: 'abcd', + }, + }); + + mockFile('/ws/workflows/workflow-1/workflow-1.yaml', { + id: 'workflow-1', + name: 'Workflow 1', + steps: [ + { + id: 'a', + expression: 'job.js', + }, + ], + }); + + mockFile('/ws/workflows/workflow-1/job.js', `fn(s => s)`); + + const project = await parseProject({ root: '/ws' }); + + t.is(project.cli.forked_from, 'abcd'); +}); diff --git a/packages/project/test/serialize/to-fs.test.ts b/packages/project/test/serialize/to-fs.test.ts index 63c29c4d2..484a0e244 100644 --- a/packages/project/test/serialize/to-fs.test.ts +++ b/packages/project/test/serialize/to-fs.test.ts @@ -233,4 +233,58 @@ test('toFs: extract a project with 1 workflow and 1 step', (t) => { t.is(files['workflows/my-workflow/step.js'], 'fn(s => s)'); }); +test('toFs: extract a project with forked_from meta', (t) => { + const project = new Project( + { + name: 'My Project', + workflows: [ + { + id: 'my-workflow', + steps: [step], + }, + ], + cli: { + forked_from: 'abcd', + }, + }, + { + formats: { + openfn: 'json', // for easier testing + workflow: 'json', + }, + } + ); + + const files = toFs(project); + + // Ensure that all the right files have been created + t.deepEqual(Object.keys(files), [ + 'openfn.json', + 'workflows/my-workflow/my-workflow.json', + 'workflows/my-workflow/step.js', + ]); + + // rough test on the file contents + // (this should be validated in more detail by each step) + const config = JSON.parse(files['openfn.json']); + t.deepEqual(config, { + workspace: { + credentials: 'credentials.yaml', + formats: { openfn: 'json', project: 'yaml', workflow: 'json' }, + dirs: { projects: '.projects', workflows: 'workflows' }, + }, + project: { + id: 'my-project', + name: 'My Project', + forked_from: 'abcd', + }, + }); + + const workflow = JSON.parse(files['workflows/my-workflow/my-workflow.json']); + t.is(workflow.id, 'my-workflow'); + t.is(workflow.steps.length, 1); + + t.is(files['workflows/my-workflow/step.js'], 'fn(s => s)'); +}); + // TODO we need many more tests on this, with options diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index db6837c09..2c2de1609 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -29,6 +29,7 @@ project: env: dev inserted_at: 2025-10-21T17:10:57Z updated_at: 2025-10-21T17:10:57Z + forked_from: abcd `; const result = loadWorkspaceFile(yaml); @@ -51,6 +52,7 @@ project: env: 'dev', inserted_at: '2025-10-21T17:10:57Z', updated_at: '2025-10-21T17:10:57Z', + forked_from: 'abcd', }); }); @@ -161,6 +163,9 @@ test('generate openfn.yaml', (t) => { openfn: { uuid: 1234, }, + cli: { + forked_from: 'abcd', + }, 
}, { formats: { @@ -176,6 +181,7 @@ test('generate openfn.yaml', (t) => { uuid: 1234 id: my-project name: My Project + forked_from: abcd workspace: credentials: credentials.yaml formats: From 043867203333a53318b4c34391de8786134e19e1 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 14:29:01 +0000 Subject: [PATCH 02/34] update forked_from to a map --- packages/project/src/Project.ts | 2 +- packages/project/src/Workflow.ts | 8 ++++---- packages/project/test/parse/from-fs.test.ts | 8 +++++--- packages/project/test/util/config.test.ts | 7 +++++-- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index b3aeb1900..8bb9f7283 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -31,7 +31,7 @@ type UUIDMap = { type CLIMeta = { version?: number; alias?: string; - forked_from?: string; + forked_from?: Record; }; export class Project { diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index e1066f981..9b649cc48 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -71,6 +71,10 @@ class Workflow { this.workflow.start = s; } + get history() { + return this.workflow.history ?? []; + } + _buildIndex() { for (const step of this.workflow.steps) { const s = step as any; @@ -191,10 +195,6 @@ class Workflow { this.workflow.history?.push(versionHash); } - get history() { - return this.workflow.history ?? []; - } - // return true if the current workflow can be merged into the target workflow without losing any changes canMergeInto(target: Workflow) { const thisHistory = diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index 87adc29cc..ed8115d35 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -140,12 +140,14 @@ test.serial( } ); -test.serial.only('should track forked_from', async (t) => { +test.serial('should track forked_from', async (t) => { mockFile('/ws/openfn.yaml', { workspace: buildConfig(), project: { uuid: '', - forked_from: 'abcd', + forked_from: { + w1: 'abcd', + }, }, }); @@ -164,5 +166,5 @@ test.serial.only('should track forked_from', async (t) => { const project = await parseProject({ root: '/ws' }); - t.is(project.cli.forked_from, 'abcd'); + t.deepEqual(project.cli.forked_from, { w1: 'abcd' }); }); diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index 2c2de1609..0a1e123c4 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -29,7 +29,8 @@ project: env: dev inserted_at: 2025-10-21T17:10:57Z updated_at: 2025-10-21T17:10:57Z - forked_from: abcd + forked_from: + w1: abcd `; const result = loadWorkspaceFile(yaml); @@ -52,7 +53,9 @@ project: env: 'dev', inserted_at: '2025-10-21T17:10:57Z', updated_at: '2025-10-21T17:10:57Z', - forked_from: 'abcd', + forked_from: { + w1: 'abcd', + }, }); }); From 132e15b7f4e5e27c82b29060c89b4ecaacf7ba72 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 14:56:18 +0000 Subject: [PATCH 03/34] ensure history serializes --- packages/project/src/Workflow.ts | 4 ++-- packages/project/test/fixtures/sample-v2-project.ts | 6 ++++-- packages/project/test/parse/from-project.test.ts | 7 +++++-- packages/project/test/serialize/to-project.test.ts | 3 +++ 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/packages/project/src/Workflow.ts 
b/packages/project/src/Workflow.ts index 9b649cc48..07b976a0e 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -27,8 +27,8 @@ class Workflow { this.workflow = clone(workflow); - // history needs to be on workflow object. - this.workflow.history = workflow.history?.length ? workflow.history : []; + // history needs to be on workflow object + this.workflow.history = workflow.history ?? []; const { id, diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index b8202ecf9..4029eb90f 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -33,7 +33,7 @@ export const json: SerializedProject = { name: 'Workflow', id: 'workflow', openfn: { uuid: 1 }, - history: [], + history: ['a', 'b'], start: 'trigger', }, ], @@ -72,7 +72,9 @@ workflows: id: workflow openfn: uuid: 1 - history: [] + history: + - a + - b start: trigger sandbox: parentId: abcd diff --git a/packages/project/test/parse/from-project.test.ts b/packages/project/test/parse/from-project.test.ts index 6a328b1bb..c41b4dca6 100644 --- a/packages/project/test/parse/from-project.test.ts +++ b/packages/project/test/parse/from-project.test.ts @@ -26,6 +26,9 @@ workflows: lock_version: 1 deleted_at: null concurrency: null + version_history: + - a + - b jobs: transform-data: name: Transform data @@ -93,7 +96,7 @@ test('import from a v2 project as JSON', async (t) => { openfn: { uuid: 1, }, - history: [], + history: ['a', 'b'], start: 'trigger', steps: [ { @@ -152,7 +155,7 @@ test('import from a v2 project as YAML', async (t) => { uuid: 1, }, start: 'trigger', - history: [], + history: ['a', 'b'], steps: [ { name: 'b', diff --git a/packages/project/test/serialize/to-project.test.ts b/packages/project/test/serialize/to-project.test.ts index 76eeced78..ea4f1cba5 100644 --- a/packages/project/test/serialize/to-project.test.ts +++ b/packages/project/test/serialize/to-project.test.ts @@ -37,6 +37,9 @@ const createProject = (props: Partial = {}) => { // hack delete proj.workflows[0].steps[0].name; proj.workflows[0].start = 'trigger'; + + // add some history + proj.workflows[0].workflow.history = ['a', 'b']; return proj; }; From 1703deb3eba6eadce209812c05fb9ca4cfa29e56 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 15:44:12 +0000 Subject: [PATCH 04/34] only include forked_from if it has values --- packages/project/src/util/config.ts | 2 +- packages/project/test/util/config.test.ts | 37 +++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/project/src/util/config.ts b/packages/project/src/util/config.ts index e30937294..17bd59ecc 100644 --- a/packages/project/src/util/config.ts +++ b/packages/project/src/util/config.ts @@ -31,7 +31,7 @@ export const extractConfig = (source: Project, format?: 'yaml' | 'json') => { project.name = source.name; } - if (source.cli.forked_from) { + if (source.cli.forked_from && Object.keys(source.cli.forked_from).length) { project.forked_from = source.cli.forked_from; } diff --git a/packages/project/test/util/config.test.ts b/packages/project/test/util/config.test.ts index 0a1e123c4..02cdb5ffd 100644 --- a/packages/project/test/util/config.test.ts +++ b/packages/project/test/util/config.test.ts @@ -198,6 +198,43 @@ workspace: ); }); +test("exclude forked_from if it's not set", (t) => { + const proj = new Project( + { + id: 'my-project', + name: 'My Project', + openfn: { + uuid: 1234, + }, + cli: {}, + 
}, + { + formats: { + openfn: 'yaml', + }, + } + ); + const result = extractConfig(proj); + t.is(result.path, 'openfn.yaml'), + t.deepEqual( + result.content, + `project: + uuid: 1234 + id: my-project + name: My Project +workspace: + credentials: credentials.yaml + formats: + openfn: yaml + project: yaml + workflow: yaml + dirs: + projects: .projects + workflows: workflows +` + ); +}); + test.todo('generate openfn.json'); test('include project name', (t) => { From 1601edc2fe7d7ebbf4ac0bec1514efc26b646672 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 15:48:31 +0000 Subject: [PATCH 05/34] add forked_from on checkkout --- packages/cli/src/projects/checkout.ts | 11 +++++++++ packages/cli/test/projects/checkout.test.ts | 26 ++++++++++++++++++++- packages/lexicon/core.d.ts | 1 + 3 files changed, 37 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 33369e34c..467278600 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -69,6 +69,17 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { await tidyWorkflowDir(currentProject!, switchProject); } + // write the forked from map + switchProject.cli.forked_from = switchProject.workflows.reduce( + (obj: any, wf) => { + if (wf.history.length) { + obj[wf.id] = wf.history.at(-1); + } + return obj; + }, + {} + ); + // expand project into directory const files: any = switchProject.serialize('fs'); for (const f in files) { diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index 44fcb0fa3..fac0c1e29 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -3,7 +3,7 @@ import { createMockLogger } from '@openfn/logger'; import { handler as checkoutHandler } from '../../src/projects/checkout'; import mock from 'mock-fs'; import fs from 'fs'; -import { jsonToYaml, Workspace } from '@openfn/project'; +import { jsonToYaml, Workspace, yamlToJson } from '@openfn/project'; test.beforeEach(() => { mock({ @@ -28,6 +28,7 @@ test.beforeEach(() => { { name: 'simple-workflow', id: 'wf-id', + history: ['a'], jobs: [ { name: 'Transform data to FHIR standard', @@ -56,6 +57,7 @@ test.beforeEach(() => { { name: 'another-workflow', id: 'another-id', + history: ['b'], jobs: [ { name: 'Transform data to FHIR standard', @@ -83,6 +85,7 @@ test.beforeEach(() => { }, ], }), + // TODO this is actually a v1 state file for some reason, which is wierd '/ws/.projects/project@app.openfn.org.yaml': jsonToYaml({ id: '', name: 'My Project', @@ -90,6 +93,7 @@ test.beforeEach(() => { { name: 'simple-workflow-main', id: 'wf-id-main', + version_history: ['a'], jobs: [ { name: 'Transform data to FHIR standard', @@ -118,6 +122,7 @@ test.beforeEach(() => { { name: 'another-workflow-main', id: 'another-id', + version_history: ['b'], jobs: [ { name: 'Transform data to FHIR standard', @@ -217,6 +222,25 @@ test.serial('checkout: same id as active', async (t) => { ); }); +test.serial( + 'checkout: writes forked_from based on version history', + async (t) => { + const bcheckout = new Workspace('/ws'); + t.is(bcheckout.activeProject!.id, 'my-project'); + + await checkoutHandler( + { command: 'project-checkout', project: 'my-project', workspace: '/ws' }, + logger + ); + + const openfn = yamlToJson(fs.readFileSync('/ws/openfn.yaml', 'utf8')); + t.deepEqual(openfn.project.forked_from, { + 'simple-workflow-main': 'a', + 'another-workflow-main': 
'b', + }); + } +); + test.serial('checkout: switching to and back between projects', async (t) => { // before checkout. my-project is active and expanded const bcheckout = new Workspace('/ws'); diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 187da091f..07e9b6490 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -118,6 +118,7 @@ export interface ProjectMeta { env?: string; inserted_at?: string; updated_at?: string; + forked_from?: Record; [key: string]: unknown; } From f3fd21e4d9543bcca94f49b35bb02648b66fad95 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 16:40:24 +0000 Subject: [PATCH 06/34] project: omit forked_from from openfn object when loading from fs --- packages/project/src/parse/from-fs.ts | 2 +- packages/project/test/parse/from-fs.test.ts | 1 + .../test/serialize/to-app-state.test.ts | 37 +++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index 380641b78..1ced41426 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -34,7 +34,7 @@ export const parseProject = async (options: FromFsConfig) => { const proj: any = { id: context.project?.id, name: context.project?.name, - openfn: omit(context.project, ['id']), + openfn: omit(context.project, ['id', 'forked_from']), config: config, workflows: [], cli: omitNil({ diff --git a/packages/project/test/parse/from-fs.test.ts b/packages/project/test/parse/from-fs.test.ts index ed8115d35..a3f8f7639 100644 --- a/packages/project/test/parse/from-fs.test.ts +++ b/packages/project/test/parse/from-fs.test.ts @@ -167,4 +167,5 @@ test.serial('should track forked_from', async (t) => { const project = await parseProject({ root: '/ws' }); t.deepEqual(project.cli.forked_from, { w1: 'abcd' }); + t.falsy(project.openfn!.forked_from); }); diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index de6e4117e..b6a7fa570 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -262,6 +262,43 @@ test('should handle credentials', (t) => { t.is(step.project_credential_id, 'p'); }); +test.only('should ignore forked_from', (t) => { + const data = { + id: 'my-project', + workflows: [ + { + id: 'wf', + name: 'wf', + steps: [ + { + id: 'trigger', + type: 'webhook', + next: { + step: {}, + }, + }, + { + id: 'step', + expression: '.', + configuration: 'p', + openfn: { + keychain_credential_id: 'k', + }, + }, + ], + }, + ], + cli: { + forked_form: { wf: 'a' }, + }, + }; + const proj = new Project(data); + console.log(proj); + const state = toAppState(proj, { format: 'json' }); + console.log(state); + t.falsy((state as any).forked_form); +}); + test('should ignore workflow start keys', (t) => { const data = { id: 'my-project', From 45c3e1028053f83fa134e9d503bcd8b182bfa3a9 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 22 Jan 2026 17:35:35 +0000 Subject: [PATCH 07/34] deploy changes against local server --- packages/cli/src/projects/checkout.ts | 13 +--- packages/cli/src/projects/deploy.ts | 77 +++++++++++++++++++---- packages/cli/src/projects/fetch.ts | 2 + packages/cli/src/projects/util.ts | 11 ++++ packages/cli/test/projects/deploy.test.ts | 51 +++++++++++++++ packages/project/src/Project.ts | 10 ++- 6 files changed, 140 insertions(+), 24 deletions(-) diff --git a/packages/cli/src/projects/checkout.ts 
b/packages/cli/src/projects/checkout.ts index 467278600..1bb2886db 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -10,7 +10,7 @@ import * as o from '../options'; import * as po from './options'; import type { Opts } from './options'; -import { tidyWorkflowDir } from './util'; +import { tidyWorkflowDir, updateForkedFrom } from './util'; export type CheckoutOptions = Pick< Opts, @@ -70,17 +70,10 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { } // write the forked from map - switchProject.cli.forked_from = switchProject.workflows.reduce( - (obj: any, wf) => { - if (wf.history.length) { - obj[wf.id] = wf.history.at(-1); - } - return obj; - }, - {} - ); + updateForkedFrom(switchProject); // expand project into directory + // TODO: only write files with a diff const files: any = switchProject.serialize('fs'); for (const f in files) { if (files[f]) { diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 25fa87a93..42bc34e8f 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -1,6 +1,8 @@ import yargs from 'yargs'; import Project from '@openfn/project'; import c from 'chalk'; +import { writeFile } from 'node:fs/promises'; +import path from 'node:path'; import * as o from '../options'; import * as o2 from './options'; @@ -10,6 +12,7 @@ import { fetchProject, serialize, getSerializePath, + updateForkedFrom, } from './util'; import { build, ensure } from '../util/command-builders'; @@ -64,6 +67,34 @@ export const command: yargs.CommandModule = { handler: ensure('project-deploy', options), }; +export const hasRemoteDiverged = ( + local: Project, + remote: Project +): string[] | null => { + let diverged: string[] | null = null; + + const refs = local.cli.forked_from ?? {}; + + // for each workflow, check that the local fetched_from is the head of the remote history + for (const wf of local.workflows) { + if (wf.id in refs) { + const forkedVersion = refs[wf.id]; + const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); + if (forkedVersion !== remoteVersion) { + diverged ??= []; + diverged.push(wf.id); + } + } else { + // TODO what if there's no forked from for this workflow? + // Do we assume divergence because we don't know? Do we warn? + } + } + + // TODO what if a workflow is removed locally? + + return diverged; +}; + export async function handler(options: DeployOptions, logger: Logger) { logger.warn( 'WARNING: the project deploy command is in BETA and may not be stable. Use cautiously on production projects.' @@ -132,32 +163,41 @@ Pass --force to override this error and deploy anyway.`); // Skip divergence testing if the remote has no history in its workflows // (this will only happen on older versions of lightning) + // TODO now maybe skip if there's no forked_from const skipVersionTest = - localProject.workflows.find((wf) => wf.history.length === 0) || + // localProject.workflows.find((wf) => wf.history.length === 0) || remoteProject.workflows.find((wf) => wf.history.length === 0); + // localProject.workflows.forEach((w) => console.log(w.history)); + if (skipVersionTest) { logger.warn( 'Skipping compatibility check as no local version history detected' ); logger.warn('Pushing these changes may overrite changes made to the app'); - } else if (!localProject.canMergeInto(remoteProject!)) { - if (!options.force) { - logger.error(`Error: Projects have diverged! 
+ } else { + const divergentWorkflows = hasRemoteDiverged(localProject, remoteProject!); + if (divergentWorkflows) { + logger.warn( + `The following workflows have diverged: ${divergentWorkflows}` + ); + if (!options.force) { + logger.error(`Error: Projects have diverged! -The remote project has been edited since the local project was branched. Changes may be lost. + The remote project has been edited since the local project was branched. Changes may be lost. -Pass --force to override this error and deploy anyway.`); - return; + Pass --force to override this error and deploy anyway.`); + return; + } else { + logger.warn( + 'Remote project has not diverged from local project! Pushing anyway as -f passed' + ); + } } else { - logger.warn( - 'Remote project has not diverged from local project! Pushing anyway as -f passed' + logger.info( + 'Remote project has not diverged from local project - it is safe to deploy 🎉' ); } - } else { - logger.info( - 'Remote project has not diverged from local project - it is safe to deploy 🎉' - ); } logger.info('Merging changes into remote project'); @@ -180,6 +220,8 @@ Pass --force to override this error and deploy anyway.`); // TODO not totally sold on endpoint handling right now config.endpoint ??= localProject.openfn?.endpoint!; + // TODO: I want to report diff HERE, after the merged state and stuff has been built + if (options.dryRun) { logger.always('dryRun option set: skipping upload step'); } else { @@ -218,6 +260,14 @@ Pass --force to override this error and deploy anyway.`); merged.config ); + // TODO why isn't this right? oh, because the outpu path isn't quite right + updateForkedFrom(finalProject); + const configData = finalProject.generateConfig(); + await writeFile( + path.resolve(options.workspace, configData.path), + configData.content + ); + const finalOutputPath = getSerializePath(localProject, options.workspace!); logger.debug('Updating local project at ', finalOutputPath); await serialize(finalProject, finalOutputPath); @@ -267,3 +317,4 @@ export const reportDiff = (local: Project, remote: Project, logger: Logger) => { return diffs; }; +``; diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 37cf67373..d1d618c79 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -331,6 +331,8 @@ To ignore this error and override the local file, pass --force (-f) options.force || // The user forced the checkout !hasAnyHistory; // the remote project has no history (can happen in old apps) + // TODO temporarily force skip + // TODO canMergeInto needs to return a reason if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { // TODO allow rename throw new Error('Error! 
An incompatible project exists at this location'); diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index be6f8b1a0..d367be7a2 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -216,3 +216,14 @@ export async function tidyWorkflowDir( // Return and sort for testing return toRemove.sort(); } + +export const updateForkedFrom = (proj: Project) => { + proj.cli.forked_from = proj.workflows.reduce((obj: any, wf) => { + if (wf.history.length) { + obj[wf.id] = wf.history.at(-1); + } + return obj; + }, {}); + + return proj; +}; diff --git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts index 903dabf52..47ddf0d45 100644 --- a/packages/cli/test/projects/deploy.test.ts +++ b/packages/cli/test/projects/deploy.test.ts @@ -10,6 +10,7 @@ import createLightningServer, { import { handler as deployHandler, + hasRemoteDiverged, reportDiff, } from '../../src/projects/deploy'; import { myProject_yaml, myProject_v1 } from './fixtures'; @@ -278,3 +279,53 @@ test.serial.skip( t.truthy(expectedLog); } ); + +test('hasRemoteDiverged: 1 workflow, no diverged', (t) => { + const local = { + workflows: [ + { + id: 'w', + }, + ], + cli: { + forked_from: { + w: 'a', + }, + }, + } as unknown as Project; + + const remote = { + getWorkflow: () => ({ + id: 'w', + history: ['a'], + }), + } as unknown as Project; + + const diverged = hasRemoteDiverged(local, remote); + t.falsy(diverged); +}); + +test('hasRemoteDiverged: 1 workflow, 1 diverged', (t) => { + const local = { + workflows: [ + { + id: 'w', + }, + ], + cli: { + forked_from: { + w: 'w', + }, + }, + } as unknown as Project; + + const remote = { + getWorkflow: () => ({ + id: 'w', + history: ['a', 'b'], + }), + } as unknown as Project; + + const diverged = hasRemoteDiverged(local, remote); + t.deepEqual(diverged, ['w']); +}); diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 8bb9f7283..e4800fd0b 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -12,7 +12,7 @@ import { getUuidForEdge, getUuidForStep } from './util/uuid'; import { merge, MergeProjectOptions } from './merge/merge-project'; import { diff as projectDiff } from './util/project-diff'; import { Workspace } from './Workspace'; -import { buildConfig } from './util/config'; +import { buildConfig, extractConfig } from './util/config'; import { Provisioner } from '@openfn/lexicon/lightning'; import { SandboxMeta, UUID, WorkspaceConfig } from '@openfn/lexicon'; @@ -256,6 +256,14 @@ export class Project { } return true; } + + /** + * Generates the contents of the openfn.yaml file, + * plus its file path + */ + generateConfig() { + return extractConfig(this); + } } export default Project; From 5335257856a7d4d4b5573c8df9c9ee3b1af5d710 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Fri, 23 Jan 2026 09:36:58 +0000 Subject: [PATCH 08/34] tweak error --- packages/cli/CHANGELOG.md | 2 +- packages/cli/src/projects/fetch.ts | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 451eefcab..ad44a9741 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -4,7 +4,7 @@ ### Minor Changes -- 8b9f402: fetch: allow state files to be writtem to JSON with --format +- 8b9f402: fetch: allow state files to be written to JSON with --format ### Patch Changes diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 
d1d618c79..8ca84ab40 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -335,7 +335,13 @@ To ignore this error and override the local file, pass --force (-f) // TODO canMergeInto needs to return a reason if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { // TODO allow rename - throw new Error('Error! An incompatible project exists at this location'); + const e = new Error( + `Error! An incompatible project exists at this location.` + ); + + delete e.stack; + + throw e; } } } From be505e65bf5a164c25ab3176e979b18c279c48d9 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Thu, 29 Jan 2026 09:11:06 +0100 Subject: [PATCH 09/34] Update version hash (#1238) version hash now matches Lightning --- integration-tests/cli/test/sync.test.ts | 9 +- packages/cli/CHANGELOG.md | 2 +- packages/cli/src/projects/deploy.ts | 4 +- packages/cli/src/projects/fetch.ts | 8 +- packages/cli/test/projects/fetch.test.ts | 2 +- packages/cli/test/projects/fixtures.ts | 4 +- packages/project/README.md | 2 + packages/project/src/Workflow.ts | 15 +- packages/project/src/gen/generator.ts | 22 +- packages/project/src/gen/workflow.ohm | 2 +- packages/project/src/parse/from-app-state.ts | 2 +- .../project/src/serialize/to-app-state.ts | 11 +- packages/project/src/util/version.ts | 145 +++++-- .../test/fixtures/sample-v2-project.ts | 4 +- packages/project/test/gen/generator.test.ts | 53 ++- .../project/test/parse/from-app-state.test.ts | 34 +- .../test/serialize/to-app-state.test.ts | 39 +- .../test/util/version-workflow.test.ts | 388 +++++++++++++++++- 18 files changed, 662 insertions(+), 84 deletions(-) diff --git a/integration-tests/cli/test/sync.test.ts b/integration-tests/cli/test/sync.test.ts index 13e407b5a..38179a93e 100644 --- a/integration-tests/cli/test/sync.test.ts +++ b/integration-tests/cli/test/sync.test.ts @@ -30,7 +30,10 @@ const initWorkspace = (t: any) => { }; }; -const gen = (name = 'patients', workflows = ['trigger-job(body="fn()")']) => { +const gen = ( + name = 'patients', + workflows = ['trigger-job(expression="fn()")'] +) => { // generate a project const project = generateProject(name, workflows, { openfnUuid: true, @@ -44,7 +47,7 @@ test('fetch a new project', async (t) => { const { workspace, read } = initWorkspace(t); const project = gen(); - await run( + const { stdout } = await run( `openfn project fetch \ --workspace ${workspace} \ --endpoint ${endpoint} \ @@ -239,7 +242,7 @@ test('pull an update to project', async (t) => { test('checkout by alias', async (t) => { const { workspace, read } = initWorkspace(t); const main = gen(); - const staging = gen('patients-staging', ['trigger-job(body="fn(x)")']); + const staging = gen('patients-staging', ['trigger-job(expression="fn(x)")']); await run( `openfn project fetch \ diff --git a/packages/cli/CHANGELOG.md b/packages/cli/CHANGELOG.md index 451eefcab..ad44a9741 100644 --- a/packages/cli/CHANGELOG.md +++ b/packages/cli/CHANGELOG.md @@ -4,7 +4,7 @@ ### Minor Changes -- 8b9f402: fetch: allow state files to be writtem to JSON with --format +- 8b9f402: fetch: allow state files to be written to JSON with --format ### Patch Changes diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 42bc34e8f..319653f45 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -153,6 +153,8 @@ Pass --force to override this error and deploy anyway.`); return false; } + // this fails now because the local project has no UUIDs + // But should that 
matter ,actually? const diffs = reportDiff(remoteProject!, localProject, logger); if (!diffs.length) { logger.success('Nothing to deploy'); @@ -264,7 +266,7 @@ Pass --force to override this error and deploy anyway.`); updateForkedFrom(finalProject); const configData = finalProject.generateConfig(); await writeFile( - path.resolve(options.workspace, configData.path), + path.resolve(options.workspace!, configData.path), configData.content ); diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index d1d618c79..8ca84ab40 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -335,7 +335,13 @@ To ignore this error and override the local file, pass --force (-f) // TODO canMergeInto needs to return a reason if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { // TODO allow rename - throw new Error('Error! An incompatible project exists at this location'); + const e = new Error( + `Error! An incompatible project exists at this location.` + ); + + delete e.stack; + + throw e; } } } diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 301d656e7..398f9e58a 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -458,7 +458,7 @@ test.serial( lock_version: 1, }, id: 'my-workflow', - history: ['cli:02582f3bb088'], + history: ['cli:ba19e179317f'], }, ], }; diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index f3d8b9eb9..4b77482d0 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -43,7 +43,7 @@ export const myProject_v1: Provisioner.Project = { lock_version: 1, deleted_at: null, version_history: [ - 'cli:02582f3bb088', // alterstate + 'cli:ba19e179317f', // alterstate ], }, }, @@ -94,7 +94,7 @@ workflows: openfn: uuid: a9a3adef-b394-4405-814d-3ac4323f4b4b history: - - cli:02582f3bb088 + - cli:ba19e179317f openfn: uuid: 72ca3eb0-042c-47a0-a2a1-a545ed4a8406 inserted_at: 2025-04-23T11:19:32Z diff --git a/packages/project/README.md b/packages/project/README.md index 6d0d6d33e..1150b254b 100644 --- a/packages/project/README.md +++ b/packages/project/README.md @@ -91,3 +91,5 @@ Reference: parent(propName=propValue,x=y)-child a-b # can comment here to ``` + +Use special names `webhook` and `cron` to create trigger nodes (when converting into app state, the difference between a step and a trigger becomes important). 
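(A minimal usage sketch of the trigger shorthand described above, based on the generator tests added later in this patch; the `@openfn/project` import path is an assumption — the tests themselves import `generateWorkflow` from the package source.)

```ts
import { generateWorkflow } from '@openfn/project';

// 'webhook' is treated as a special node name, so the generated node
// becomes a trigger (type: 'webhook') rather than a named step.
const workflow = generateWorkflow('webhook-a', { uuidSeed: 1 });

const [trigger, step] = workflow.steps;
// trigger -> { id: 'webhook', type: 'webhook', openfn: { uuid: 1 }, next: { a: { ... } } }
// step    -> { id: 'a', name: 'a', openfn: { uuid: 2 } }
```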
diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index 07b976a0e..2d323bfbb 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -1,6 +1,6 @@ import * as l from '@openfn/lexicon'; import slugify from './util/slugify'; -import { generateHash } from './util/version'; +import { generateHash, HashOptions } from './util/version'; const clone = (obj: any) => JSON.parse(JSON.stringify(obj)); @@ -113,7 +113,14 @@ class Workflow { // Get properties on any step or edge by id or uuid get(id: string): WithMeta { - const item = this.index.edges[id] || this.index.steps[id]; + // first check if we're passed a UUID - in which case we map it to an id + if (id in this.index.id) { + id = this.index.id[id]; + } + + // now look up the item proper + let item = this.index.edges[id] || this.index.steps[id]; + if (!item) { throw new Error(`step/edge with id "${id}" does not exist in workflow`); } @@ -187,8 +194,8 @@ class Workflow { return this.index.uuid; } - getVersionHash() { - return generateHash(this); + getVersionHash(options?: HashOptions) { + return generateHash(this, options); } pushHistory(versionHash: string) { diff --git a/packages/project/src/gen/generator.ts b/packages/project/src/gen/generator.ts index cf41384a2..9cb09d930 100644 --- a/packages/project/src/gen/generator.ts +++ b/packages/project/src/gen/generator.ts @@ -57,12 +57,19 @@ const initOperations = (options: any = {}) => { if (!nodes[name]) { const id = slugify(name); nodes[name] = { - name: name, id, - openfn: { - uuid: uuid(id), - }, }; + if (/^(cron|webhook)$/.test(name)) { + // This sets up the node as a trigger + nodes[name].type = name; + } else { + nodes[name].name = name; + } + if (options.openfnUuid !== false) { + nodes[name].openfn = { + uuid: uuid(id), + }; + } } return nodes[name]; }; @@ -107,11 +114,14 @@ const initOperations = (options: any = {}) => { const n1 = parent.buildWorkflow(); const n2 = child.buildWorkflow(); const e = edge.buildWorkflow(); - e.openfn.uuid = uuid(`${n1.id}-${n2.id}`); + + if (options.openfnUuid !== false) { + e.openfn.uuid = uuid(`${n1.id}-${n2.id}`); + } n1.next ??= {}; - n1.next[n2.name] = e; + n1.next[n2.id ?? 
slugify(n2.name)] = e; return [n1, n2]; }, diff --git a/packages/project/src/gen/workflow.ohm b/packages/project/src/gen/workflow.ohm index fe2ee2502..a8ab5efa0 100644 --- a/packages/project/src/gen/workflow.ohm +++ b/packages/project/src/gen/workflow.ohm @@ -29,7 +29,7 @@ Workflow { prop = (alnum | "-" | "_")+ "=" propValue - propValue = quoted_prop | bool | int | alnum+ + propValue = quoted_prop | bool | int | alnum+ // TODO we only parse numbers as positive ints right now // fine for tests diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index ef3c355de..2587c8725 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -77,7 +77,7 @@ export const mapEdge = (edge: Provisioner.Edge) => { } if (edge.condition_label) { - e.name = edge.condition_label; + e.label = edge.condition_label; } // Do this last so that it serializes last diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index 2eb4cb8f4..8d428eb9a 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -57,7 +57,7 @@ export default function ( return state; } -const mapWorkflow = (workflow: Workflow) => { +export const mapWorkflow = (workflow: Workflow) => { if (workflow instanceof Workflow) { // @ts-ignore workflow = workflow.toJSON(); @@ -96,10 +96,10 @@ const mapWorkflow = (workflow: Workflow) => { let isTrigger = false; let node: Provisioner.Job | Provisioner.Trigger; - if (s.type && !s.expression) { + if (s.type) { isTrigger = true; node = { - type: s.type, + type: s.type ?? 'webhook', // this is mostly for tests ...renameKeys(s.openfn, { uuid: 'id' }), } as Provisioner.Trigger; wfState.triggers[node.type] = node; @@ -147,6 +147,11 @@ const mapWorkflow = (workflow: Workflow) => { e.source_job_id = node.id; } + if (rules.label) { + // TODO needs unit test + e.condition_label = rules.label; + } + if (rules.condition) { if (typeof rules.condition === 'boolean') { e.condition_type = rules.condition ? 
'always' : 'never'; diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index ee73a560d..69ed2e054 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -1,73 +1,140 @@ -import { ConditionalStepEdge, Job, Trigger, Workflow } from '@openfn/lexicon'; import crypto from 'node:crypto'; +import { get } from 'lodash-es'; +import { mapWorkflow } from '../serialize/to-app-state'; +import Workflow from '../Workflow'; const SHORT_HASH_LENGTH = 12; -export const project = () => {}; - function isDefined(v: any) { return v !== undefined && v !== null; } -export const generateHash = (workflow: Workflow, source = 'cli') => { +export const parse = (version: string) => { + const [source, hash] = version.split(':'); + return { source, hash }; +}; + +export type HashOptions = { + source?: string; + sha?: boolean; +}; + +export const generateHash = ( + workflow: Workflow, + { source = 'cli', sha = true }: HashOptions = {} +) => { const parts: string[] = []; + // convert the workflow into a v1 state object + // this means we can match keys with lightning + // and everything gets cleaner + const wfState = mapWorkflow(workflow); + // These are the keys we hash against - const wfKeys = ['name', 'credentials'].sort() as Array; + const wfKeys = ['name', 'positions'].sort(); + + // These keys are manually sorted to match lightning equivalents const stepKeys = [ 'name', - 'adaptors', - 'adaptor', // there's both adaptor & adaptors key in steps somehow - 'expression', - 'configuration', // assumes a string credential id - 'expression', - - // TODO need to model trigger types in this, which I think are currently ignored - ].sort() as Array; + 'adaptor', + 'keychain_credential_id', + 'project_credential_id', + 'body', + ].sort(); + + const triggerKeys = ['type', 'cron_expression', 'enabled'].sort(); + const edgeKeys = [ - 'condition', + 'name', // generated 'label', - 'disabled', // This feels more like an option - should be excluded? + 'condition_type', + 'condition_label', + 'condition_expression', + 'enabled', ].sort(); wfKeys.forEach((key) => { - if (isDefined(workflow[key])) { - parts.push(key, serializeValue(workflow[key])); + const value = get(workflow, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } }); - const steps = (workflow.steps || []).slice().sort((a, b) => { - const aName = a.name ?? ''; - const bName = b.name ?? ''; - return aName.localeCompare(bName); + // do the trigger first + for (const triggerId in wfState.triggers) { + const trigger = wfState.triggers[triggerId]; + triggerKeys.forEach((key) => { + const value = get(trigger, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); + } + }); + } + + // Now do all steps + const steps = Object.values(wfState.jobs).sort((a, b) => { + const aName = a.name ?? a.id ?? ''; + const bName = b.name ?? b.id ?? 
''; + return aName.toLowerCase().localeCompare(bName.toLowerCase()); }); + for (const step of steps) { stepKeys.forEach((key) => { - if (isDefined((step as any)[key])) { - parts.push(key, serializeValue((step as any)[key])); + const value = get(step, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } }); + } + + // this is annoying + const uuidMap: any = {}; + for (const t in wfState.triggers) { + const uuid = wfState.triggers[t].id; + uuidMap[uuid] = wfState.triggers[t]; + // set the type as the trigger name, to get the right value in the map + (wfState.triggers[t] as any).name = wfState.triggers[t].type; + } + for (const j in wfState.jobs) { + const uuid = wfState.jobs[j].id; + uuidMap[uuid] = wfState.jobs[j]; + } + + const edges = Object.values(wfState.edges) + .map((edge) => { + const source = uuidMap[edge.source_trigger_id! ?? edge.source_job_id]; + const target = uuidMap[edge.target_job_id]; + + (edge as any).name = `${source.name ?? source.id}-${ + target.name ?? target.id + }`; + return edge; + }) + .sort((a: any, b: any) => { + // sort edges by name + // where name is sourcename-target name + const aName = a.name ?? ''; + const bName = b.name ?? ''; + return aName.localeCompare(bName); + }); - if (step.next && Array.isArray(step.next)) { - const steps = step.next.slice() as Array; - steps.slice().sort((a: ConditionalStepEdge, b: ConditionalStepEdge) => { - const aLabel = a.label || ''; - const bLabel = b.label || ''; - return aLabel.localeCompare(bLabel); - }); - for (const edge of step.next) { - edgeKeys.forEach((key) => { - if (isDefined(edge[key])) { - parts.push(key, serializeValue(edge[key])); - } - }); + // now do edges + for (const edge of edges) { + edgeKeys.forEach((key) => { + const value = get(edge, key); + if (isDefined(value)) { + parts.push(serializeValue(value)); } - } + }); } const str = parts.join(''); - const hash = crypto.createHash('sha256').update(str).digest('hex'); - return `${source}:${hash.substring(0, SHORT_HASH_LENGTH)}`; + if (sha) { + const hash = crypto.createHash('sha256').update(str).digest('hex'); + return `${source}:${hash.substring(0, SHORT_HASH_LENGTH)}`; + } else { + return `${source}:${str}`; + } }; function serializeValue(val: unknown) { diff --git a/packages/project/test/fixtures/sample-v2-project.ts b/packages/project/test/fixtures/sample-v2-project.ts index 4029eb90f..6b9cbeb8d 100644 --- a/packages/project/test/fixtures/sample-v2-project.ts +++ b/packages/project/test/fixtures/sample-v2-project.ts @@ -53,8 +53,8 @@ options: color: red workflows: - steps: - - name: b - id: b + - id: b + name: b openfn: uuid: 3 project_credential_id: x diff --git a/packages/project/test/gen/generator.test.ts b/packages/project/test/gen/generator.test.ts index 1ad5c2319..94e522b96 100644 --- a/packages/project/test/gen/generator.test.ts +++ b/packages/project/test/gen/generator.test.ts @@ -4,15 +4,17 @@ import { generateWorkflow, generateProject } from '../../src/gen/generator'; import * as fixtures from './fixtures'; import Workflow from '../../src/Workflow'; +const LOG_OUTPUTS = false; + // Generate a workflow with a fixed UUID seed // Pass test context to log the result -const gen = (src: string, t: ExecutionContext, options = {}) => { +const gen = (src: string, t?: ExecutionContext, options = {}) => { const result = generateWorkflow(src, { uuidSeed: 1, printErrors: false, ...options, }); - if (t) { + if (LOG_OUTPUTS && t) { t.log(JSON.stringify(result.toJSON(), null, 2)); } return result.toJSON(); @@ -24,6 +26,24 @@ test('it should 
generate a simple workflow', (t) => { t.deepEqual(result, fixtures.ab); }); +test('it should generate a simple workflow without UUIDs', (t) => { + const result = gen('a-b', t, { + openfnUuid: false, + }); + + t.log(JSON.stringify(result)); + + t.deepEqual(result, { + steps: [ + { name: 'a', id: 'a', next: { b: { openfn: {} } } }, + { name: 'b', id: 'b' }, + ], + name: 'Workflow', + id: 'workflow', + history: [], + }); +}); + test('it should return a Workflow instance', (t) => { const result = generateWorkflow('a-b'); @@ -125,7 +145,6 @@ test('it should generate a workflow with openfn meta', (t) => { a-b`, t ); - t.log(result); t.deepEqual(result.openfn, { lock_version: 123, concurrency: 3, @@ -395,6 +414,34 @@ test('it should generate several node pairs', (t) => { t.deepEqual(result, expected); }); +test('it should generate a cron trigger', (t) => { + const result = generateWorkflow('cron-a', { uuidSeed: 1 }); + + const [trigger, node] = result.steps; + + t.deepEqual(trigger, { + id: 'cron', + type: 'cron', + openfn: { uuid: 1 }, + next: { a: { openfn: { uuid: 3 } } }, + }); + t.deepEqual(node, { id: 'a', openfn: { uuid: 2 }, name: 'a' }); +}); + +test('it should generate a webhook trigger', (t) => { + const result = generateWorkflow('webhook-a', { uuidSeed: 1 }); + + const [trigger, node] = result.steps; + + t.deepEqual(trigger, { + id: 'webhook', + type: 'webhook', + openfn: { uuid: 1 }, + next: { a: { openfn: { uuid: 3 } } }, + }); + t.deepEqual(node, { id: 'a', openfn: { uuid: 2 }, name: 'a' }); +}); + test('it should generate a node with a prop', (t) => { const result = gen('a(expression=y)-b', t); const expected = _.cloneDeep(fixtures.ab); diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 8b5f138e2..23fc44782 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -138,7 +138,37 @@ test('should create a Project from prov state with a workflow', (t) => { }); }); -test('mapWorkflow: map a simple trigger', (t) => { +test('mapWorkflow: map a cron trigger', (t) => { + const mapped = mapWorkflow({ + id: 'cron', + name: 'w', + deleted_at: null, + triggers: { + cron: { + id: '1234', + type: 'cron', + cron_expression: '0 1 0 0', + enabled: true, + }, + }, + jobs: {}, + edges: {}, + }); + + const [trigger] = mapped.steps; + t.deepEqual(trigger, { + id: 'cron', + type: 'cron', + next: {}, + openfn: { + enabled: true, + uuid: '1234', + cron_expression: '0 1 0 0', + }, + }); +}); + +test('mapWorkflow: map a webhook trigger', (t) => { const mapped = mapWorkflow(state.workflows['my-workflow']); const [trigger] = mapped.steps; @@ -291,7 +321,7 @@ test('mapEdge: map label', (t) => { } as any); t.deepEqual(e, { disabled: true, - name: 'abc', + label: 'abc', }); }); diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index b6a7fa570..3d4c21cfe 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -262,7 +262,7 @@ test('should handle credentials', (t) => { t.is(step.project_credential_id, 'p'); }); -test.only('should ignore forked_from', (t) => { +test('should ignore forked_from', (t) => { const data = { id: 'my-project', workflows: [ @@ -293,9 +293,7 @@ test.only('should ignore forked_from', (t) => { }, }; const proj = new Project(data); - console.log(proj); const state = toAppState(proj, { format: 'json' }); - 
console.log(state); t.falsy((state as any).forked_form); }); @@ -332,7 +330,40 @@ test('should ignore workflow start keys', (t) => { t.falsy(state.workflows['wf'].start); }); -test.todo('handle edge labels'); +test('should handle edge labels', (t) => { + const data = { + id: 'my-project', + workflows: [ + { + id: 'wf', + name: 'wf', + start: 'step', + steps: [ + { + id: 'trigger', + type: 'webhook', + next: { + step: { + label: 'hello', + }, + }, + }, + { + id: 'step', + expression: '.', + configuration: 'p', + openfn: { + keychain_credential_id: 'k', + }, + }, + ], + }, + ], + }; + + const state = toAppState(new Project(data), { format: 'json' }); + t.is(state.workflows.wf.edges['trigger->step'].condition_label, 'hello'); +}); test('serialize steps and trigger in alphabetical order', (t) => { const wf = `@name wf diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index fc9b23dae..ffe612a0c 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -1,10 +1,76 @@ import test from 'ava'; -import { generateHash } from '../../src/util/version'; -import { generateWorkflow } from '../../src'; +import { generateHash, parse } from '../../src/util/version'; +import Project, { generateWorkflow } from '../../src'; -// TODO just caught a bug with both of these - needs to add tests around this -test.todo('include edge label in hash'); -test.todo('include edge expression in hash'); +// this is an actual lightning workflow state, copied verbatim +// todo already out of data as the version will change soon +// next, update this +const example = { + id: '320157d2-260d-4e32-91c0-db935547c263', + name: 'Turtle Power', + edges: [ + { + enabled: true, + id: 'ed3ebfbf-6fa3-4438-b21d-06f7eec216c1', + condition_type: 'always', + source_trigger_id: 'bf10f31a-cf51-45a2-95a4-756d0a25af53', + target_job_id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + }, + { + enabled: true, + id: '253bf2d7-1a01-44c8-8e2e-ccf50de92dff', + condition_type: 'js_expression', + condition_label: 'always tbh', + condition_expression: 'state.data', + source_job_id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + target_job_id: '40b839bd-5ade-414e-8dde-ed3ae77239ea', + }, + ], + version_history: ['app:211291f6e6d5'], + inserted_at: '2025-12-19T15:26:49Z', + jobs: [ + { + id: '4d18c46b-3bb4-4af1-81e2-07f9aee527fc', + name: 'Transform data', + body: 'fri', + adaptor: '@openfn/language-http@7.2.6', + project_credential_id: 'dd409089-5569-4157-8cf6-528ace283348', + }, + { + id: '40b839bd-5ade-414e-8dde-ed3ae77239ea', + name: 'do something', + body: '// Check out the Job Writing Guide for help getting started:\n// https://docs.openfn.org/documentation/jobs/job-writing-guide\n', + adaptor: '@openfn/language-http@7.2.6', + project_credential_id: null, + }, + ], + triggers: [ + { + enabled: false, + id: 'bf10f31a-cf51-45a2-95a4-756d0a25af53', + type: 'webhook', + }, + ], + updated_at: '2026-01-23T12:08:47Z', + lock_version: 34, + deleted_at: null, + concurrency: null, +}; + +test('match lightning version', async (t) => { + const [expected] = example.version_history; + + // load the project from v1 state + const proj = await Project.from('state', { + workflows: [example], + }); + + const wf = proj.workflows[0]; + const hash = wf.getVersionHash(); + t.log(expected); + t.log(hash); + t.is(parse(hash).hash, parse(expected).hash); +}); test('generate an 12 character version hash for a basic workflow', (t) => { const workflow = 
generateWorkflow( @@ -15,31 +81,240 @@ test('generate an 12 character version hash for a basic workflow', (t) => { ` ); const hash = workflow.getVersionHash(); - t.is(hash, 'cli:518f491717a7'); + t.is(hash, 'cli:72aed7c5f224'); +}); + +test('ordering: generate version string with no steps', (t) => { + const workflow = generateWorkflow( + ` + @name a + @id some-id + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:a'); +}); + +test('ordering: generate version string with webook trigger and step', (t) => { + const workflow = generateWorkflow( + ` + @name a + @id some-id + trigger(type=webhook)-x(adaptor=http,expression=fn,project_credential_id=abc) + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:awebhookhttpfnxabctruewebhook-x'); +}); + +test('ordering: multiple steps are sorted alphabetically by name', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + z-x + a-x + m-x + ` + ); + // With step keys sorted: adaptor, body, keychain_credential_id, name, project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfamxztruea-xtruem-xtruez-x'); +}); + +test('ordering: step names are sorted case-insensitively', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + Z-x + a-x + B-x + ` + ); + // Steps should appear in order: a, B, x, Z (case-insensitive sort) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfaBxZtruea-xtrueB-xtrueZ-x'); +}); + +test('ordering: step keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-step(project_credential_id=cred,name=step,expression=code,adaptor=http) + ` + ); + // Step keys sorted: adaptor, body, keychain_credential_id, name, project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfahttpcodestepcredtruea-step'); +}); + +test('ordering: multiple edges are sorted by edge name', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + z-a + a-b + m-n + ` + ); + // Edges sorted by "source-target" name: a-b, m-n, z-a + // Each edge has enabled=true and its generated name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabmnztruea-btruem-ntruez-a'); +}); + +test('ordering: edge keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-(label=lbl,condition=always,disabled=true)-b + ` + ); + // Edge keys sorted: condition_expression, condition_label, condition_type, enabled, label, name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfablblalwaysfalsea-b'); +}); + +test('ordering: trigger keys appear in sorted order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + t(type=cron,cron_expression="* * *",enabled=false)-x(expression=code) + ` + ); + // Trigger keys sorted: cron_expression, enabled, type + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wf* * *falsecroncodextruecron-x'); +}); + +test('ordering: complete workflow with all elements', (t) => { + const workflow = generateWorkflow( + ` + @name complete + @id some-id + trigger(type=webhook)-step2(adaptor=http,expression=fn2,project_credential_id=c2) + step1(adaptor=common,expression=fn1,project_credential_id=c1)-step2 + ` + ); + const hash = workflow.getVersionHash({ sha: false }); + t.is( + hash, + 
'cli:completewebhookcommonfn1step1c1httpfn2step2c2truestep1-step2truewebhook-step2' + ); +}); + +test('ordering: multiple edges from same source are sorted by target', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-z + a-m + a-b + ` + ); + // Edges: a-b, a-m, a-z (sorted by full edge name) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabmztruea-btruea-mtruea-z'); +}); + +test('ordering: workflow with webhook trigger connected to step', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + trigger(type=webhook)-step + ` + ); + // Workflow name, trigger type, step name, edge (enabled + name) + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfwebhooksteptruewebhook-step'); +}); + +test('ordering: steps with partial fields maintain sorted key order', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-step(name=step,adaptor=http) + ` + ); + // Step keys sorted: adaptor, body, keychain_credential_id, name, project_credential_id + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfahttpsteptruea-step'); +}); + +test('ordering: edge with js_expression condition', (t) => { + const workflow = generateWorkflow( + ` + @name wf + @id some-id + a-(condition="state.x > 5",label=check)-b + ` + ); + // Edge keys sorted: condition_expression, condition_label, condition_type, enabled, label, name + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:wfabstate.x > 5checkjs_expressiontruea-b'); +}); + +test('ordering: undefined fields are omitted', (t) => { + const workflow1 = generateWorkflow( + ` + @name wf + @id some-id + a-b(name=b) + ` + ); + + const workflow2 = generateWorkflow( + ` + @name wf + @id some-id + a-b(name=b) + ` + ); + + // Both should produce the same hash + const hash1 = workflow1.getVersionHash({ sha: false }); + const hash2 = workflow2.getVersionHash({ sha: false }); + + t.is(hash1, 'cli:wfabtruea-b'); + t.is(hash1, hash2); }); +// TODO more ordering tests + test('unique hash but different steps order', (t) => { const workflow1 = generateWorkflow( ` @name same-workflow @id id-one a-b - b-c + a-c + a-d ` ); + + // different order of nodes but should generate the same hash const workflow2 = generateWorkflow( ` @name same-workflow @id id-two + a-d a-c - c-b + a-b ` ); - // different order of nodes (b & c changed position) but should generate the same hash // validate second step is actually different t.is(workflow1.steps[1].name, 'b'); - t.is(workflow2.steps[1].name, 'c'); + t.is(workflow2.steps[1].name, 'd'); + // assert that hashes are the same t.is(generateHash(workflow1), generateHash(workflow2)); }); @@ -74,6 +349,81 @@ test('hash changes when workflow name changes', (t) => { t.not(generateHash(wf1), generateHash(wf2)); }); +test('hash a trigger', (t) => { + // check that various changes on a trigger update the hash + const webhook = generateWorkflow( + `@name wf-1 + @id workflow-id + t(type=webhook)-x(expression=x) + ` + ); + const cron = generateWorkflow( + `@name wf-1 + @id workflow-id + t(type=cron)-x(expression=x) + ` + ); + + t.not(generateHash(webhook), generateHash(cron)); + + const cronEnabled = generateWorkflow( + `@name wf-1 + @id workflow-id + t(enabled=false)-x + ` + ); + t.not(generateHash(webhook), generateHash(cronEnabled)); + + const cronExpression = generateWorkflow( + `@name wf-1 + @id workflow-id + t(cron_expression="1")-x + ` + ); + t.not(generateHash(webhook), 
generateHash(cronExpression)); +}); + +test('hash changes across an edge', (t) => { + const basicEdge = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-b + ` + ); + + const withLabel = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(label=x)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withLabel)); + + const withCondition = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(condition=always)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withCondition)); + + const withDisabled = generateWorkflow( + ` + @name wf-1 + @id workflow-id + a-(disabled=true)-b + ` + ); + + t.not(generateHash(basicEdge), generateHash(withDisabled)); +}); + +// TODO joe to think more about credential mapping (keychain and project cred keys) // can't get credentials to work in the generator, need to fix that test.skip('hash changes when credentials field changes', (t) => { const wf1 = generateWorkflow( @@ -157,3 +507,21 @@ test('ignored fields do not affect hash', (t) => { ); t.is(generateHash(wf1), generateHash(wf1_ignored)); }); + +// This test is important because when merging, the local workflow +// representation won't have UUIDs in it - and that should be fine, nothing should break +test('works without UUIDs', (t) => { + const workflow = generateWorkflow( + ` + @name a + @id some-id + webhook-transform_data(name="Transform data",expression="fn(s => s)") + `, + { + openfnUuid: false, + } + ); + + const hash = workflow.getVersionHash({ sha: false }); + t.is(hash, 'cli:awebhookfn(s => s)Transform datatruewebhook-Transform data'); +}); From d888387b320dee94b41213a041229b506a75056d Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Sun, 1 Feb 2026 12:54:51 +0000 Subject: [PATCH 10/34] update merge strategy with onlyUpdated option We'll use this on deploy to ensure that only locally changed workflows get replaced in the merge --- packages/project/src/Project.ts | 4 + packages/project/src/gen/generator.ts | 6 ++ packages/project/src/merge/merge-project.ts | 79 ++++++++++++------- .../src/util/find-changed-workflows.ts | 30 +++++++ .../project/test/merge/merge-project.test.ts | 68 ++++++++++++++++ .../test/util/find-changed-workflows.test.ts | 41 ++++++++++ 6 files changed, 201 insertions(+), 27 deletions(-) create mode 100644 packages/project/src/util/find-changed-workflows.ts create mode 100644 packages/project/test/util/find-changed-workflows.test.ts diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index e4800fd0b..14e297af0 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -264,6 +264,10 @@ export class Project { generateConfig() { return extractConfig(this); } + + clone() { + return new Project(this.serialize('project') as any); + } } export default Project; diff --git a/packages/project/src/gen/generator.ts b/packages/project/src/gen/generator.ts index 9cb09d930..01e12b15f 100644 --- a/packages/project/src/gen/generator.ts +++ b/packages/project/src/gen/generator.ts @@ -17,6 +17,9 @@ type GenerateWorkflowOptions = { uuidMap?: Record; openfnUuid: boolean; // TODO probably need to do this by default? 
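// A minimal usage sketch (illustrative only, following the tests added in this patch) of the
// generator option introduced just below:
//
//   const wf = generateWorkflow('@id a a-b', { history: true });
//   wf.history.at(-1); // the version hash pushed by the generator at creation time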
+ + /** If true, will set up a version hash in the history array */ + history: boolean; }; type GenerateProjectOptions = GenerateWorkflowOptions & { @@ -265,6 +268,9 @@ function generateWorkflow( } const wf = new Workflow(raw); + if (options.history) { + wf.pushHistory(wf.getVersionHash()); + } return wf; } diff --git a/packages/project/src/merge/merge-project.ts b/packages/project/src/merge/merge-project.ts index 17abdb488..3fcabe393 100644 --- a/packages/project/src/merge/merge-project.ts +++ b/packages/project/src/merge/merge-project.ts @@ -6,29 +6,40 @@ import mapUuids from './map-uuids'; import baseMerge from '../util/base-merge'; import getDuplicates from '../util/get-duplicates'; import Workflow from '../Workflow'; +import findChangedWorkflows from '../util/find-changed-workflows'; export const SANDBOX_MERGE = 'sandbox'; export const REPLACE_MERGE = 'replace'; +export const SYNC_MERGE = 'replace'; + export class UnsafeMergeError extends Error {} export type MergeProjectOptions = { workflowMappings: Record; // removeUnmapped: boolean; force: boolean; + + /** + * If mode is sandbox, basically only content will be merged and all metadata/settings/options/config is ignored + * If mode is replace, all properties on the source will override the target (including UUIDs, name) + */ mode: typeof SANDBOX_MERGE | typeof REPLACE_MERGE; + + /** + * If true, only workflows that have changed in the source + * will be merged. + */ + onlyUpdated: boolean; }; const defaultOptions: MergeProjectOptions = { workflowMappings: {}, removeUnmapped: false, force: true, - /** - * If mode is sandbox, basically only content will be merged and all metadata/settings/options/config is ignored - * If mode is replace, all properties on the source will override the target (including UUIDs, name) - */ mode: SANDBOX_MERGE, + onlyUpdated: false, }; /** @@ -51,28 +62,34 @@ export function merge( defaultOptions ) as Required; - // check whether multiple workflows are merging into one. throw Error - const dupTargetMappings = getDuplicates( - Object.values(options.workflowMappings ?? {}) - ); - if (dupTargetMappings.length) { - throw new Error( - `The following target workflows have multiple source workflows merging into them: ${dupTargetMappings.join( - ', ' - )}` - ); - } - const finalWorkflows: Workflow[] = []; const usedTargetIds = new Set(); + let sourceWorkflows = source.workflows; + + const noMappings = isEmpty(options.workflowMappings); + + if (options.onlyUpdated) { + // only include workflows that have changed (since history or forked_from) in the list + // unchanged target workflows will be added to the finalWorkflows list later + sourceWorkflows = findChangedWorkflows(source); + } + + if (!noMappings) { + // check whether multiple workflows are merging into one + const dupes = getDuplicates(Object.values(options.workflowMappings ?? {})); + if (dupes.length) { + throw new Error( + `The following target workflows have multiple source workflows merging into them: ${dupes.join( + ', ' + )}` + ); + } - const noMappings = isEmpty(options?.workflowMappings); // no mapping provided. 
hence * - let sourceWorkflows: Workflow[] = source.workflows.filter((w) => { - if (noMappings) return true; - return !!options?.workflowMappings[w.id]; - }); + sourceWorkflows = source.workflows.filter( + (w) => !!options.workflowMappings[w.id] + ); + } - // mergeability const potentialConflicts: Record = {}; for (const sourceWorkflow of sourceWorkflows) { const targetId = @@ -100,11 +117,19 @@ export function merge( if (targetWorkflow) { usedTargetIds.add(targetWorkflow.id); - const mappings = mapUuids(sourceWorkflow, targetWorkflow); - finalWorkflows.push( - // @ts-ignore - mergeWorkflows(sourceWorkflow, targetWorkflow, mappings) - ); + + // If mode is replace, just swap out the target workflow for the source workflow + // No mapping needed really + if (options.mode === REPLACE_MERGE) { + finalWorkflows.push(sourceWorkflow); + } else { + // Otherwise, merge these workflows, preserving UUIDs smartly + const mappings = mapUuids(sourceWorkflow, targetWorkflow); + finalWorkflows.push( + // @ts-ignore + mergeWorkflows(sourceWorkflow, targetWorkflow, mappings) + ); + } } else { finalWorkflows.push(sourceWorkflow); } diff --git a/packages/project/src/util/find-changed-workflows.ts b/packages/project/src/util/find-changed-workflows.ts new file mode 100644 index 000000000..6fac9a76f --- /dev/null +++ b/packages/project/src/util/find-changed-workflows.ts @@ -0,0 +1,30 @@ +import Project from '../Project'; +import { generateHash } from './version'; + +/** + * For a give Project, identify which workflows have changed + * Uses forked_from as the base, or history if that's unavailable + */ +export default (project: Project) => { + const base: Record = + project.cli.forked_from ?? + project.workflows.reduce((obj, wf) => { + if (wf.history.length) { + obj[wf.id] = wf.history.at(-1); + } + return obj; + }, {}); + + const changed = []; + + for (const wf of project.workflows) { + if (wf.id in base) { + const hash = generateHash(wf); + if (hash !== base[wf.id]) { + changed.push(wf); + } + } + } + + return changed; +}; diff --git a/packages/project/test/merge/merge-project.test.ts b/packages/project/test/merge/merge-project.test.ts index d56f6f5ac..27f60e584 100644 --- a/packages/project/test/merge/merge-project.test.ts +++ b/packages/project/test/merge/merge-project.test.ts @@ -669,3 +669,71 @@ test('options: multiple source into one target error', (t) => { } ); }); + +test('options: onlyUpdated with no changed workflows', (t) => { + // If I do this as a replace, and nothing has changed, the target UUIDs should be preserved + const source = createProject([ + generateWorkflow('@id a a-b', { history: true }), + generateWorkflow('@id b x-y', { history: true }), + ]); + const target = createProject([ + generateWorkflow('@id a a-b', { history: true }), + generateWorkflow('@id b x-y', { history: true }), + ]); + + const result = merge(source, target, { + onlyUpdated: true, + mode: 'replace', + }); + + // step UUIDs in the target should not have changed + t.is( + result.workflows[0].steps[0].openfn.uuid, + target.workflows[0].steps[0].openfn.uuid + ); + t.is( + result.workflows[0].steps[1].openfn.uuid, + target.workflows[0].steps[1].openfn.uuid + ); +}); + +test('options: onlyUpdated with 1 changed, 1 unchanged workflow', (t) => { + // If I do this as a replace, and nothing has changed, the target UUIDs should be preserved + const source = createProject([ + generateWorkflow('@id a a-b', { history: true }), + generateWorkflow('@id b x-y', { history: true }), + ]); + const target = createProject([ + 
generateWorkflow('@id a a-b', { history: true }), + generateWorkflow('@id b x-y', { history: true }), + ]); + + // change the source + source.workflows[0].steps[0].expression = 'fn()'; + + const result = merge(source, target, { + onlyUpdated: true, + + // Set this to mode replace and use UUIDs as a proxy for + // "did this thing change?" + mode: 'replace', + }); + + // step 1 has changed and should match the source + t.is(result.workflows[0].steps[0].expression, 'fn()'); + t.is( + result.workflows[0].steps[0].openfn.uuid, + source.workflows[0].steps[0].openfn.uuid + ); + + // but step 2 did not change and should have the original UUID + t.is( + result.workflows[0].steps[1].openfn.uuid, + target.workflows[0].steps[1].openfn.uuid + ); +}); + +test.todo('options: only changed and 1 workflow'); + +// this test it's important that the final project includes the unchanged workflow +test.todo('options: only changed, and 1 changed, 1 unchanged workflow'); diff --git a/packages/project/test/util/find-changed-workflows.test.ts b/packages/project/test/util/find-changed-workflows.test.ts new file mode 100644 index 000000000..c3b144ae9 --- /dev/null +++ b/packages/project/test/util/find-changed-workflows.test.ts @@ -0,0 +1,41 @@ +import test from 'ava'; +import findChangedWorkflows from '../../src/util/find-changed-workflows'; +import { generateProject } from '../../src'; +import { generateHash } from '../../src/util/version'; + +test('should return 0 changed workflows from forked_from', (t) => { + const project = generateProject('proj', ['@id a a-b', '@id b x-y']); + const [a, b] = project.workflows; + + // set up forked_from + project.cli.forked_from = { + [a.id]: generateHash(a), + [b.id]: generateHash(b), + }; + + const changed = findChangedWorkflows(project); + + t.deepEqual(changed, []); +}); + +test('should return 1 changed workflows from forked_from', (t) => { + const project = generateProject('proj', ['@id a a-b', '@id b x-y']); + const [a, b] = project.workflows; + + // set up forked_from + project.cli.forked_from = { + [a.id]: generateHash(a), + [b.id]: generateHash(b), + }; + + // Now change b + b.steps[0].name = 'x1'; + + const changed = findChangedWorkflows(project); + t.is(changed.length, 1); + t.is(changed[0].id, 'b'); +}); + +test.todo('changed from history'); +test.todo('multiple changed workflows'); +test.todo('if no base available, assume a change'); From 8edbf7719e33496bb040508920344dedfa86874c Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Sun, 1 Feb 2026 15:41:53 +0000 Subject: [PATCH 11/34] typo in logging --- packages/cli/src/projects/fetch.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 8ca84ab40..a69285e11 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -254,7 +254,7 @@ export async function fetchRemoteProject( options.project } to UUID ${projectUUID} from local project ${printProjectName( localProject - )}}` + )}` ); } From 7666440a1baf2b7fdd0b72641e9020d5c9d10679 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Sun, 1 Feb 2026 17:04:29 +0000 Subject: [PATCH 12/34] tidy --- packages/cli/src/projects/fetch.ts | 25 +++---------------------- packages/project/src/Workflow.ts | 5 +++++ 2 files changed, 8 insertions(+), 22 deletions(-) diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index a69285e11..3d168815c 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -266,6 
+266,7 @@ export async function fetchRemoteProject( projectUUID, logger ); + console.log(data.workflows); const project = await Project.from( 'state', @@ -279,6 +280,8 @@ export async function fetchRemoteProject( } ); + console.log(project.workflows[0].history); + logger.debug( `Loaded remote project ${project.openfn!.uuid} with id ${ project.id @@ -321,27 +324,5 @@ To ignore this error and override the local file, pass --force (-f) throw error; } - - const hasAnyHistory = remoteProject.workflows.find( - (w) => w.workflow.history?.length - ); - - // Skip version checking if: - const skipVersionCheck = - options.force || // The user forced the checkout - !hasAnyHistory; // the remote project has no history (can happen in old apps) - - // TODO temporarily force skip - // TODO canMergeInto needs to return a reason - if (!skipVersionCheck && !remoteProject.canMergeInto(localProject!)) { - // TODO allow rename - const e = new Error( - `Error! An incompatible project exists at this location.` - ); - - delete e.stack; - - throw e; - } } } diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index 2d323bfbb..e1220428c 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -204,12 +204,17 @@ class Workflow { // return true if the current workflow can be merged into the target workflow without losing any changes canMergeInto(target: Workflow) { + console.log('this is remote project'); const thisHistory = this.workflow.history?.concat(this.getVersionHash()) ?? []; const targetHistory = target.workflow.history?.concat(target.getVersionHash()) ?? []; + console.log({ thisHistory }); + console.log({ targetHistory }); + const targetHead = targetHistory[targetHistory.length - 1]; + console.log({ targetHead }); return thisHistory.indexOf(targetHead) > -1; } } From 6a0783966952b17a04edfd7a3c9c67d36a8fbdd6 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Sun, 1 Feb 2026 17:48:47 +0000 Subject: [PATCH 13/34] fix fetch test --- packages/cli/src/projects/fetch.ts | 3 -- packages/cli/test/projects/fetch.test.ts | 40 ++++++++++++------------ 2 files changed, 20 insertions(+), 23 deletions(-) diff --git a/packages/cli/src/projects/fetch.ts b/packages/cli/src/projects/fetch.ts index 3d168815c..9102c574d 100644 --- a/packages/cli/src/projects/fetch.ts +++ b/packages/cli/src/projects/fetch.ts @@ -266,7 +266,6 @@ export async function fetchRemoteProject( projectUUID, logger ); - console.log(data.workflows); const project = await Project.from( 'state', @@ -280,8 +279,6 @@ export async function fetchRemoteProject( } ); - console.log(project.workflows[0].history); - logger.debug( `Loaded remote project ${project.openfn!.uuid} with id ${ project.id diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 398f9e58a..55eba0145 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -470,10 +470,14 @@ test.serial( t.regex(message, /Fetched project file to/); } ); + // In this test, the file on disk has diverged from the remove -// This means changes could be lost, so we throw! +// This means changes could be lost +// But we do not validate against this because: +// a) we can't! 
Not without infinite immutable history anyway +// b) users should not be manutally editing these files test.serial( - 'error: throw if fetching a project that has diverged', + 'do not throw even if fetching a project that has diverged', async (t) => { // Change project.yaml const modified = myProject_yaml @@ -487,29 +491,25 @@ test.serial( '/ws/.projects/project@app.openfn.org.yaml': modified, }); - await t.throwsAsync( - () => - fetchHandler( - { - project: PROJECT_UUID, - alias: 'project', - - endpoint: ENDPOINT, - apiKey: 'test-api-key', - workspace: '/ws', - } as any, - logger - ), - { - message: /incompatible project/, - } + await t.notThrowsAsync(() => + fetchHandler( + { + project: PROJECT_UUID, + alias: 'project', + + endpoint: ENDPOINT, + apiKey: 'test-api-key', + workspace: '/ws', + } as any, + logger + ) ); const filePath = '/ws/.projects/project@app.openfn.org.yaml'; const fileContent = await readFile(filePath, 'utf-8'); - // The file should NOT be overwritten - t.regex(fileContent, /fn\(x\)/); + // The file should be overwritten + t.regex(fileContent, /fn\(\)/); } ); From 661679f66220a71c64ffb8888c227896f9c171bf Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Sun, 1 Feb 2026 18:10:11 +0000 Subject: [PATCH 14/34] types --- packages/project/src/util/find-changed-workflows.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/project/src/util/find-changed-workflows.ts b/packages/project/src/util/find-changed-workflows.ts index 6fac9a76f..baa61b2f2 100644 --- a/packages/project/src/util/find-changed-workflows.ts +++ b/packages/project/src/util/find-changed-workflows.ts @@ -8,7 +8,7 @@ import { generateHash } from './version'; export default (project: Project) => { const base: Record = project.cli.forked_from ?? - project.workflows.reduce((obj, wf) => { + project.workflows.reduce((obj: any, wf) => { if (wf.history.length) { obj[wf.id] = wf.history.at(-1); } From 9e96430915d5e052f9700222e01b999077dea5f6 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 15:42:28 +0000 Subject: [PATCH 15/34] fix deploy sync and merge after deploy --- packages/cli/src/projects/deploy.ts | 33 +++++++++++---------- packages/cli/src/projects/util.ts | 16 ++++++++-- packages/project/src/Project.ts | 5 ++++ packages/project/src/Workflow.ts | 5 ---- packages/project/src/merge/merge-project.ts | 27 +++++++++-------- packages/project/src/parse/from-fs.ts | 8 +++-- 6 files changed, 56 insertions(+), 38 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 319653f45..196c91e95 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -1,5 +1,5 @@ import yargs from 'yargs'; -import Project from '@openfn/project'; +import Project, { Workspace } from '@openfn/project'; import c from 'chalk'; import { writeFile } from 'node:fs/promises'; import path from 'node:path'; @@ -49,7 +49,7 @@ const options = [ ]; const printProjectName = (project: Project) => - `${project.id} (${project.openfn?.uuid || ''})`; + `${project}${project.id} (${project.openfn?.uuid || ''})`; export const command: yargs.CommandModule = { command: 'deploy', @@ -101,21 +101,20 @@ export async function handler(options: DeployOptions, logger: Logger) { ); const config = loadAppAuthConfig(options, logger); - // TODO: allow users to specify which project to deploy - // Should be able to take any project.yaml file via id, uuid, alias or path - // Note that it's a little wierd to deploy a project you haven't checked out, - // so put 
good safeguards here logger.info('Attempting to load checked-out project from workspace'); - // TODO this doesn't have a history! - // loading from the fs the history isn't available + // TODO this is the hard way to load the local alias + // We need track alias in openfn.yaml to make this easier (and tracked in from fs) + const ws = new Workspace(options.workspace || '.'); + const { alias } = ws.getActiveProject()!; + // TODO this doesn't have an alias const localProject = await Project.from('fs', { root: options.workspace || '.', + alias, }); - // TODO if there's no local metadata, the user must pass a UUID or alias to post to - logger.success(`Loaded local project ${printProjectName(localProject)}`); + // First step, fetch the latest version and write // this may throw! let remoteProject: Project; @@ -153,8 +152,7 @@ Pass --force to override this error and deploy anyway.`); return false; } - // this fails now because the local project has no UUIDs - // But should that matter ,actually? + // TODO: what if remote diff and the version checked disagree for some reason? const diffs = reportDiff(remoteProject!, localProject, logger); if (!diffs.length) { logger.success('Nothing to deploy'); @@ -176,7 +174,7 @@ Pass --force to override this error and deploy anyway.`); logger.warn( 'Skipping compatibility check as no local version history detected' ); - logger.warn('Pushing these changes may overrite changes made to the app'); + logger.warn('Pushing these changes may overwrite changes made to the app'); } else { const divergentWorkflows = hasRemoteDiverged(localProject, remoteProject!); if (divergentWorkflows) { @@ -203,10 +201,13 @@ Pass --force to override this error and deploy anyway.`); } logger.info('Merging changes into remote project'); + // TODO I would like to log which workflows are being updated const merged = Project.merge(localProject, remoteProject!, { mode: 'replace', force: true, + onlyUpdated: true, }); + // generate state for the provisioner const state = merged.serialize('state', { format: 'json', @@ -262,7 +263,6 @@ Pass --force to override this error and deploy anyway.`); merged.config ); - // TODO why isn't this right? oh, because the outpu path isn't quite right updateForkedFrom(finalProject); const configData = finalProject.generateConfig(); await writeFile( @@ -270,9 +270,10 @@ Pass --force to override this error and deploy anyway.`); configData.content ); + // TODO why is alias wrong here? 
const finalOutputPath = getSerializePath(localProject, options.workspace!); - logger.debug('Updating local project at ', finalOutputPath); - await serialize(finalProject, finalOutputPath); + const fullFinalPath = await serialize(finalProject, finalOutputPath); + logger.debug('Updated local project at ', fullFinalPath); } logger.success('Updated project at', config.endpoint); diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index d367be7a2..e54795113 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -158,10 +158,20 @@ export async function deployProject( }); if (!response.ok) { - const body = await response.json(); - + logger?.error(`Deploy failed with code `, response.status); logger?.error('Failed to deploy project:'); - logger?.error(JSON.stringify(body, null, 2)); + + const contentType = response.headers.get('content-type'); + + if (contentType.match('application/json ')) { + const body = await response.json(); + logger?.error(JSON.stringify(body, null, 2)); + } else { + const content = await response.text(); + // TODO html errors are too long to be useful... figure this out later + logger?.error(content); + } + throw new CLIError( `Failed to deploy project ${state.name}: ${response.status}` ); diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 14e297af0..34fc40989 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -165,6 +165,11 @@ export class Project { return this.cli.alias ?? 'main'; } + set alias(value: string) { + this.cli ??= {}; + this.cli.alias = value; + } + get uuid() { return this.openfn?.uuid ? `${this.openfn.uuid}` : undefined; } diff --git a/packages/project/src/Workflow.ts b/packages/project/src/Workflow.ts index e1220428c..2d323bfbb 100644 --- a/packages/project/src/Workflow.ts +++ b/packages/project/src/Workflow.ts @@ -204,17 +204,12 @@ class Workflow { // return true if the current workflow can be merged into the target workflow without losing any changes canMergeInto(target: Workflow) { - console.log('this is remote project'); const thisHistory = this.workflow.history?.concat(this.getVersionHash()) ?? []; const targetHistory = target.workflow.history?.concat(target.getVersionHash()) ?? []; - console.log({ thisHistory }); - console.log({ targetHistory }); - const targetHead = targetHistory[targetHistory.length - 1]; - console.log({ targetHead }); return thisHistory.indexOf(targetHead) > -1; } } diff --git a/packages/project/src/merge/merge-project.ts b/packages/project/src/merge/merge-project.ts index 3fcabe393..f1528be7b 100644 --- a/packages/project/src/merge/merge-project.ts +++ b/packages/project/src/merge/merge-project.ts @@ -118,18 +118,20 @@ export function merge( if (targetWorkflow) { usedTargetIds.add(targetWorkflow.id); - // If mode is replace, just swap out the target workflow for the source workflow - // No mapping needed really - if (options.mode === REPLACE_MERGE) { - finalWorkflows.push(sourceWorkflow); - } else { - // Otherwise, merge these workflows, preserving UUIDs smartly - const mappings = mapUuids(sourceWorkflow, targetWorkflow); - finalWorkflows.push( - // @ts-ignore - mergeWorkflows(sourceWorkflow, targetWorkflow, mappings) - ); - } + // Otherwise, merge these workflows, preserving UUIDs smartly + const mappings = mapUuids(sourceWorkflow, targetWorkflow); + finalWorkflows.push( + // @ts-ignore + mergeWorkflows(sourceWorkflow, targetWorkflow, mappings) + ); + + // THis was bad!! Why did I do this? 
+ // // If mode is replace, just swap out the target workflow for the source workflow + // // No mapping needed really + // if (options.mode === REPLACE_MERGE) { + // finalWorkflows.push(cloneDeep(sourceWorkflow)); + // } else { + // } } else { finalWorkflows.push(sourceWorkflow); } @@ -163,6 +165,7 @@ export function merge( ...source.options, }, name: source.name ?? target.name, + alias: source.alias ?? target.alias, description: source.description ?? target.description, credentials: source.credentials ?? target.credentials, collections: source.collections ?? target.collections, diff --git a/packages/project/src/parse/from-fs.ts b/packages/project/src/parse/from-fs.ts index 1ced41426..8375a5afe 100644 --- a/packages/project/src/parse/from-fs.ts +++ b/packages/project/src/parse/from-fs.ts @@ -18,6 +18,7 @@ export type FromFsConfig = { root: string; config?: Partial; logger?: Logger; + alias?: string; }; // Parse a single project from a root folder @@ -25,7 +26,7 @@ export type FromFsConfig = { // It just builds the project on disk // I suppose we could take an option? export const parseProject = async (options: FromFsConfig) => { - const { root, logger } = options; + const { root, logger, alias } = options; const { type, content } = findWorkspaceFile(root); const context = loadWorkspaceFile(content, type as any); @@ -111,5 +112,8 @@ export const parseProject = async (options: FromFsConfig) => { } } - return new Project(proj as l.Project, context.workspace); + return new Project(proj as l.Project, { + alias, + ...context.workspace, + }); }; From 6a0f51b96c7f50ea447fb150828d00204b91fd5a Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 16:43:49 +0000 Subject: [PATCH 16/34] ensure trigger enabled state is tracked in workflow.yaml --- packages/cli/src/projects/deploy.ts | 10 ++- packages/lexicon/core.d.ts | 4 +- packages/project/src/parse/from-app-state.ts | 4 +- .../project/src/serialize/to-app-state.ts | 1 + packages/project/src/util/project-diff.ts | 7 ++ packages/project/src/util/version.ts | 1 + .../project/test/parse/from-app-state.test.ts | 7 +- .../test/serialize/to-app-state.test.ts | 3 +- packages/project/test/serialize/to-fs.test.ts | 72 +++++++++++++++++++ .../test/util/version-workflow.test.ts | 1 + 10 files changed, 100 insertions(+), 10 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 196c91e95..fd72600dc 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -49,7 +49,7 @@ const options = [ ]; const printProjectName = (project: Project) => - `${project}${project.id} (${project.openfn?.uuid || ''})`; + `${project.id} (${project.openfn?.uuid || ''})`; export const command: yargs.CommandModule = { command: 'deploy', @@ -153,7 +153,7 @@ Pass --force to override this error and deploy anyway.`); } // TODO: what if remote diff and the version checked disagree for some reason? 
- const diffs = reportDiff(remoteProject!, localProject, logger); + const diffs = reportDiff(localProject, remoteProject, logger); if (!diffs.length) { logger.success('Nothing to deploy'); return; @@ -280,8 +280,12 @@ Pass --force to override this error and deploy anyway.`); } export const reportDiff = (local: Project, remote: Project, logger: Logger) => { + // console.log(local.workflows[0].workflow); + // console.log(remote.workflows[0].workflow); + console.log('local', JSON.stringify(local.workflows[0].workflow)); + console.log('remote', JSON.stringify(remote.workflows[0].workflow)); const diffs = remote.diff(local); - + console.log({ diffs }); if (diffs.length === 0) { logger.info('No workflow changes detected'); return diffs; diff --git a/packages/lexicon/core.d.ts b/packages/lexicon/core.d.ts index 07e9b6490..b87f90832 100644 --- a/packages/lexicon/core.d.ts +++ b/packages/lexicon/core.d.ts @@ -213,7 +213,9 @@ export type ConditionalStepEdge = { /** * A no-op type of Step */ -export interface Trigger extends Step {} +export interface Trigger extends Step { + enabled?: boolean; +} /** * An expression which has been compiled, and so includes import and export statements diff --git a/packages/project/src/parse/from-app-state.ts b/packages/project/src/parse/from-app-state.ts index 2587c8725..f9e84a960 100644 --- a/packages/project/src/parse/from-app-state.ts +++ b/packages/project/src/parse/from-app-state.ts @@ -107,8 +107,7 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { // TODO what do we do if the condition is disabled? // I don't think that's the same as edge condition false? Object.values(workflow.triggers).forEach((trigger: Provisioner.Trigger) => { - const { type, ...otherProps } = trigger; - + const { type, enabled, ...otherProps } = trigger; if (!mapped.start) { mapped.start = type; } @@ -119,6 +118,7 @@ export const mapWorkflow = (workflow: Provisioner.Workflow) => { mapped.steps.push({ id: type, type, + enabled, openfn: renameKeys(otherProps, { id: 'uuid' }), next: connectedEdges.reduce((obj: any, edge) => { const target = Object.values(jobs).find( diff --git a/packages/project/src/serialize/to-app-state.ts b/packages/project/src/serialize/to-app-state.ts index 8d428eb9a..b8e32781c 100644 --- a/packages/project/src/serialize/to-app-state.ts +++ b/packages/project/src/serialize/to-app-state.ts @@ -100,6 +100,7 @@ export const mapWorkflow = (workflow: Workflow) => { isTrigger = true; node = { type: s.type ?? 'webhook', // this is mostly for tests + enabled: s.enabled, ...renameKeys(s.openfn, { uuid: 'id' }), } as Provisioner.Trigger; wfState.triggers[node.type] = node; diff --git a/packages/project/src/util/project-diff.ts b/packages/project/src/util/project-diff.ts index 7e849130a..242aba9cf 100644 --- a/packages/project/src/util/project-diff.ts +++ b/packages/project/src/util/project-diff.ts @@ -39,6 +39,13 @@ export function diff(a: Project, b: Project): WorkflowDiff[] { // workflow exists in A but not in B = removed diffs.push({ id: workflowA.id, type: 'removed' }); } else if (workflowA.getVersionHash() !== workflowB.getVersionHash()) { + // TODO what's up with this bullshit diff? 
+ console.log(workflowA.getVersionHash({ sha: false })); + console.log(); + console.log(); + console.log(workflowB.getVersionHash({ sha: false })); + console.log(); + console.log(); // workflow exists in both but with different content = changed diffs.push({ id: workflowA.id, type: 'changed' }); } diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index 69ed2e054..e36d9da67 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -65,6 +65,7 @@ export const generateHash = ( const trigger = wfState.triggers[triggerId]; triggerKeys.forEach((key) => { const value = get(trigger, key); + // bit of a hack: default the trigger key value if (isDefined(value)) { parts.push(serializeValue(value)); } diff --git a/packages/project/test/parse/from-app-state.test.ts b/packages/project/test/parse/from-app-state.test.ts index 23fc44782..3deffc5a1 100644 --- a/packages/project/test/parse/from-app-state.test.ts +++ b/packages/project/test/parse/from-app-state.test.ts @@ -105,7 +105,8 @@ test('should create a Project from prov state with a workflow', (t) => { { id: 'webhook', type: 'webhook', - openfn: { enabled: true, uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058' }, + enabled: true, + openfn: { uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058' }, next: { 'transform-data': { condition: 'always', @@ -160,8 +161,8 @@ test('mapWorkflow: map a cron trigger', (t) => { id: 'cron', type: 'cron', next: {}, + enabled: true, openfn: { - enabled: true, uuid: '1234', cron_expression: '0 1 0 0', }, @@ -176,6 +177,7 @@ test('mapWorkflow: map a webhook trigger', (t) => { t.deepEqual(trigger, { id: 'webhook', type: 'webhook', + enabled: true, next: { 'transform-data': { condition: 'always', @@ -186,7 +188,6 @@ test('mapWorkflow: map a webhook trigger', (t) => { }, }, openfn: { - enabled: true, uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', }, }); diff --git a/packages/project/test/serialize/to-app-state.test.ts b/packages/project/test/serialize/to-app-state.test.ts index 3d4c21cfe..c900fb34c 100644 --- a/packages/project/test/serialize/to-app-state.test.ts +++ b/packages/project/test/serialize/to-app-state.test.ts @@ -77,6 +77,7 @@ test('should set defaults for keys that Lightning needs', (t) => { { id: 'trigger', type: 'webhook', + enabled: true, next: { step: { openfn: { @@ -121,7 +122,7 @@ test('should set defaults for keys that Lightning needs', (t) => { keychain_credential_id: null, }, }, - triggers: { webhook: { type: 'webhook', id: 1 } }, + triggers: { webhook: { type: 'webhook', id: 1, enabled: true } }, edges: { ['trigger->step']: { id: '', diff --git a/packages/project/test/serialize/to-fs.test.ts b/packages/project/test/serialize/to-fs.test.ts index 484a0e244..c4adad5a4 100644 --- a/packages/project/test/serialize/to-fs.test.ts +++ b/packages/project/test/serialize/to-fs.test.ts @@ -157,6 +157,78 @@ test('extractWorkflow: single simple workflow with random edge property', (t) => }); }); +test('extractWorkflow: include trigger enabled state (true)', (t) => { + const project = new Project( + { + workflows: [ + { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'webhook', + type: 'webhook', + enabled: true, + }, + ], + openfn: { + id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', + }, + }, + ], + }, + { + formats: { + workflow: 'json', // for easier testing + }, + } + ); + + const { content } = extractWorkflow(project, 'my-workflow'); + + t.deepEqual(JSON.parse(content).steps[0], { + id: 'webhook', + type: 'webhook', + enabled: true, + 
}); +}); + +test('extractWorkflow: include trigger enabled state (false)', (t) => { + const project = new Project( + { + workflows: [ + { + id: 'my-workflow', + name: 'My Workflow', + steps: [ + { + id: 'webhook', + type: 'webhook', + enabled: false, + }, + ], + openfn: { + id: '72ca3eb0-042c-47a0-a2a1-a545ed4a8406', + }, + }, + ], + }, + { + formats: { + workflow: 'json', // for easier testing + }, + } + ); + + const { content } = extractWorkflow(project, 'my-workflow'); + + t.deepEqual(JSON.parse(content).steps[0], { + id: 'webhook', + type: 'webhook', + enabled: false, + }); +}); + test('extractWorkflow: single simple workflow with custom root', (t) => { const config = { dirs: { diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index ffe612a0c..4f34e8843 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -1,6 +1,7 @@ import test from 'ava'; import { generateHash, parse } from '../../src/util/version'; import Project, { generateWorkflow } from '../../src'; +import Workflow from '../../src/Workflow'; // this is an actual lightning workflow state, copied verbatim // todo already out of data as the version will change soon From 859aff6316a56705a82b9c8c3137a99ca2ebb66d Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 16:45:42 +0000 Subject: [PATCH 17/34] remove logs --- packages/cli/src/projects/deploy.ts | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index fd72600dc..ab60edb1e 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -280,12 +280,7 @@ Pass --force to override this error and deploy anyway.`); } export const reportDiff = (local: Project, remote: Project, logger: Logger) => { - // console.log(local.workflows[0].workflow); - // console.log(remote.workflows[0].workflow); - console.log('local', JSON.stringify(local.workflows[0].workflow)); - console.log('remote', JSON.stringify(remote.workflows[0].workflow)); const diffs = remote.diff(local); - console.log({ diffs }); if (diffs.length === 0) { logger.info('No workflow changes detected'); return diffs; From 890936bd27aa0596d8a49e702402214dd17e05e4 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 16:47:22 +0000 Subject: [PATCH 18/34] remove logs --- packages/project/src/util/project-diff.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/packages/project/src/util/project-diff.ts b/packages/project/src/util/project-diff.ts index 242aba9cf..7e849130a 100644 --- a/packages/project/src/util/project-diff.ts +++ b/packages/project/src/util/project-diff.ts @@ -39,13 +39,6 @@ export function diff(a: Project, b: Project): WorkflowDiff[] { // workflow exists in A but not in B = removed diffs.push({ id: workflowA.id, type: 'removed' }); } else if (workflowA.getVersionHash() !== workflowB.getVersionHash()) { - // TODO what's up with this bullshit diff? 
- console.log(workflowA.getVersionHash({ sha: false })); - console.log(); - console.log(); - console.log(workflowB.getVersionHash({ sha: false })); - console.log(); - console.log(); // workflow exists in both but with different content = changed diffs.push({ id: workflowA.id, type: 'changed' }); } From 95384c6e676a9c0d2a2d488ab5a9f936ac7dce00 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 17:03:26 +0000 Subject: [PATCH 19/34] fix test --- packages/project/src/merge/merge-project.ts | 8 ------ .../project/test/merge/merge-project.test.ts | 25 +++++++++---------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/packages/project/src/merge/merge-project.ts b/packages/project/src/merge/merge-project.ts index f1528be7b..14bb1a4bb 100644 --- a/packages/project/src/merge/merge-project.ts +++ b/packages/project/src/merge/merge-project.ts @@ -124,14 +124,6 @@ export function merge( // @ts-ignore mergeWorkflows(sourceWorkflow, targetWorkflow, mappings) ); - - // THis was bad!! Why did I do this? - // // If mode is replace, just swap out the target workflow for the source workflow - // // No mapping needed really - // if (options.mode === REPLACE_MERGE) { - // finalWorkflows.push(cloneDeep(sourceWorkflow)); - // } else { - // } } else { finalWorkflows.push(sourceWorkflow); } diff --git a/packages/project/test/merge/merge-project.test.ts b/packages/project/test/merge/merge-project.test.ts index 27f60e584..667744223 100644 --- a/packages/project/test/merge/merge-project.test.ts +++ b/packages/project/test/merge/merge-project.test.ts @@ -700,14 +700,18 @@ test('options: onlyUpdated with no changed workflows', (t) => { test('options: onlyUpdated with 1 changed, 1 unchanged workflow', (t) => { // If I do this as a replace, and nothing has changed, the target UUIDs should be preserved const source = createProject([ - generateWorkflow('@id a a-b', { history: true }), - generateWorkflow('@id b x-y', { history: true }), + generateWorkflow('@id a a-b', { uuidSeed: 100, history: true }), + generateWorkflow('@id b x-y', { uuidSeed: 200, history: true }), ]); const target = createProject([ - generateWorkflow('@id a a-b', { history: true }), - generateWorkflow('@id b x-y', { history: true }), + generateWorkflow('@id a a-b', { uuidSeed: 100, history: true }), + generateWorkflow('@id b x-y', { uuidSeed: 200, history: true }), ]); + // Scribble on both workflows + target.workflows[0].jam = 'jar'; + target.workflows[1].jam = 'jar'; + // change the source source.workflows[0].steps[0].expression = 'fn()'; @@ -721,16 +725,11 @@ test('options: onlyUpdated with 1 changed, 1 unchanged workflow', (t) => { // step 1 has changed and should match the source t.is(result.workflows[0].steps[0].expression, 'fn()'); - t.is( - result.workflows[0].steps[0].openfn.uuid, - source.workflows[0].steps[0].openfn.uuid - ); + // And our scribble should be lost + t.falsy(result.workflows[0].jam); - // but step 2 did not change and should have the original UUID - t.is( - result.workflows[0].steps[1].openfn.uuid, - target.workflows[0].steps[1].openfn.uuid - ); + // but step 2 did not change and should have our scribble + t.is(result.workflows[1].jam, 'jar'); }); test.todo('options: only changed and 1 workflow'); From 81dc03ddfa7203b3e6f72ecc8207d33049ae83c9 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 2 Feb 2026 17:08:01 +0000 Subject: [PATCH 20/34] update dry run messaging --- packages/cli/src/projects/deploy.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index ab60edb1e..5a1699fd1 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -274,9 +274,9 @@ Pass --force to override this error and deploy anyway.`); const finalOutputPath = getSerializePath(localProject, options.workspace!); const fullFinalPath = await serialize(finalProject, finalOutputPath); logger.debug('Updated local project at ', fullFinalPath); - } - logger.success('Updated project at', config.endpoint); + logger.success('Updated project at', config.endpoint); + } } export const reportDiff = (local: Project, remote: Project, logger: Logger) => { From 5a5e177814791a60a75a010e15ce2f97915e6566 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Fri, 6 Feb 2026 16:29:58 +0000 Subject: [PATCH 21/34] types --- packages/cli/src/projects/util.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index e54795113..fa87547eb 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -161,7 +161,7 @@ export async function deployProject( logger?.error(`Deploy failed with code `, response.status); logger?.error('Failed to deploy project:'); - const contentType = response.headers.get('content-type'); + const contentType = response.headers.get('content-type') ?? ''; if (contentType.match('application/json ')) { const body = await response.json(); From 7867287fad795f2dfd41ee4cf31ab2de19a4bbd1 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Fri, 6 Feb 2026 16:41:01 +0000 Subject: [PATCH 22/34] fix tests --- packages/cli/test/projects/fetch.test.ts | 2 +- packages/cli/test/projects/fixtures.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cli/test/projects/fetch.test.ts b/packages/cli/test/projects/fetch.test.ts index 55eba0145..a4e280f4a 100644 --- a/packages/cli/test/projects/fetch.test.ts +++ b/packages/cli/test/projects/fetch.test.ts @@ -435,8 +435,8 @@ test.serial( { id: 'webhook', type: 'webhook', + enabled: true, openfn: { - enabled: true, uuid: '4a06289c-15aa-4662-8dc6-f0aaacd8a058', }, next: { diff --git a/packages/cli/test/projects/fixtures.ts b/packages/cli/test/projects/fixtures.ts index 4b77482d0..0350a42bb 100644 --- a/packages/cli/test/projects/fixtures.ts +++ b/packages/cli/test/projects/fixtures.ts @@ -84,8 +84,8 @@ workflows: uuid: 66add020-e6eb-4eec-836b-20008afca816 - id: webhook type: webhook + enabled: true openfn: - enabled: true uuid: 4a06289c-15aa-4662-8dc6-f0aaacd8a058 next: transform-data: From 37a0b75cd2a279ace9e7a8d1448fd9e779fdf58a Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Fri, 6 Feb 2026 16:53:41 +0000 Subject: [PATCH 23/34] another test fix --- packages/cli/test/projects/checkout.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/cli/test/projects/checkout.test.ts b/packages/cli/test/projects/checkout.test.ts index fac0c1e29..8f7f74add 100644 --- a/packages/cli/test/projects/checkout.test.ts +++ b/packages/cli/test/projects/checkout.test.ts @@ -482,6 +482,7 @@ workspace: { id: 'webhook', type: 'webhook', + enabled: true, next: { 'transform-data-to-fhir-standard': { disabled: false, From df9e3d51852c1c7159c1b65fd7e585b3cead2876 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Fri, 6 Feb 2026 16:57:11 +0000 Subject: [PATCH 24/34] integration tests --- integration-tests/cli/test/project-v1.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/integration-tests/cli/test/project-v1.test.ts 
b/integration-tests/cli/test/project-v1.test.ts index 17f74171e..8518c6435 100644 --- a/integration-tests/cli/test/project-v1.test.ts +++ b/integration-tests/cli/test/project-v1.test.ts @@ -146,6 +146,7 @@ options: {} steps: - id: webhook type: webhook + enabled: true next: transform-data: disabled: false From 4f3830efe0e63288b1d4ece4206500920841101f Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 15:47:05 +0000 Subject: [PATCH 25/34] cli: add push as an alias for deploy --- .changeset/rich-feet-bake.md | 5 +++++ packages/cli/src/projects/deploy.ts | 1 + 2 files changed, 6 insertions(+) create mode 100644 .changeset/rich-feet-bake.md diff --git a/.changeset/rich-feet-bake.md b/.changeset/rich-feet-bake.md new file mode 100644 index 000000000..be0cad3a1 --- /dev/null +++ b/.changeset/rich-feet-bake.md @@ -0,0 +1,5 @@ +--- +'@openfn/cli': minor +--- + +Add `push` as an alias for `deploy` diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 5a1699fd1..d463c2bf7 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -53,6 +53,7 @@ const printProjectName = (project: Project) => export const command: yargs.CommandModule = { command: 'deploy', + aliases: 'push', describe: `Deploy the checked out project to a Lightning Instance`, builder: (yargs: yargs.Argv) => build(options, yargs) From cf75b62c4d159ef015da7991840d07eb6ef26413 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 16:20:30 +0000 Subject: [PATCH 26/34] project: lower case workflow names in hash --- packages/cli/src/projects/deploy.ts | 3 + packages/project/src/util/version.ts | 6 +- .../test/util/version-workflow.test.ts | 56 +++++++++++++++++++ 3 files changed, 64 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index d463c2bf7..a0336b6ac 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -81,6 +81,9 @@ export const hasRemoteDiverged = ( if (wf.id in refs) { const forkedVersion = refs[wf.id]; const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); + console.log( + `${wf.id}: forked_from: ${forkedVersion}, remote: ${remoteVersion}` + ); if (forkedVersion !== remoteVersion) { diverged ??= []; diverged.push(wf.id); diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index e36d9da67..cdd182a16 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -56,7 +56,11 @@ export const generateHash = ( wfKeys.forEach((key) => { const value = get(workflow, key); if (isDefined(value)) { - parts.push(serializeValue(value)); + if (key === 'name') { + parts.push(value.toLowerCase()); + } else { + parts.push(serializeValue(value)); + } } }); diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index 4f34e8843..f6a8be547 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -526,3 +526,59 @@ test('works without UUIDs', (t) => { const hash = workflow.getVersionHash({ sha: false }); t.is(hash, 'cli:awebhookfn(s => s)Transform datatruewebhook-Transform data'); }); + +// This is a real issue found against lightning +test('Should be consistent with name casing', async (t) => { + const yaml = `id: sandbox-test +name: sandbox-test +cli: + version: 2 +description: abc +collections: [] +credentials: [] +openfn: + uuid: 91e9906a-28b9-4497-9d5f-22b64a55c8dd + endpoint: http://localhost:4000 + inserted_at: 2025-12-19T15:24:22Z + updated_at: 2026-02-09T15:32:58Z +options: + allow_support_access: false + requires_mfa: false +
retention_policy: retain_all +workflows: + - name: X'Y + steps: + - id: a + name: a + expression: . + adaptor: "@openfn/language-common@latest" + openfn: + uuid: e2c857d2-5405-4e7e-8187-293730151172 + - id: webhook + type: webhook + enabled: true + openfn: + uuid: 0e77c576-6c70-49e0-82e3-3473653ccdaa + next: + a: + disabled: false + condition: always + openfn: + uuid: 2c4b0b25-1567-4873-8518-658a22dcbcdd + history: + - app:dc5618fdc911 + openfn: + uuid: 43198acb-41f0-48a0-8d6f-00506fee5b95 + inserted_at: 2026-02-09T16:12:03Z + updated_at: 2026-02-09T16:12:18Z + lock_version: 4 + id: x-y + start: webhook +`; + + const expectedVersion = 'cli:dc5618fdc911'; + + const project = await Project.from('project', yaml, { format: 'yaml' }); + const hash = project.workflows[0].getVersionHash(); + t.is(hash, expectedVersion); +}); From c1fc1b2f502ee605936f0c543ce002eaed803abe Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 16:50:04 +0000 Subject: [PATCH 27/34] project: allow a filter for workflow diffs --- packages/project/src/Project.ts | 4 +- packages/project/src/util/project-diff.ts | 15 ++++- .../project/test/util/project-diff.test.ts | 64 +++++++++++++++++++ .../test/util/version-workflow.test.ts | 56 ---------------- 4 files changed, 80 insertions(+), 59 deletions(-) diff --git a/packages/project/src/Project.ts b/packages/project/src/Project.ts index 34fc40989..72c3ad004 100644 --- a/packages/project/src/Project.ts +++ b/packages/project/src/Project.ts @@ -242,8 +242,8 @@ export class Project { } // Compare this project with another and return a list of workflow changes - diff(project: Project) { - return projectDiff(this, project); + diff(project: Project, workflows: string[] = []) { + return projectDiff(this, project, workflows); } canMergeInto(target: Project) { diff --git a/packages/project/src/util/project-diff.ts b/packages/project/src/util/project-diff.ts index 7e849130a..37ecf60e1 100644 --- a/packages/project/src/util/project-diff.ts +++ b/packages/project/src/util/project-diff.ts @@ -28,11 +28,20 @@ export type WorkflowDiff = { * // Shows how staging has diverged from main * ``` */ -export function diff(a: Project, b: Project): WorkflowDiff[] { +export function diff( + a: Project, + b: Project, + // only consider these workflows + workflows?: string[] +): WorkflowDiff[] { const diffs: WorkflowDiff[] = []; // Check all of project A's workflows for (const workflowA of a.workflows) { + if (workflows?.length && !workflows.includes(workflowA.id)) { + continue; + } + const workflowB = b.getWorkflow(workflowA.id); if (!workflowB) { @@ -46,6 +55,10 @@ export function diff(a: Project, b: Project): WorkflowDiff[] { // Check for workflows that were added in B for (const workflowB of b.workflows) { + if (workflows?.length && !workflows.includes(workflowB.id)) { + continue; + } + if (!a.getWorkflow(workflowB.id)) { // workflow exists in B but not in A = added diffs.push({ id: workflowB.id, type: 'added' }); diff --git a/packages/project/test/util/project-diff.test.ts b/packages/project/test/util/project-diff.test.ts index 82d668113..e15218ad0 100644 --- a/packages/project/test/util/project-diff.test.ts +++ b/packages/project/test/util/project-diff.test.ts @@ -43,6 +43,27 @@ test('diff: should detect changed workflow', (t) => { t.deepEqual(diffs[0], { id: wfA.id, type: 'changed' }); }); +test('diff: should only consider changed workflows from a filter list', (t) => { + const wfA = generateWorkflow('trigger-x'); + const wfB = generateWorkflow('trigger-y'); + // Make sure they have the 
same id but different content + wfB.id = wfA.id; + + const projectA = new Project({ + name: 'project-a', + workflows: [wfA], + }); + + const projectB = new Project({ + name: 'project-b', + workflows: [wfB], + }); + + const diffs = diff(projectA, projectB, ['xxx']); + + t.is(diffs.length, 0); +}); + test('diff: should detect added workflow', (t) => { const wf1 = generateWorkflow('@id a trigger-x'); const wf2 = generateWorkflow('@id b trigger-y'); @@ -63,6 +84,27 @@ test('diff: should detect added workflow', (t) => { t.deepEqual(diffs[0], { id: wf2.id, type: 'added' }); }); +test('diff: should only consider added workflows from a filter list', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-y'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1, wf2, wf3], + }); + + const diffs = diff(projectA, projectB, ['b']); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'added' }); +}); + test('diff: should detect removed workflow', (t) => { const wf1 = generateWorkflow('@id a trigger-x'); const wf2 = generateWorkflow('@id b trigger-y'); @@ -83,6 +125,28 @@ test('diff: should detect removed workflow', (t) => { t.deepEqual(diffs[0], { id: wf2.id, type: 'removed' }); }); +test('diff: should only consider removed workflows from a filter list', (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + const wf3 = generateWorkflow('@id c trigger-z'); + + const projectA = new Project({ + name: 'a', + workflows: [wf1, wf2, wf3], + }); + + const projectB = new Project({ + name: 'b', + workflows: [wf1], // remove b and c + }); + + // only compare on b + const diffs = diff(projectA, projectB, ['b']); + + t.is(diffs.length, 1); + t.deepEqual(diffs[0], { id: wf2.id, type: 'removed' }); +}); + test('diff: should detect multiple changes at once', (t) => { const wf1 = generateWorkflow('@id a trigger-x'); const wf2 = generateWorkflow('@id b trigger-y'); diff --git a/packages/project/test/util/version-workflow.test.ts b/packages/project/test/util/version-workflow.test.ts index f6a8be547..4f34e8843 100644 --- a/packages/project/test/util/version-workflow.test.ts +++ b/packages/project/test/util/version-workflow.test.ts @@ -526,59 +526,3 @@ test('works without UUIDs', (t) => { const hash = workflow.getVersionHash({ sha: false }); t.is(hash, 'cli:awebhookfn(s => s)Transform datatruewebhook-Transform data'); }); - -// This is a real issue found against lightning -test('Should be consistent with name casing', async (t) => { - const yaml = `id: sandbox-test -name: sandbox-test -cli: - version: 2 -description: abc -collections: [] -credentials: [] -openfn: - uuid: 91e9906a-28b9-4497-9d5f-22b64a55c8dd - endpoint: http://localhost:4000 - inserted_at: 2025-12-19T15:24:22Z - updated_at: 2026-02-09T15:32:58Z -options: - allow_support_access: false - requires_mfa: false - retention_policy: retain_all -workflows: - - name: X'Y - steps: - - id: a - name: a - expression: . 
- adaptor: "@openfn/language-common@latest" - openfn: - uuid: e2c857d2-5405-4e7e-8187-293730151172 - - id: webhook - type: webhook - enabled: true - openfn: - uuid: 0e77c576-6c70-49e0-82e3-3473653ccdaa - next: - a: - disabled: false - condition: always - openfn: - uuid: 2c4b0b25-1567-4873-8518-658a22dcbcdd - history: - - app:dc5618fdc911 - openfn: - uuid: 43198acb-41f0-48a0-8d6f-00506fee5b95 - inserted_at: 2026-02-09T16:12:03Z - updated_at: 2026-02-09T16:12:18Z - lock_version: 4 - id: x-y - start: webhook -`; - - const expectedVersion = 'cli:dc5618fdc911'; - - const project = await Project.from('project', yaml, { format: 'yaml' }); - const hash = project.workflows[0].getVersionHash(); - t.is(hash, expectedVersion); -}); From 66c31244dd77fef5064ebf41eade54d2a2530a1d Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 17:40:06 +0000 Subject: [PATCH 28/34] revert lowercase --- packages/cli/src/projects/deploy.ts | 80 +++++++++++++-- packages/cli/src/projects/util.ts | 1 + packages/cli/src/projects/version.ts | 7 +- packages/cli/test/projects/deploy.test.ts | 116 +++++++++++++++++++++- packages/project/src/util/version.ts | 6 +- 5 files changed, 194 insertions(+), 16 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index a0336b6ac..422128e02 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -70,7 +70,8 @@ export const command: yargs.CommandModule = { export const hasRemoteDiverged = ( local: Project, - remote: Project + remote: Project, + workflows: string[] ): string[] | null => { let diverged: string[] | null = null; @@ -78,7 +79,7 @@ export const hasRemoteDiverged = ( // for each workflow, check that the local fetched_from is the head of the remote history for (const wf of local.workflows) { - if (wf.id in refs) { + if (workflows.includes(wf.id) && wf.id in refs) { const forkedVersion = refs[wf.id]; const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); console.log( @@ -156,8 +157,19 @@ Pass --force to override this error and deploy anyway.`); return false; } + const locallyChangedWorkflows = await findLocallyChangedWorkflows( + ws, + localProject + ); + console.log({ locallyChangedWorkflows }); + // TODO: what if remote diff and the version checked disagree for some reason? - const diffs = reportDiff(localProject, remoteProject, logger); + const diffs = reportDiff( + localProject, + remoteProject, + locallyChangedWorkflows, + logger + ); if (!diffs.length) { logger.success('Nothing to deploy'); return; @@ -180,7 +192,11 @@ Pass --force to override this error and deploy anyway.`); ); logger.warn('Pushing these changes may overwrite changes made to the app'); } else { - const divergentWorkflows = hasRemoteDiverged(localProject, remoteProject!); + const divergentWorkflows = hasRemoteDiverged( + localProject, + remoteProject!, + locallyChangedWorkflows + ); if (divergentWorkflows) { logger.warn( `The following workflows have diverged: ${divergentWorkflows}` @@ -283,8 +299,17 @@ Pass --force to override this error and deploy anyway.`); } } -export const reportDiff = (local: Project, remote: Project, logger: Logger) => { - const diffs = remote.diff(local); +export const reportDiff = ( + local: Project, + remote: Project, + locallyChangedWorkflows: string[], + logger: Logger +) => { + // TODO something is wrong here! 
+ // this just says the differences between local and remote + // but i want to ignore remote changes and only get a diff for anything + // where the local has changed since forked_from + const diffs = remote.diff(local, locallyChangedWorkflows); if (diffs.length === 0) { logger.info('No workflow changes detected'); return diffs; @@ -324,3 +349,46 @@ export const reportDiff = (local: Project, remote: Project, logger: Logger) => { return diffs; }; ``; + +export const findLocallyChangedWorkflows = async ( + workspace: Workspace, + project: Project +) => { + // Check openfn.yaml for the forked_from versions + const { forked_from } = workspace.activeProject ?? {}; + + // If there are no forked_from references, we have no baseline + // so assume everything has changed + if (!forked_from || Object.keys(forked_from).length === 0) { + return project.workflows.map((w) => w.id); + } + + const changedWorkflows: string[] = []; + + // Check for changed and added workflows + for (const workflow of project.workflows) { + const currentHash = workflow.getVersionHash(); + const forkedHash = forked_from[workflow.id]; + console.log(currentHash, forkedHash); + + if (forkedHash === undefined) { + // Workflow is not in forked_from, so it's been added locally + changedWorkflows.push(workflow.id); + } else if (forkedHash !== currentHash) { + // Workflow exists but hash has changed + changedWorkflows.push(workflow.id); + } + // else: hash matches, no change + } + + // Check for removed workflows + const currentWorkflowIds = new Set(project.workflows.map((w) => w.id)); + for (const workflowId in forked_from) { + if (!currentWorkflowIds.has(workflowId)) { + // Workflow was in forked_from but is no longer in the project + changedWorkflows.push(workflowId); + } + } + + return changedWorkflows; +}; diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index fa87547eb..7f55d8b8b 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -228,6 +228,7 @@ export async function tidyWorkflowDir( } export const updateForkedFrom = (proj: Project) => { + console.log('>> UPDATING FORKED FROM'); proj.cli.forked_from = proj.workflows.reduce((obj: any, wf) => { if (wf.history.length) { obj[wf.id] = wf.history.at(-1); diff --git a/packages/cli/src/projects/version.ts b/packages/cli/src/projects/version.ts index dfe0c90bf..9184e4fc8 100644 --- a/packages/cli/src/projects/version.ts +++ b/packages/cli/src/projects/version.ts @@ -39,10 +39,13 @@ export const handler = async (options: VersionOptions, logger: Logger) => { logger.error(`No workflow found with id ${options.workflow}`); return; } - output.set(workflow.name || workflow.id, workflow.getVersionHash()); + output.set( + workflow.name || workflow.id, + workflow.getVersionHash({ sha: false }) + ); } else { for (const wf of activeProject?.workflows || []) { - output.set(wf.name || wf.id, wf.getVersionHash()); + output.set(wf.name || wf.id, wf.getVersionHash({ sha: false })); } } if (!output.size) { diff --git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts index 47ddf0d45..0e442cfd8 100644 --- a/packages/cli/test/projects/deploy.test.ts +++ b/packages/cli/test/projects/deploy.test.ts @@ -4,14 +4,13 @@ import mock from 'mock-fs'; import path from 'node:path'; import Project, { generateWorkflow } from '@openfn/project'; import { createMockLogger } from '@openfn/logger'; -import createLightningServer, { - DEFAULT_PROJECT_ID, -} from '@openfn/lightning-mock'; +import 
createLightningServer from '@openfn/lightning-mock'; import { handler as deployHandler, hasRemoteDiverged, reportDiff, + findLocallyChangedWorkflows, } from '../../src/projects/deploy'; import { myProject_yaml, myProject_v1 } from './fixtures'; import { checkout } from '../../src/projects'; @@ -329,3 +328,114 @@ test('hasRemoteDiverged: 1 workflow, 1 diverged', (t) => { const diverged = hasRemoteDiverged(local, remote); t.deepEqual(diverged, ['w']); }); + +test('findLocallyChangedWorkflows: no changed workflows', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const hash1 = wf1.getVersionHash(); + const hash2 = wf2.getVersionHash(); + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + // Create a mock workspace with forked_from that matches current hashes + const workspace = { + activeProject: { + forked_from: { + a: hash1, + b: hash2, + }, + }, + } as any; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, []); +}); + +test('findLocallyChangedWorkflows: all workflows changed if there is no forked_from', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + // Create a mock workspace with NO forked_from + const workspace = { + activeProject: {}, + } as any; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['a', 'b']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally changed workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-z'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + b: wf2.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + project.workflows[0].name = 'changed'; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['a']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally added workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['b']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally removed workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + b: wf2.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1], + }); + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['b']); +}); diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index cdd182a16..e36d9da67 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -56,11 +56,7 @@ export const generateHash = ( wfKeys.forEach((key) => { const value = get(workflow, key); if (isDefined(value)) { - if (key === 'name') { - 
parts.push(value.toLowerCase()); - } else { - parts.push(serializeValue(value)); - } + parts.push(serializeValue(value)); } }); From 9a5596d88882b59ae7ebfb7709530062f749050f Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 17:46:03 +0000 Subject: [PATCH 29/34] revert lowercase --- packages/cli/src/projects/deploy.ts | 3 --- packages/cli/src/projects/version.ts | 7 ++----- packages/project/src/index.ts | 4 ++++ packages/project/src/util/version.ts | 16 ++++++++++++++-- 4 files changed, 20 insertions(+), 10 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index 422128e02..12e153dde 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -82,9 +82,6 @@ export const hasRemoteDiverged = ( if (workflows.includes(wf.id) && wf.id in refs) { const forkedVersion = refs[wf.id]; const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); - console.log( - `${wf.id}: forked_from: ${forkedVersion}, remote: ${remoteVersion}` - ); if (forkedVersion !== remoteVersion) { diverged ??= []; diverged.push(wf.id); diff --git a/packages/cli/src/projects/version.ts b/packages/cli/src/projects/version.ts index 9184e4fc8..f78a2f696 100644 --- a/packages/cli/src/projects/version.ts +++ b/packages/cli/src/projects/version.ts @@ -39,13 +39,10 @@ export const handler = async (options: VersionOptions, logger: Logger) => { logger.error(`No workflow found with id ${options.workflow}`); return; } - output.set( - workflow.name || workflow.id, - workflow.getVersionHash({ sha: false }) - ); + output.set(workflow.name || workflow.id, workflow.getVersionHash({})); } else { for (const wf of activeProject?.workflows || []) { - output.set(wf.name || wf.id, wf.getVersionHash({ sha: false })); + output.set(wf.name || wf.id, wf.getVersionHash({})); } } if (!output.size) { diff --git a/packages/project/src/index.ts b/packages/project/src/index.ts index 5ccb4b034..b39da758a 100644 --- a/packages/project/src/index.ts +++ b/packages/project/src/index.ts @@ -10,3 +10,7 @@ export { generateWorkflow, generateProject } from './gen/generator'; export { diff } from './util/project-diff'; export type { WorkflowDiff, DiffType } from './util/project-diff'; +export { + generateHash as generateVersionHash, + match as versionsEqual, +} from './util/version'; diff --git a/packages/project/src/util/version.ts b/packages/project/src/util/version.ts index e36d9da67..df42b8bb1 100644 --- a/packages/project/src/util/version.ts +++ b/packages/project/src/util/version.ts @@ -10,8 +10,20 @@ function isDefined(v: any) { } export const parse = (version: string) => { - const [source, hash] = version.split(':'); - return { source, hash }; + if (version.match(':')) { + const [source, hash] = version.split(':'); + return { source, hash }; + } + return { hash: version }; +}; + +/** + * + * Compare two version hashes + * Ignores the source specifier (if present) + */ +export const match = (a: string, b: string) => { + return parse(a).hash === parse(b).hash; }; export type HashOptions = { From 6c4e78967fdcaa5495a54a23d56937a14de24a9a Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Mon, 9 Feb 2026 17:55:55 +0000 Subject: [PATCH 30/34] smarter traacking of diffs and divergence Tests are likely to break but the logic is about there --- packages/cli/src/projects/deploy.ts | 18 ++++++++---------- packages/cli/src/projects/util.ts | 1 - 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts 
b/packages/cli/src/projects/deploy.ts index 12e153dde..f680f3628 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -1,5 +1,5 @@ import yargs from 'yargs'; -import Project, { Workspace } from '@openfn/project'; +import Project, { versionsEqual, Workspace } from '@openfn/project'; import c from 'chalk'; import { writeFile } from 'node:fs/promises'; import path from 'node:path'; @@ -70,8 +70,7 @@ export const command: yargs.CommandModule = { export const hasRemoteDiverged = ( local: Project, - remote: Project, - workflows: string[] + remote: Project ): string[] | null => { let diverged: string[] | null = null; @@ -79,10 +78,10 @@ export const hasRemoteDiverged = ( // for each workflow, check that the local fetched_from is the head of the remote history for (const wf of local.workflows) { - if (workflows.includes(wf.id) && wf.id in refs) { + if (wf.id in refs) { const forkedVersion = refs[wf.id]; const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); - if (forkedVersion !== remoteVersion) { + if (!versionsEqual(forkedVersion, remoteVersion!)) { diverged ??= []; diverged.push(wf.id); } @@ -158,7 +157,6 @@ Pass --force to override this error and deploy anyway.`); ws, localProject ); - console.log({ locallyChangedWorkflows }); // TODO: what if remote diff and the version checked disagree for some reason? const diffs = reportDiff( @@ -189,10 +187,11 @@ Pass --force to override this error and deploy anyway.`); ); logger.warn('Pushing these changes may overwrite changes made to the app'); } else { + console.log({ locallyChangedWorkflows }); const divergentWorkflows = hasRemoteDiverged( localProject, - remoteProject!, - locallyChangedWorkflows + remoteProject! + // locallyChangedWorkflows ); if (divergentWorkflows) { logger.warn( @@ -366,12 +365,11 @@ export const findLocallyChangedWorkflows = async ( for (const workflow of project.workflows) { const currentHash = workflow.getVersionHash(); const forkedHash = forked_from[workflow.id]; - console.log(currentHash, forkedHash); if (forkedHash === undefined) { // Workflow is not in forked_from, so it's been added locally changedWorkflows.push(workflow.id); - } else if (forkedHash !== currentHash) { + } else if (!versionsEqual(currentHash, forkedHash)) { // Workflow exists but hash has changed changedWorkflows.push(workflow.id); } diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index 7f55d8b8b..fa87547eb 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -228,7 +228,6 @@ export async function tidyWorkflowDir( } export const updateForkedFrom = (proj: Project) => { - console.log('>> UPDATING FORKED FROM'); proj.cli.forked_from = proj.workflows.reduce((obj: any, wf) => { if (wf.history.length) { obj[wf.id] = wf.history.at(-1); From 0ab8392993e6a73af22d2c9f961a145f693a7607 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Tue, 10 Feb 2026 12:17:03 +0000 Subject: [PATCH 31/34] warn when checkout may result in lost work --- packages/cli/src/projects/checkout.ts | 35 ++++++- packages/cli/src/projects/deploy.ts | 43 +------- packages/cli/src/projects/util.ts | 43 ++++++++ packages/cli/test/projects/deploy.test.ts | 122 +--------------------- packages/cli/test/projects/util.test.ts | 116 +++++++++++++++++++- 5 files changed, 196 insertions(+), 163 deletions(-) diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 1bb2886db..65ed41776 100644 --- a/packages/cli/src/projects/checkout.ts +++ 
b/packages/cli/src/projects/checkout.ts @@ -10,14 +10,18 @@ import * as o from '../options'; import * as po from './options'; import type { Opts } from './options'; -import { tidyWorkflowDir, updateForkedFrom } from './util'; +import { + findLocallyChangedWorkflows, + tidyWorkflowDir, + updateForkedFrom, +} from './util'; export type CheckoutOptions = Pick< Opts, - 'command' | 'project' | 'workspace' | 'log' | 'clean' + 'command' | 'project' | 'workspace' | 'log' | 'clean' | 'force' >; -const options = [o.log, po.workspace, po.clean]; +const options = [o.log, po.workspace, po.clean, o.force]; const command: yargs.CommandModule = { command: 'checkout ', @@ -62,6 +66,31 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { ); } + // get the current state of the checked out project + const localProject = await Project.from('fs', { + root: options.workspace || '.', + }); + logger.success(`Loaded local project ${localProject.alias}`); + const changed = await findLocallyChangedWorkflows(workspace, localProject); + if (changed.length && !options.force) { + logger.break(); + logger.warn( + 'WARNING: detected changes on your currently checked-out project' + ); + logger.warn( + `Changes may be lost by checking out ${localProject.alias} right now` + ); + logger.warn(`Pass --force or -f to override this warning and continue`); + // TODO log to run with force + // TODO need to implement a save function + const e = new Error( + `The currently checked out project has diverged! Changes may be lost` + ); + delete e.stack; + throw e; + } + // Check whether the checked out project has diverged from its forked from versions + // delete workflow dir before expanding project if (options.clean) { await rimraf(workspace.workflowsPath); diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index f680f3628..d21dd589b 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -13,6 +13,7 @@ import { serialize, getSerializePath, updateForkedFrom, + findLocallyChangedWorkflows, } from './util'; import { build, ensure } from '../util/command-builders'; @@ -345,45 +346,3 @@ export const reportDiff = ( return diffs; }; ``; - -export const findLocallyChangedWorkflows = async ( - workspace: Workspace, - project: Project -) => { - // Check openfn.yaml for the forked_from versions - const { forked_from } = workspace.activeProject ?? 
{}; - - // If there are no forked_from references, we have no baseline - // so assume everything has changed - if (!forked_from || Object.keys(forked_from).length === 0) { - return project.workflows.map((w) => w.id); - } - - const changedWorkflows: string[] = []; - - // Check for changed and added workflows - for (const workflow of project.workflows) { - const currentHash = workflow.getVersionHash(); - const forkedHash = forked_from[workflow.id]; - - if (forkedHash === undefined) { - // Workflow is not in forked_from, so it's been added locally - changedWorkflows.push(workflow.id); - } else if (!versionsEqual(currentHash, forkedHash)) { - // Workflow exists but hash has changed - changedWorkflows.push(workflow.id); - } - // else: hash matches, no change - } - - // Check for removed workflows - const currentWorkflowIds = new Set(project.workflows.map((w) => w.id)); - for (const workflowId in forked_from) { - if (!currentWorkflowIds.has(workflowId)) { - // Workflow was in forked_from but is no longer in the project - changedWorkflows.push(workflowId); - } - } - - return changedWorkflows; -}; diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index fa87547eb..a09996607 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -8,6 +8,7 @@ import type Project from '@openfn/project'; import { CLIError } from '../errors'; import resolvePath from '../util/resolve-path'; import { rimraf } from 'rimraf'; +import { versionsEqual, Workspace } from '@openfn/project'; type AuthOptions = Pick; @@ -237,3 +238,45 @@ export const updateForkedFrom = (proj: Project) => { return proj; }; + +export const findLocallyChangedWorkflows = async ( + workspace: Workspace, + project: Project +) => { + // Check openfn.yaml for the forked_from versions + const { forked_from } = workspace.activeProject ?? 
{}; + + // If there are no forked_from references, we have no baseline + // so assume everything has changed + if (!forked_from || Object.keys(forked_from).length === 0) { + return project.workflows.map((w) => w.id); + } + + const changedWorkflows: string[] = []; + + // Check for changed and added workflows + for (const workflow of project.workflows) { + const currentHash = workflow.getVersionHash(); + const forkedHash = forked_from[workflow.id]; + + if (forkedHash === undefined) { + // Workflow is not in forked_from, so it's been added locally + changedWorkflows.push(workflow.id); + } else if (!versionsEqual(currentHash, forkedHash)) { + // Workflow exists but hash has changed + changedWorkflows.push(workflow.id); + } + // else: hash matches, no change + } + + // Check for removed workflows + const currentWorkflowIds = new Set(project.workflows.map((w) => w.id)); + for (const workflowId in forked_from) { + if (!currentWorkflowIds.has(workflowId)) { + // Workflow was in forked_from but is no longer in the project + changedWorkflows.push(workflowId); + } + } + + return changedWorkflows; +}; diff --git a/packages/cli/test/projects/deploy.test.ts b/packages/cli/test/projects/deploy.test.ts index 0e442cfd8..953f7a4be 100644 --- a/packages/cli/test/projects/deploy.test.ts +++ b/packages/cli/test/projects/deploy.test.ts @@ -10,7 +10,6 @@ import { handler as deployHandler, hasRemoteDiverged, reportDiff, - findLocallyChangedWorkflows, } from '../../src/projects/deploy'; import { myProject_yaml, myProject_v1 } from './fixtures'; import { checkout } from '../../src/projects'; @@ -69,7 +68,7 @@ test('reportDiff: should report no changes for identical projects', (t) => { workflows: [wf], }); - const diffs = reportDiff(local, remote, logger); + const diffs = reportDiff(local, remote, [], logger); t.is(diffs.length, 0); const { message, level } = logger._parse(logger._last); @@ -91,7 +90,7 @@ test('reportDiff: should report changed workflow', (t) => { workflows: [wfRemote], }); - const diffs = reportDiff(local, remote, logger); + const diffs = reportDiff(local, remote, [], logger); t.is(diffs.length, 1); t.deepEqual(diffs[0], { id: 'a', type: 'changed' }); @@ -113,7 +112,7 @@ test('reportDiff: should report added workflow', (t) => { workflows: [wf1], }); - const diffs = reportDiff(local, remote, logger); + const diffs = reportDiff(local, remote, [], logger); t.is(diffs.length, 1); t.deepEqual(diffs[0], { id: 'b', type: 'added' }); @@ -135,7 +134,7 @@ test('reportDiff: should report removed workflow', (t) => { workflows: [wf1, wf2], }); - const diffs = reportDiff(local, remote, logger); + const diffs = reportDiff(local, remote, [], logger); t.is(diffs.length, 1); t.deepEqual(diffs[0], { id: 'b', type: 'removed' }); @@ -160,7 +159,7 @@ test('reportDiff: should report mix of added, changed, and removed workflows', ( workflows: [wf1, wf2Remote, wf3], // has a, b, c }); - const diffs = reportDiff(local, remote, logger); + const diffs = reportDiff(local, remote, [], logger); t.is(diffs.length, 3); t.deepEqual( @@ -328,114 +327,3 @@ test('hasRemoteDiverged: 1 workflow, 1 diverged', (t) => { const diverged = hasRemoteDiverged(local, remote); t.deepEqual(diverged, ['w']); }); - -test('findLocallyChangedWorkflows: no changed workflows', async (t) => { - const wf1 = generateWorkflow('@id a trigger-x'); - const wf2 = generateWorkflow('@id b trigger-y'); - - const hash1 = wf1.getVersionHash(); - const hash2 = wf2.getVersionHash(); - - const project = new Project({ - name: 'test', - workflows: [wf1, wf2], - }); - - 
// Create a mock workspace with forked_from that matches current hashes - const workspace = { - activeProject: { - forked_from: { - a: hash1, - b: hash2, - }, - }, - } as any; - - const changed = await findLocallyChangedWorkflows(workspace, project); - t.deepEqual(changed, []); -}); - -test('findLocallyChangedWorkflows: all workflows changed if there is no forked_from', async (t) => { - const wf1 = generateWorkflow('@id a trigger-x'); - const wf2 = generateWorkflow('@id b trigger-y'); - - const project = new Project({ - name: 'test', - workflows: [wf1, wf2], - }); - - // Create a mock workspace with NO forked_from - const workspace = { - activeProject: {}, - } as any; - - const changed = await findLocallyChangedWorkflows(workspace, project); - t.deepEqual(changed, ['a', 'b']); -}); - -test('findLocallyChangedWorkflows: detect 1 locally changed workflow', async (t) => { - const wf1 = generateWorkflow('@id a trigger-x'); - const wf2 = generateWorkflow('@id b trigger-z'); - - const workspace = { - activeProject: { - forked_from: { - a: wf1.getVersionHash(), - b: wf2.getVersionHash(), - }, - }, - } as any; - - const project = new Project({ - name: 'test', - workflows: [wf1, wf2], - }); - - project.workflows[0].name = 'changed'; - - const changed = await findLocallyChangedWorkflows(workspace, project); - t.deepEqual(changed, ['a']); -}); - -test('findLocallyChangedWorkflows: detect 1 locally added workflow', async (t) => { - const wf1 = generateWorkflow('@id a trigger-x'); - const wf2 = generateWorkflow('@id b trigger-y'); - - const workspace = { - activeProject: { - forked_from: { - a: wf1.getVersionHash(), - }, - }, - } as any; - - const project = new Project({ - name: 'test', - workflows: [wf1, wf2], - }); - - const changed = await findLocallyChangedWorkflows(workspace, project); - t.deepEqual(changed, ['b']); -}); - -test('findLocallyChangedWorkflows: detect 1 locally removed workflow', async (t) => { - const wf1 = generateWorkflow('@id a trigger-x'); - const wf2 = generateWorkflow('@id b trigger-y'); - - const workspace = { - activeProject: { - forked_from: { - a: wf1.getVersionHash(), - b: wf2.getVersionHash(), - }, - }, - } as any; - - const project = new Project({ - name: 'test', - workflows: [wf1], - }); - - const changed = await findLocallyChangedWorkflows(workspace, project); - t.deepEqual(changed, ['b']); -}); diff --git a/packages/cli/test/projects/util.test.ts b/packages/cli/test/projects/util.test.ts index 681a238f4..b4486ee17 100644 --- a/packages/cli/test/projects/util.test.ts +++ b/packages/cli/test/projects/util.test.ts @@ -1,6 +1,9 @@ import test from 'ava'; import Project, { generateWorkflow } from '@openfn/project'; -import { tidyWorkflowDir } from '../../src/projects/util'; +import { + findLocallyChangedWorkflows, + tidyWorkflowDir, +} from '../../src/projects/util'; test('tidyWorkflowDir: removes workflows that no longer exist', async (t) => { const currentProject = new Project({ @@ -157,3 +160,114 @@ test('tidyWorkflowDir: removes expression files when workflow steps change', asy t.deepEqual(toRemove, ['workflows/A/x.js']); }); + +test('findLocallyChangedWorkflows: no changed workflows', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const hash1 = wf1.getVersionHash(); + const hash2 = wf2.getVersionHash(); + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + // Create a mock workspace with forked_from that matches current hashes + const workspace = { + 
activeProject: { + forked_from: { + a: hash1, + b: hash2, + }, + }, + } as any; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, []); +}); + +test('findLocallyChangedWorkflows: all workflows changed if there is no forked_from', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + // Create a mock workspace with NO forked_from + const workspace = { + activeProject: {}, + } as any; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['a', 'b']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally changed workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-z'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + b: wf2.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + project.workflows[0].name = 'changed'; + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['a']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally added workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1, wf2], + }); + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['b']); +}); + +test('findLocallyChangedWorkflows: detect 1 locally removed workflow', async (t) => { + const wf1 = generateWorkflow('@id a trigger-x'); + const wf2 = generateWorkflow('@id b trigger-y'); + + const workspace = { + activeProject: { + forked_from: { + a: wf1.getVersionHash(), + b: wf2.getVersionHash(), + }, + }, + } as any; + + const project = new Project({ + name: 'test', + workflows: [wf1], + }); + + const changed = await findLocallyChangedWorkflows(workspace, project); + t.deepEqual(changed, ['b']); +}); From 1104b90370aa0d944fbdddce9d84735b30047c87 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Tue, 10 Feb 2026 13:14:10 +0000 Subject: [PATCH 32/34] fix test --- packages/cli/src/projects/checkout.ts | 6 +++++- packages/cli/src/projects/util.ts | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 65ed41776..55ab722e7 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -71,7 +71,11 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { root: options.workspace || '.', }); logger.success(`Loaded local project ${localProject.alias}`); - const changed = await findLocallyChangedWorkflows(workspace, localProject); + const changed = await findLocallyChangedWorkflows( + workspace, + localProject, + 'assume-ok' + ); if (changed.length && !options.force) { logger.break(); logger.warn( diff --git a/packages/cli/src/projects/util.ts b/packages/cli/src/projects/util.ts index a09996607..f86bfac53 100644 --- a/packages/cli/src/projects/util.ts +++ b/packages/cli/src/projects/util.ts @@ -241,7 +241,8 @@ export const updateForkedFrom = (proj: Project) => { export const findLocallyChangedWorkflows = async ( workspace: 
Workspace, - project: Project + project: Project, + ifNoForkedFrom: 'assume-ok' | 'assume-diverged' = 'assume-diverged' ) => { // Check openfn.yaml for the forked_from versions const { forked_from } = workspace.activeProject ?? {}; @@ -249,6 +250,9 @@ export const findLocallyChangedWorkflows = async ( // If there are no forked_from references, we have no baseline // so assume everything has changed if (!forked_from || Object.keys(forked_from).length === 0) { + if (ifNoForkedFrom === 'assume-ok') { + return []; + } return project.workflows.map((w) => w.id); } From d7ec251f972d11afa972a0b692fe05619ff07975 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Tue, 10 Feb 2026 13:23:22 +0000 Subject: [PATCH 33/34] little fix to checkout for uninitialised repos --- packages/cli/src/projects/checkout.ts | 56 +++++++++++++++------------ 1 file changed, 32 insertions(+), 24 deletions(-) diff --git a/packages/cli/src/projects/checkout.ts b/packages/cli/src/projects/checkout.ts index 55ab722e7..3e7b624aa 100644 --- a/packages/cli/src/projects/checkout.ts +++ b/packages/cli/src/projects/checkout.ts @@ -67,31 +67,39 @@ export const handler = async (options: CheckoutOptions, logger: Logger) => { } // get the current state of the checked out project - const localProject = await Project.from('fs', { - root: options.workspace || '.', - }); - logger.success(`Loaded local project ${localProject.alias}`); - const changed = await findLocallyChangedWorkflows( - workspace, - localProject, - 'assume-ok' - ); - if (changed.length && !options.force) { - logger.break(); - logger.warn( - 'WARNING: detected changes on your currently checked-out project' + try { + const localProject = await Project.from('fs', { + root: options.workspace || '.', + }); + logger.success(`Loaded local project ${localProject.alias}`); + const changed = await findLocallyChangedWorkflows( + workspace, + localProject, + 'assume-ok' ); - logger.warn( - `Changes may be lost by checking out ${localProject.alias} right now` - ); - logger.warn(`Pass --force or -f to override this warning and continue`); - // TODO log to run with force - // TODO need to implement a save function - const e = new Error( - `The currently checked out project has diverged! Changes may be lost` - ); - delete e.stack; - throw e; + if (changed.length && !options.force) { + logger.break(); + logger.warn( + 'WARNING: detected changes on your currently checked-out project' + ); + logger.warn( + `Changes may be lost by checking out ${localProject.alias} right now` + ); + logger.warn(`Pass --force or -f to override this warning and continue`); + // TODO log to run with force + // TODO need to implement a save function + const e = new Error( + `The currently checked out project has diverged! 
Changes may be lost` + ); + delete e.stack; + throw e; + } + } catch (e: any) { + if (e.message.match('ENOENT')) { + logger.debug('No openfn.yaml found locally: skipping divergence test'); + } else { + throw e; + } } // Check whether the checked out project has diverged from its forked from versions From 7442d3fff71da948b04187178ed569a1d2105304 Mon Sep 17 00:00:00 2001 From: Joe Clark Date: Tue, 10 Feb 2026 13:40:30 +0000 Subject: [PATCH 34/34] fixes --- packages/cli/src/projects/deploy.ts | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/packages/cli/src/projects/deploy.ts b/packages/cli/src/projects/deploy.ts index d21dd589b..2e2f6b8ff 100644 --- a/packages/cli/src/projects/deploy.ts +++ b/packages/cli/src/projects/deploy.ts @@ -71,7 +71,8 @@ export const command: yargs.CommandModule = { export const hasRemoteDiverged = ( local: Project, - remote: Project + remote: Project, + workflows: string[] // this was problematic for some reason ): string[] | null => { let diverged: string[] | null = null; @@ -79,7 +80,7 @@ export const hasRemoteDiverged = ( // for each workflow, check that the local fetched_from is the head of the remote history for (const wf of local.workflows) { - if (wf.id in refs) { + if (workflows.includes(wf.id) && wf.id in refs) { const forkedVersion = refs[wf.id]; const remoteVersion = remote.getWorkflow(wf.id)?.history.at(-1); if (!versionsEqual(forkedVersion, remoteVersion!)) { @@ -176,11 +177,9 @@ Pass --force to override this error and deploy anyway.`); // Skip divergence testing if the remote has no history in its workflows // (this will only happen on older versions of lightning) // TODO now maybe skip if there's no forked_from - const skipVersionTest = - // localProject.workflows.find((wf) => wf.history.length === 0) || - remoteProject.workflows.find((wf) => wf.history.length === 0); - - // localProject.workflows.forEach((w) => console.log(w.history)); + const skipVersionTest = remoteProject.workflows.find( + (wf) => wf.history.length === 0 + ); if (skipVersionTest) { logger.warn( @@ -188,11 +187,10 @@ Pass --force to override this error and deploy anyway.`); ); logger.warn('Pushing these changes may overwrite changes made to the app'); } else { - console.log({ locallyChangedWorkflows }); const divergentWorkflows = hasRemoteDiverged( localProject, - remoteProject! - // locallyChangedWorkflows + remoteProject!, + locallyChangedWorkflows ); if (divergentWorkflows) { logger.warn( @@ -207,7 +205,7 @@ Pass --force to override this error and deploy anyway.`); return; } else { logger.warn( - 'Remote project has not diverged from local project! Pushing anyway as -f passed' + 'Remote project has diverged from local project! Pushing anyway as -f passed' ); } } else {