diff --git a/.circleci/config.yml b/.circleci/config.yml index e3fa7bc1ad..fe81dd272c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,6 +3,31 @@ version: 2.1 orbs: codecov: codecov/codecov@1.0.5 +# Shared configuration +defaults: + elixir_version: &elixir_version "1.18.3-otp-27" + nodejs_version: &nodejs_version "22.12.0" + docker_elixir: &docker_elixir + - image: elixir:1.18.3-otp-27 + docker_elixir_postgres: &docker_elixir_postgres + - image: elixir:1.18.3-otp-27 + - image: cimg/postgres:17.3 + environment: &environment + ERL_FLAGS: +S 4:4 + ASSERT_RECEIVE_TIMEOUT: 1000 + MIX_ENV: test + +executors: + elixir: + docker: *docker_elixir + environment: *environment + working_directory: /home/lightning/project + + elixir_with_postgres: + docker: *docker_elixir_postgres + environment: *environment + working_directory: /home/lightning/project + commands: install_node: description: "Install NodeJS from NodeSource" @@ -17,121 +42,210 @@ commands: curl -fsSL https://deb.nodesource.com/setup_${NODE_MAJOR}.x | bash - apt-get install -y nodejs=<< parameters.version >>-1nodesource1 -jobs: - build: - parameters: - elixir_version: - description: Elixir version - type: string - default: "1.18.3-otp-27" - nodejs_version: - description: NodeJS version - type: string - default: "22.12.0" - execute: - description: What steps to execute after build - type: steps - - parallelism: 1 - docker: - - image: elixir:<< parameters.elixir_version >> - - image: cimg/postgres:17.3 - environment: - ERL_FLAGS: +S 4:4 - ASSERT_RECEIVE_TIMEOUT: 1000 - MIX_ENV: test - working_directory: /home/lightning/project + setup_lightning_user: + description: "Create lightning user and configure sudo" + steps: + - run: + name: "Create lightning user" + command: adduser --home /home/lightning --system lightning + - run: + name: "Install sudo and configure environment passthrough" + command: | + apt-get update && apt-get install -y sudo + echo 'Defaults env_keep += "ERL_FLAGS ASSERT_RECEIVE_TIMEOUT MIX_ENV"' | \ + sudo EDITOR='tee -a' visudo + - run: + name: "Configure git safe directory" + command: git config --global --add safe.directory /home/lightning/project + attach_built_workspace: + description: "Attach workspace and restore ownership" steps: - - run: adduser --home /home/lightning --system lightning + - attach_workspace: + at: /home/lightning/project + - run: + name: "Restore ownership after workspace attach" + command: chown -R lightning /home/lightning + - run: + name: "Install Hex and Rebar" + command: sudo -u lightning mix local.hex --force && sudo -u lightning mix local.rebar --force + +jobs: + # ============================================================================ + # COMPILE JOB - Builds everything once, persists to workspace + # ============================================================================ + compile: + executor: elixir + steps: + - setup_lightning_user - checkout + - run: + name: "Set ownership" + command: chown -R lightning /home/lightning - install_node: - version: << parameters.nodejs_version >> + version: *nodejs_version - run: - name: "Save Elixir and Erlang version for PLT caching" + name: "Save Elixir and Erlang version for caching" command: echo "$ELIXIR_VERSION $OTP_VERSION" | tee .elixir_otp_version - run: name: "Introspect schedulers" command: elixir -v + # Restore dependency cache - restore_cache: keys: - - v5-deps-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} - - v5-deps-{{ arch }}-{{ checksum ".elixir_otp_version" }} + - 2026-02-05-deps-{{ 
arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} + - 2026-02-05-deps-{{ arch }}-{{ checksum ".elixir_otp_version" }} + + - run: + name: "Fix ownership after cache restore" + command: chown -R lightning /home/lightning + - run: - name: "Install libsodium and sudo" + name: "Install build dependencies" command: | apt-get update && apt-get install -y \ build-essential \ cmake \ - libsodium-dev \ - sudo - - run: | - echo 'Defaults env_keep += "ERL_FLAGS ASSERT_RECEIVE_TIMEOUT MIX_ENV"' | \ - sudo EDITOR='tee -a' visudo - - - run: chown -R lightning /home/lightning - - run: sudo -u lightning mix local.hex --force && mix local.rebar --force - - run: cd assets; sudo -u lightning npm install --force - - run: sudo -u lightning mix do deps.get --only test, deps.compile, compile - - run: sudo -u lightning mix lightning.install_runtime + libsodium-dev + + - run: + name: "Install Hex and Rebar" + command: sudo -u lightning mix local.hex --force && sudo -u lightning mix local.rebar --force + + - run: + name: "Install Node dependencies" + command: cd assets && sudo -u lightning npm install --force + + - run: + name: "Compile Elixir dependencies and application" + command: sudo -u lightning mix do deps.get --only test, deps.compile, compile + + - run: + name: "Install runtime dependencies" + command: sudo -u lightning mix lightning.install_runtime - save_cache: - key: v5-deps-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} + key: 2026-02-05-deps-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} paths: - _build - deps - - ~/.mix + - /home/lightning/.mix + # Restore and build PLT for Dialyzer - restore_cache: name: "Restore PLT cache" keys: - - v5-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} - - v5-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }} + - 2026-02-05-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} + - 2026-02-05-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }} + + - run: + name: "Ensure PLT directory exists" + command: mkdir -p priv/plts && chown -R lightning priv/plts + + - run: + name: "Build Dialyzer PLT" + command: sudo -u lightning env MIX_ENV=test mix dialyzer --plt - - run: mkdir -p priv/plts && chown -R lightning priv/plts - - run: sudo -u lightning env MIX_ENV=test mix dialyzer --plt - save_cache: - key: v5-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} + key: 2026-02-05-plt-{{ arch }}-{{ checksum ".elixir_otp_version" }}-{{ checksum "mix.lock" }} paths: - priv/plts - - steps: << parameters.execute >> + # Persist everything to workspace for downstream jobs + - persist_to_workspace: + root: /home/lightning/project + paths: + - . 
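+      # Downstream check jobs re-attach this workspace via the
+      # attach_built_workspace command, so they reuse the compiled deps,
+      # _build output, node_modules and PLT instead of rebuilding them.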
+ + # ============================================================================ + # CHECK JOBS - Fast, parallel jobs that use the pre-built workspace + # ============================================================================ + lint: + executor: elixir + steps: + - setup_lightning_user + - attach_built_workspace + - run: + name: "Check code formatting" + command: | + sudo -u lightning mix format --check-formatted || touch /tmp/lint_failed + - run: + name: "Check code style with Credo" + when: always + command: | + sudo -u lightning mix credo --strict --all || touch /tmp/lint_failed + - run: + name: "Check for security vulnerabilities" + when: always + command: | + sudo -u lightning mix sobelow --threshold medium || touch /tmp/lint_failed + - run: + name: "Verify all checks passed" + when: always + command: | + if [ -f /tmp/lint_failed ]; then + echo "One or more lint checks failed" + exit 1 + fi + + check_dialyzer: + executor: elixir + steps: + - setup_lightning_user + - attach_built_workspace + - run: + name: "Run Dialyzer type checking" + command: sudo -u lightning mix dialyzer + + test_elixir: + executor: elixir_with_postgres + steps: + - setup_lightning_user + - attach_built_workspace + - install_node: + version: *nodejs_version + - run: + name: "Install libsodium" + command: apt-get update && apt-get install -y libsodium-dev + - run: + name: "Setup test database" + command: sudo -u lightning mix do ecto.create, ecto.migrate + - run: + name: "Run Elixir tests" + command: sudo -u lightning ./bin/ci_tests + - codecov/upload: + file: test/reports/coverage.json + - store_test_results: + path: test/reports/ + + test_javascript: + executor: elixir + steps: + - setup_lightning_user + - attach_built_workspace + - install_node: + version: *nodejs_version + - run: + name: "Run JavaScript tests" + command: cd assets && sudo -u lightning npm run test-report + - store_test_results: + path: test/reports/ workflows: pre-flight checks: jobs: - - build: - name: "Check code formatting" - execute: - - run: sudo -u lightning mix format --check-formatted - - build: - name: "Check code style" - execute: - - run: sudo -u lightning mix credo --strict --all - - build: - name: "Type check" - execute: - - run: sudo -u lightning mix dialyzer - - build: - name: "Check for security vulnerabilities" - execute: - - run: sudo -u lightning mix sobelow --threshold medium - - build: - name: "Check Elixir tests (codecov)" - execute: - - run: sudo -u lightning mix do ecto.create, ecto.migrate - - run: - command: sudo -u lightning ./bin/ci_tests - - codecov/upload: - file: test/reports/coverage.json - - store_test_results: - path: test/reports/ - - build: - name: "Check Javascript tests" - execute: - - run: cd assets; sudo -u lightning npm install && npm run test-report - - store_test_results: - path: test/reports/ + # First: compile everything once + - compile + + # Then: run all checks in parallel using the compiled workspace + - lint: + requires: [compile] + - check_dialyzer: + requires: [compile] + - test_elixir: + requires: [compile] + - test_javascript: + requires: [compile] diff --git a/.gitignore b/.gitignore index b7bcd12c8a..f8ff37ac20 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,7 @@ lightning-*.tar # Ignore adaptor registry cache adaptor_registry_cache.json +!test/fixtures/adaptor_registry_cache.json # Ignore assets that are produced by build tools. 
/priv/static/images/adaptors/ diff --git a/CHANGELOG.md b/CHANGELOG.md index e6004dd3b7..594d05c792 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,8 +19,20 @@ and this project adheres to ### Changed +- Refactor CircleCI to build-then-fan-out pattern, compiling once then running + checks in parallel to eliminate cache race conditions and reduce flaky tests + [#4378](https://github.com/OpenFn/lightning/pull/4378) + ### Fixed +- Version chip missing tooltips + [#4389](https://github.com/OpenFn/lightning/pull/4389) +- Fixed StaleEntryError when saving workflows where a job is replaced and its + edge retargeted to a new job (e.g. via AI assistant) + [#4383](https://github.com/OpenFn/lightning/issues/4383) + +## [2.15.13] - 2026-02-06 + ## [2.15.13-pre1] - 2026-02-05 ### Fixed @@ -93,6 +105,8 @@ and this project adheres to - Standardise copy button feedback across collaborative editor [#3578](https://github.com/OpenFn/lightning/issues/3578) +- Modified version hashing algorithm for CLI compatibility + [#4346](https://github.com/OpenFn/lightning/issues/4346) ### Fixed diff --git a/lib/lightning/projects/provisioner.ex b/lib/lightning/projects/provisioner.ex index 809c1ab304..49f33d0c51 100644 --- a/lib/lightning/projects/provisioner.ex +++ b/lib/lightning/projects/provisioner.ex @@ -65,8 +65,11 @@ defmodule Lightning.Projects.Provisioner do with :ok <- VersionControlUsageLimiter.limit_github_sync(project.id), project_changeset <- build_import_changeset(project, user_or_repo_connection, data), + edges_to_cleanup <- + edges_referencing_deleted_jobs(project_changeset), {:ok, %{workflows: workflows} = project} <- Repo.insert_or_update(project_changeset, allow_stale: allow_stale), + :ok <- cleanup_orphaned_edges(edges_to_cleanup), :ok <- handle_collection_deletion(project_changeset), updated_project <- preload_dependencies(project), {:ok, _changes} <- @@ -271,6 +274,50 @@ defmodule Lightning.Projects.Provisioner do end end + # Before import, find edges referencing a job being deleted (as target or source). + # Returns edge IDs so we can clean them up after the FK cascade sets NULL. + defp edges_referencing_deleted_jobs(project_changeset) do + deleted_job_ids = + project_changeset + |> get_assoc(:workflows) + |> Enum.flat_map(fn wf_cs -> + wf_cs + |> get_assoc(:jobs) + |> Enum.filter(fn job_cs -> job_cs.action == :delete end) + |> Enum.map(&get_field(&1, :id)) + end) + |> Enum.reject(&is_nil/1) + + if deleted_job_ids == [] do + [] + else + from(e in Edge, + where: + e.target_job_id in ^deleted_job_ids or + e.source_job_id in ^deleted_job_ids, + select: e.id + ) + |> Repo.all() + end + end + + # After import, remove edges that were orphaned by job deletion. + # Only deletes edges whose IDs we captured before the FK cascade, + # and only if they still have a NULL FK (target or source without trigger). + defp cleanup_orphaned_edges([]), do: :ok + + defp cleanup_orphaned_edges(edge_ids) do + from(e in Edge, + where: e.id in ^edge_ids, + where: + is_nil(e.target_job_id) or + (is_nil(e.source_job_id) and is_nil(e.source_trigger_id)) + ) + |> Repo.delete_all() + + :ok + end + defp handle_collection_deletion(project_changeset) do deleted_size = project_changeset diff --git a/lib/lightning/workflow_versions.ex b/lib/lightning/workflow_versions.ex index 52f0021299..c77d60e0b7 100644 --- a/lib/lightning/workflow_versions.ex +++ b/lib/lightning/workflow_versions.ex @@ -2,24 +2,20 @@ defmodule Lightning.WorkflowVersions do @moduledoc """ Provenance + comparison helpers for workflow heads. 
- - Persists append-only rows in `workflow_versions` and maintains a materialized - `workflows.version_history` array (12-char lowercase hex). - - `record_version/3` and `record_versions/3` are **idempotent** (`ON CONFLICT DO NOTHING`) - and **concurrency-safe** (row lock, append without dupes). - - `history_for/1` and `latest_hash/1` read the array first; when empty they fall back - to the table with deterministic ordering by `(inserted_at, id)`. - - `reconcile_history!/1` rebuilds the array from provenance rows. + - Persists append-only rows in `workflow_versions` with deterministic ordering + by `(inserted_at, id)`. + - `record_version/3` is **idempotent** and **concurrency-safe** (squashes + consecutive versions with the same source). + - `history_for/1` and `latest_hash/1` query the table with deterministic ordering. - `classify/2` and `classify_with_delta/2` compare two histories (same/ahead/diverged). Validation & invariants: - - `hash` must match `^[a-f0-9]{12}$`; `source` must be `"app"` or `"cli"`; - `(workflow_id, hash)` is unique. + - `hash` must match `^[a-f0-9]{12}$`; `source` must be `"app"` or `"cli"`. - Designed for fast diffs and consistent “latest head” lookups. + Designed for fast diffs and consistent "latest head" lookups. """ import Ecto.Query - alias Ecto.Changeset alias Ecto.Multi alias Lightning.Repo alias Lightning.Validators.Hex @@ -31,13 +27,12 @@ defmodule Lightning.WorkflowVersions do @sources ~w(app cli) @doc """ - Records a **single** workflow head `hash` with provenance and keeps - `workflows.version_history` in sync. + Records a **single** workflow head `hash` with provenance. This operation is **idempotent** and **concurrency-safe**: - it inserts into `workflow_versions` with `ON CONFLICT DO NOTHING`, then - locks the workflow row (`FOR UPDATE`) and appends `hash` to the array only - if it is not already present. 
+ - If the latest version has the same source, it squashes (replaces) it + - If the hash+source already exists, it does nothing + - Otherwise, it inserts a new row ## Parameters * `workflow` — the workflow owning the history @@ -45,14 +40,13 @@ defmodule Lightning.WorkflowVersions do * `source` — `"app"` or `"cli"` (defaults to `"app"`) ## Returns - * `{:ok, %Workflow{}}` — workflow (possibly unchanged) with an updated - `version_history` if a new `hash` was appended + * `{:ok, %Workflow{}}` — workflow (unchanged) * `{:error, reason}` — database error ## Examples iex> WorkflowVersions.record_version(wf, "deadbeefcafe", "app") - {:ok, %Workflow{version_history: [..., "deadbeefcafe"]}} + {:ok, %Workflow{}} iex> WorkflowVersions.record_version(wf, "NOT_HEX", "app") {:error, :invalid_input} @@ -89,10 +83,9 @@ defmodule Lightning.WorkflowVersions do }) ) |> maybe_delete_current_latest() - |> update_workflow_history(workflow) |> Repo.transaction() |> case do - {:ok, %{update_workflow: updated}} -> {:ok, updated} + {:ok, _} -> {:ok, workflow} {:error, _op, reason, _} -> {:error, reason} end else @@ -128,69 +121,17 @@ defmodule Lightning.WorkflowVersions do ) end - defp update_workflow_history(multi, workflow) do - Multi.run( - multi, - :update_workflow, - fn repo, %{new_version: new_version, delete_latest: deleted_version} -> - workflow = - from(w in Workflow, where: w.id == ^workflow.id, lock: "FOR UPDATE") - |> repo.one!() - - workflow - |> Changeset.change( - version_history: - build_version_history( - workflow.version_history || [], - new_version, - deleted_version - ) - ) - |> repo.update() - end - ) - end - - defp build_version_history(history, %{} = new_version, deleted_version) do - version_string = format_version(new_version) - hist = maybe_remove_squashed_version(history, deleted_version) - hist ++ [version_string] - end - - defp build_version_history(history, nil, _deleted), do: history - - defp maybe_remove_squashed_version(history, %{} = deleted_version) do - deleted_string = format_version(deleted_version) - - case List.last(history) do - ^deleted_string -> List.delete_at(history, -1) - _ -> history - end - end - - defp maybe_remove_squashed_version(history, nil), do: history - - defp format_version(%{source: source, hash: hash}), do: "#{source}:#{hash}" - @doc """ Returns the **ordered** history of heads for a workflow. - If `workflow.version_history` is present and non-empty, that array is returned. - Otherwise, the function falls back to `workflow_versions` ordered by - `inserted_at ASC, id ASC` to provide deterministic ordering for equal timestamps. + Queries `workflow_versions` ordered by `inserted_at ASC, id ASC` to provide + deterministic ordering for equal timestamps. ## Examples - iex> WorkflowVersions.history_for(%Workflow{version_history: ["a", "b"]}) - ["a", "b"] - - iex> WorkflowVersions.history_for(wf) # when array is empty/nil - ["a", "b", "c"] + iex> WorkflowVersions.history_for(wf) + ["app:a", "cli:b", "app:c"] """ - def history_for(%Workflow{version_history: arr}) - when is_list(arr) and arr != [], - do: arr - def history_for(%Workflow{id: id}) do from(v in WorkflowVersion, where: v.workflow_id == ^id, @@ -203,33 +144,26 @@ defmodule Lightning.WorkflowVersions do @doc """ Returns the **latest** head for a workflow (or `nil` if none). - Uses `workflow.version_history` when populated (taking the last element). - If empty/nil, reads from `workflow_versions` with - `ORDER BY inserted_at DESC, id DESC LIMIT 1` for deterministic results. 
+ Queries `workflow_versions` with `ORDER BY inserted_at DESC, id DESC LIMIT 1` + for deterministic results. ## Examples - iex> WorkflowVersions.latest_hash(%Workflow{version_history: ["a", "b"]}) - "b" + iex> WorkflowVersions.latest_hash(wf) + "app:b" iex> WorkflowVersions.latest_hash(wf_without_versions) nil """ @spec latest_hash(Workflow.t()) :: hash | nil def latest_hash(%Workflow{} = wf) do - case wf.version_history do - list when is_list(list) and list != [] -> - List.last(list) - - _ -> - from(v in WorkflowVersion, - where: v.workflow_id == ^wf.id, - order_by: [desc: v.inserted_at, desc: v.id], - limit: 1, - select: fragment("? || ':' || ?", v.source, v.hash) - ) - |> Repo.one() - end + from(v in WorkflowVersion, + where: v.workflow_id == ^wf.id, + order_by: [desc: v.inserted_at, desc: v.id], + limit: 1, + select: fragment("? || ':' || ?", v.source, v.hash) + ) + |> Repo.one() end defp latest_version(workflow_id) do @@ -241,29 +175,6 @@ defmodule Lightning.WorkflowVersions do |> Repo.one() end - @doc """ - Rebuilds and **persists** `workflow.version_history` from provenance rows. - - This is useful for maintenance/migrations when the array drifts from the - `workflow_versions` table. Ordering is `inserted_at ASC, id ASC`. - - ## Returns - * `%Workflow{}` — updated workflow with a rebuilt `version_history` - - ## Examples - - iex> wf = WorkflowVersions.reconcile_history!(wf) - %Workflow{version_history: [...]} - """ - @spec reconcile_history!(Workflow.t()) :: Workflow.t() - def reconcile_history!(%Workflow{id: id} = wf) do - arr = history_for(%Workflow{id: id, version_history: []}) - - wf - |> Changeset.change(version_history: arr) - |> Repo.update!() - end - @doc """ Generates a deterministic hash for a workflow based on its structure. diff --git a/lib/lightning/workflows.ex b/lib/lightning/workflows.ex index a3080f3b89..ab8493729c 100644 --- a/lib/lightning/workflows.ex +++ b/lib/lightning/workflows.ex @@ -155,7 +155,18 @@ defmodule Lightning.Workflows do {:error, :workflow_deleted} end end) + |> Multi.run(:orphan_deleted_jobs, fn repo, _changes -> + orphan_jobs_being_deleted(repo, changeset) + end) |> Multi.insert_or_update(:workflow, changeset) + |> Multi.run(:cleanup_orphaned_edges, fn repo, + %{ + workflow: workflow, + orphan_deleted_jobs: + orphaned_edge_ids + } -> + cleanup_orphaned_edges(repo, workflow.id, orphaned_edge_ids) + end) |> then(fn multi -> if changeset.changes == %{} do multi @@ -219,6 +230,75 @@ defmodule Lightning.Workflows do |> save_workflow(actor, opts) end + # Nullifies edge FK references to jobs that are about to be deleted. + # This prevents PostgreSQL's cascade delete from removing edges that Ecto + # is trying to update (the retargeting race condition). + # + # Returns the IDs of edges whose target_job_id or source_job_id was nullified, + # so that cleanup_orphaned_edges can precisely remove only those edges (if they + # weren't retargeted by the changeset). 
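+  #
+  # Illustrative example (hypothetical IDs): if job J1 is being deleted and edge
+  # E1 currently has target_job_id == J1.id, this step sets that FK to NULL and
+  # returns [E1.id]; if the incoming changeset retargets E1 to a new job,
+  # cleanup_orphaned_edges later sees a non-NULL target and keeps E1.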
+ defp orphan_jobs_being_deleted(repo, changeset) do + deleted_job_ids = + changeset + |> Ecto.Changeset.get_change(:jobs, []) + |> Enum.filter(fn cs -> cs.action in [:replace, :delete] end) + |> Enum.map(fn cs -> cs.data.id end) + + if deleted_job_ids == [] do + {:ok, []} + else + workflow_id = changeset.data.id + + {_target_count, target_orphaned_ids} = + from(e in Edge, + where: e.workflow_id == ^workflow_id, + where: e.target_job_id in ^deleted_job_ids, + select: e.id + ) + |> repo.update_all(set: [target_job_id: nil]) + + {_source_count, source_orphaned_ids} = + from(e in Edge, + where: e.workflow_id == ^workflow_id, + where: e.source_job_id in ^deleted_job_ids, + select: e.id + ) + |> repo.update_all(set: [source_job_id: nil]) + + orphaned_edge_ids = + Enum.uniq(target_orphaned_ids ++ source_orphaned_ids) + + Logger.debug(fn -> + "Orphaned #{length(target_orphaned_ids)} target and #{length(source_orphaned_ids)} source edge refs for deleted jobs: #{inspect(deleted_job_ids)}" + end) + + {:ok, orphaned_edge_ids} + end + end + + # Removes edges that were orphaned by job deletion and not retargeted. + # Only deletes edges whose IDs were returned by orphan_jobs_being_deleted + # AND that still have a NULL FK (target_job_id or source without trigger). + defp cleanup_orphaned_edges(_repo, _workflow_id, []), do: {:ok, 0} + + defp cleanup_orphaned_edges(repo, workflow_id, orphaned_edge_ids) do + {count, _} = + from(e in Edge, + where: e.workflow_id == ^workflow_id, + where: e.id in ^orphaned_edge_ids, + where: + is_nil(e.target_job_id) or + (is_nil(e.source_job_id) and is_nil(e.source_trigger_id)) + ) + |> repo.delete_all() + + Logger.debug(fn -> + "Cleaned up #{count} orphaned edges for workflow #{workflow_id}" + end) + + {:ok, count} + end + @spec publish_kafka_trigger_events(Ecto.Changeset.t(Workflow.t())) :: :ok def publish_kafka_trigger_events(changeset) do changeset diff --git a/lib/lightning/workflows/workflow.ex b/lib/lightning/workflows/workflow.ex index 973faa2190..448bf8c764 100644 --- a/lib/lightning/workflows/workflow.ex +++ b/lib/lightning/workflows/workflow.ex @@ -43,7 +43,6 @@ defmodule Lightning.Workflows.Workflow do field :concurrency, :integer, default: nil field :enable_job_logs, :boolean, default: true field :positions, :map - field :version_history, {:array, :string}, default: [] has_many :edges, Edge, on_replace: :delete_if_exists has_many :jobs, Job, on_replace: :delete diff --git a/lib/lightning/workflows/workflow_version.ex b/lib/lightning/workflows/workflow_version.ex index a68abf415f..52159b9683 100644 --- a/lib/lightning/workflows/workflow_version.ex +++ b/lib/lightning/workflows/workflow_version.ex @@ -5,11 +5,9 @@ defmodule Lightning.Workflows.WorkflowVersion do - One row per head: `hash` (12-char lowercase hex), `source` ("app" | "cli"), `workflow_id`, `inserted_at` (UTC μs). - Append-only: `updated_at` disabled; rows are never mutated. - - Uniqueness: `(workflow_id, hash)` unique; same hash may exist across workflows. - - Validation mirrors DB checks: hash format, allowed sources, valid `workflow_id`. + - Validation: hash format, allowed sources, valid `workflow_id`. - Deterministic ordering via `:utc_datetime_usec` timestamps. - - Use `Lightning.WorkflowVersions` to record/query and keep - `workflows.version_history` in sync. + - Use `Lightning.WorkflowVersions` to record/query workflow versions. 
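+  - Read helpers in `Lightning.WorkflowVersions` (e.g. `history_for/1` and
+    `latest_hash/1`) return heads as `"source:hash"` strings, such as
+    `"app:deadbeefcafe"`.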
""" use Lightning.Schema import Ecto.Changeset diff --git a/lib/lightning_web/components/layout_components.ex b/lib/lightning_web/components/layout_components.ex index f7547148f6..56de80372e 100644 --- a/lib/lightning_web/components/layout_components.ex +++ b/lib/lightning_web/components/layout_components.ex @@ -458,18 +458,14 @@ defmodule LightningWeb.LayoutComponents do
-
- v{Application.spec(:lightning, :vsn)} -
+ <%!-- Collapsed branding: centered --%>