diff --git a/CHANGELOG.md b/CHANGELOG.md
index 762fa2afa7f..a0af6e2b40d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,14 @@ and this project adheres to
### Added
+- Support collections in sandboxes. Collection names are now scoped per project,
+ empty collections are cloned into a sandbox on provision, and collection names
+ (not data) are synchronised when a sandbox is merged back into its parent.
+ Adds a v2 collections API at `/collections/:project_id/:name` selected via the
+ `x-api-version: 2` header. V1 continues to work and returns 409 when a name is
+ ambiguous across projects.
+ [#3548](https://github.com/OpenFn/lightning/issues/3548)
+
### Changed
- Bump `@openfn/ws-worker` from
diff --git a/lib/lightning/collections.ex b/lib/lightning/collections.ex
index d89671a8de5..682acec71a0 100644
--- a/lib/lightning/collections.ex
+++ b/lib/lightning/collections.ex
@@ -53,9 +53,19 @@ defmodule Lightning.Collections do
end
@spec get_collection(String.t()) ::
- {:ok, Collection.t()} | {:error, :not_found}
+ {:ok, Collection.t()} | {:error, :not_found} | {:error, :conflict}
def get_collection(name) do
- case Repo.get_by(Collection, name: name) do
+ case Repo.all(from c in Collection, where: c.name == ^name) do
+ [] -> {:error, :not_found}
+ [collection] -> {:ok, collection}
+ [_ | _] -> {:error, :conflict}
+ end
+ end
+
+ @spec get_collection(Ecto.UUID.t(), String.t()) ::
+ {:ok, Collection.t()} | {:error, :not_found}
+ def get_collection(project_id, name) do
+ case Repo.get_by(Collection, project_id: project_id, name: name) do
nil -> {:error, :not_found}
collection -> {:ok, collection}
end
diff --git a/lib/lightning/collections/collection.ex b/lib/lightning/collections/collection.ex
index ecb4c1a0791..fc6b947b2f2 100644
--- a/lib/lightning/collections/collection.ex
+++ b/lib/lightning/collections/collection.ex
@@ -40,7 +40,8 @@ defmodule Lightning.Collections.Collection do
|> validate_format(:name, ~r/^[a-z0-9]+([\-_.][a-z0-9]+)*$/,
message: "Collection name must be URL safe"
)
- |> unique_constraint([:name],
+ |> unique_constraint(:name,
+ name: :collections_project_id_name_index,
message: "A collection with this name already exists"
)
end
@@ -50,7 +51,8 @@ defmodule Lightning.Collections.Collection do
|> validate_format(:name, ~r/^[a-z0-9]+([\-_.][a-z0-9]+)*$/,
message: "Collection name must be URL safe"
)
- |> unique_constraint([:name],
+ |> unique_constraint(:name,
+ name: :collections_project_id_name_index,
message: "A collection with this name already exists"
)
end
diff --git a/lib/lightning/projects/sandboxes.ex b/lib/lightning/projects/sandboxes.ex
index cebcd8edfcb..089d73311e1 100644
--- a/lib/lightning/projects/sandboxes.ex
+++ b/lib/lightning/projects/sandboxes.ex
@@ -18,6 +18,7 @@ defmodule Lightning.Projects.Sandboxes do
## Operations
* `provision/3` - Create a new sandbox from a parent project
+ * `merge/4` - Merge a sandbox into its target (workflows + collections)
* `update_sandbox/3` - Update sandbox name, color, or environment
* `delete_sandbox/2` - Delete a sandbox and all its descendants
@@ -36,10 +37,14 @@ defmodule Lightning.Projects.Sandboxes do
import Ecto.Query
alias Lightning.Accounts.User
+ alias Lightning.Collections
+ alias Lightning.Collections.Collection
alias Lightning.Credentials.KeychainCredential
alias Lightning.Policies.Permissions
+ alias Lightning.Projects.MergeProjects
alias Lightning.Projects.Project
alias Lightning.Projects.ProjectCredential
+ alias Lightning.Projects.Provisioner
alias Lightning.Projects.SandboxPromExPlugin
alias Lightning.Repo
alias Lightning.Workflows
@@ -121,6 +126,42 @@ defmodule Lightning.Projects.Sandboxes do
end
end
+ @doc """
+ Merges a sandbox into its target project.
+
+ Applies the sandbox's workflow configuration to the target via the
+ provisioner, then synchronises collection names. Collection data is
+ never copied.
+
+ ## Parameters
+ * `source` - The sandbox project being merged
+ * `target` - The project receiving the merge
+ * `actor` - The user performing the merge
+ * `opts` - Merge options (`:selected_workflow_ids`, `:deleted_target_workflow_ids`)
+
+ ## Returns
+ * `{:ok, updated_target}` - Merge and collection sync succeeded
+ * `{:error, reason}` - Workflow merge or collection sync failed
+ """
+ @spec merge(Project.t(), Project.t(), User.t(), map()) ::
+ {:ok, Project.t()} | {:error, term()}
+ def merge(
+ %Project{} = source,
+ %Project{} = target,
+ %User{} = actor,
+ opts \\ %{}
+ ) do
+ merge_doc = MergeProjects.merge_project(source, target, opts)
+
+ with {:ok, updated_target} <-
+ Provisioner.import_document(target, actor, merge_doc,
+ allow_stale: true
+ ),
+ {:ok, _} <- sync_collections(source, target) do
+ {:ok, updated_target}
+ end
+ end
+
@doc """
Updates a sandbox project's basic attributes.
@@ -566,6 +607,87 @@ defmodule Lightning.Projects.Sandboxes do
|> copy_workflow_version_history(sandbox.workflow_id_mapping)
|> create_initial_workflow_snapshots()
|> copy_selected_dataclips(parent.id, Map.get(original_attrs, :dataclip_ids))
+ |> clone_collections_from_parent(parent)
+ end
+
+ defp clone_collections_from_parent(sandbox, parent) do
+ parent_names = parent |> Collections.list_project_collections() |> names()
+ insert_empty_collections(sandbox.id, parent_names)
+ sandbox
+ end
+
+ @doc """
+ Synchronises collection names from a sandbox to its merge target.
+
+ After a successful merge, this brings the target's set of collections in
+ line with the sandbox's:
+
+ * Collections present in the sandbox but missing from the target are
+ created (empty) in the target.
+ * Collections present in the target but missing from the sandbox are
+ deleted from the target, along with all their items.
+
+ **Collection data is never copied or merged.** Only the set of collection
+ names is synchronised, mirroring the sandbox-is-for-configuration model.
+
+ The create and delete operations run in a single transaction; a failure
+ leaves the target's collections unchanged.
+ """
+ @spec sync_collections(Project.t(), Project.t()) ::
+ {:ok, %{created: non_neg_integer(), deleted: non_neg_integer()}}
+ | {:error, term()}
+ def sync_collections(%Project{} = source, %Project{} = target) do
+ source_names = source |> Collections.list_project_collections() |> names()
+
+ target_collections = Collections.list_project_collections(target)
+ target_names = names(target_collections)
+
+ to_create = MapSet.difference(source_names, target_names)
+
+ names_to_delete = MapSet.difference(target_names, source_names)
+
+ to_delete_ids =
+ for c <- target_collections,
+ c.name in names_to_delete,
+ do: c.id
+
+ Repo.transaction(fn ->
+ {created, _} = insert_empty_collections(target.id, to_create)
+ {deleted, _} = delete_collections(to_delete_ids)
+ %{created: created, deleted: deleted}
+ end)
+ end
+
+ defp names(collections), do: MapSet.new(collections, & &1.name)
+
+ defp insert_empty_collections(project_id, names) do
+ if Enum.empty?(names) do
+ {0, nil}
+ else
+ now = DateTime.utc_now() |> DateTime.truncate(:second)
+
+ rows =
+ Enum.map(names, fn name ->
+ %{
+ id: Ecto.UUID.generate(),
+ name: name,
+ project_id: project_id,
+ byte_size_sum: 0,
+ inserted_at: now,
+ updated_at: now
+ }
+ end)
+
+ # on_conflict: :nothing handles the rare case where two concurrent
+ # merges into the same target both try to create the same collection.
+ Repo.insert_all(Collection, rows, on_conflict: :nothing)
+ end
+ end
+
+ defp delete_collections([]), do: {0, nil}
+
+ defp delete_collections(ids) do
+ Repo.delete_all(from c in Collection, where: c.id in ^ids)
end
defp copy_workflow_version_history(sandbox, workflow_id_mapping) do
diff --git a/lib/lightning_web/controllers/collections_controller.ex b/lib/lightning_web/controllers/collections_controller.ex
index a75398030fb..b20f3b9e8d2 100644
--- a/lib/lightning_web/controllers/collections_controller.ex
+++ b/lib/lightning_web/controllers/collections_controller.ex
@@ -2,6 +2,7 @@ defmodule LightningWeb.CollectionsController do
use LightningWeb, :controller
alias Lightning.Collections
+ alias Lightning.Collections.Collection
alias Lightning.Extensions.Message
alias Lightning.Policies.Permissions
@@ -25,19 +26,9 @@ defmodule LightningWeb.CollectionsController do
@valid_params ["key", "cursor", "limit" | @timestamp_params]
- defp authorize(conn, collection) do
- subject = conn.assigns[:subject] || conn.assigns[:current_user]
-
- Permissions.can(
- Lightning.Policies.Collections,
- :access_collection,
- subject,
- collection
- )
- end
-
- def put(conn, %{"name" => col_name, "key" => key, "value" => value}) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ @spec put(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def put(conn, %{"key" => key, "value" => value} = params) do
+ with {:ok, collection} <- resolve(params),
:ok <- authorize(conn, collection),
:ok <- Collections.put(collection, key, value) do
json(conn, %{upserted: 1, error: nil})
@@ -50,8 +41,11 @@ defmodule LightningWeb.CollectionsController do
end
end
- def put_all(conn, %{"name" => col_name, "items" => items}) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ def put(conn, _params), do: missing_body(conn, "value")
+
+ @spec put_all(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def put_all(conn, %{"items" => items} = params) do
+ with {:ok, collection} <- resolve(params),
:ok <- authorize(conn, collection),
{:ok, count} <- Collections.put_all(collection, items) do
json(conn, %{upserted: count, error: nil})
@@ -66,21 +60,22 @@ defmodule LightningWeb.CollectionsController do
end
end
- def get(conn, %{"name" => col_name, "key" => key}) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ def put_all(conn, _params), do: missing_body(conn, "items")
+
+ @spec get(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def get(conn, %{"key" => key} = params) do
+ with {:ok, collection} <- resolve(params),
:ok <- authorize(conn, collection) do
case Collections.get(collection, key) do
- nil ->
- resp(conn, :no_content, "")
-
- item ->
- json(conn, item)
+ nil -> resp(conn, :no_content, "")
+ item -> json(conn, item)
end
end
end
- def delete(conn, %{"name" => col_name, "key" => key}) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ @spec delete(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def delete(conn, %{"key" => key} = params) do
+ with {:ok, collection} <- resolve(params),
:ok <- authorize(conn, collection) do
case Collections.delete(collection, key) do
:ok ->
@@ -92,19 +87,40 @@ defmodule LightningWeb.CollectionsController do
end
end
- def delete_all(conn, %{"name" => col_name} = params) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ @spec delete_all(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def delete_all(conn, params) do
+ with {:ok, collection} <- resolve(params),
:ok <- authorize(conn, collection) do
key_param = params["key"]
-
{:ok, n} = Collections.delete_all(collection, key_param)
-
json(conn, %{key: key_param, deleted: n, error: nil})
end
end
- def download(conn, %{"name" => col_name}) do
- with {:ok, collection} <- Collections.get_collection(col_name),
+ @spec stream(Plug.Conn.t(), map()) :: Plug.Conn.t() | term()
+ def stream(conn, params) do
+ with {:ok, collection} <- resolve(params),
+ :ok <- authorize(conn, collection),
+ {:ok, filters} <- parse_query_params(conn.query_params) do
+ key_pattern = conn.query_params["key"]
+ items_stream = stream_all_in_chunks(collection, filters, key_pattern)
+ response_limit = Map.fetch!(filters, :limit)
+
+ case stream_chunked(conn, items_stream, response_limit) do
+ {:error, conn} -> conn
+ {:ok, conn} -> conn
+ end
+ end
+ end
+
+ @doc """
+ Browser-pipeline download for a project-scoped collection.
+
+ Always v2 since the UI links to project-scoped download URLs.
+ """
+ @spec download(Plug.Conn.t(), map()) :: Plug.Conn.t()
+ def download(conn, %{"project_id" => project_id, "name" => name}) do
+ with {:ok, collection} <- Collections.get_collection(project_id, name),
:ok <- authorize(conn, collection) do
items_stream =
stream_all_in_chunks(
@@ -117,24 +133,37 @@ defmodule LightningWeb.CollectionsController do
|> put_resp_content_type("application/json")
|> put_resp_header(
"content-disposition",
- ~s(attachment; filename="#{col_name}.json")
+ ~s(attachment; filename="#{name}.json")
)
|> stream_as_json_array(items_stream)
end
end
- def stream(conn, %{"name" => col_name} = params) do
- with {:ok, collection, filters} <- validate_query(conn, col_name) do
- key_pattern = Map.get(params, "key")
+ # Resolves a collection by project + name when project_id is present, or by
+ # name alone otherwise. Any `{:error, reason}` is rendered by the fallback
+ # controller (404 for `:not_found`, 409 for `:conflict`).
+ @spec resolve(map()) :: {:ok, Collection.t()} | {:error, atom()}
+ defp resolve(%{"project_id" => project_id, "name" => name}),
+ do: Collections.get_collection(project_id, name)
- items_stream = stream_all_in_chunks(collection, filters, key_pattern)
- response_limit = Map.fetch!(filters, :limit)
+ defp resolve(%{"name" => name}),
+ do: Collections.get_collection(name)
- case stream_chunked(conn, items_stream, response_limit) do
- {:error, conn} -> conn
- {:ok, conn} -> conn
- end
- end
+ defp authorize(conn, collection) do
+ subject = conn.assigns[:subject] || conn.assigns[:current_user]
+
+ Permissions.can(
+ Lightning.Policies.Collections,
+ :access_collection,
+ subject,
+ collection
+ )
+ end
+
+ defp missing_body(conn, field) do
+ conn
+ |> put_status(:unprocessable_entity)
+ |> json(%{error: "Missing required field: #{field}"})
end
defp stream_as_json_array(conn, items_stream) do
@@ -203,17 +232,13 @@ defmodule LightningWeb.CollectionsController do
end
end
- defp validate_query(conn, col_name) do
- with {:ok, collection} <- Collections.get_collection(col_name),
- :ok <- authorize(conn, collection),
- query_params <-
- Enum.into(conn.query_params, %{
- "cursor" => nil,
- "limit" => "#{@default_stream_limit}"
- }),
- {:ok, filters} <- validate_query_params(query_params) do
- {:ok, collection, filters}
- end
+ defp parse_query_params(query_params) do
+ query_params
+ |> Enum.into(%{
+ "cursor" => nil,
+ "limit" => "#{@default_stream_limit}"
+ })
+ |> validate_query_params()
end
defp validate_query_params(
diff --git a/lib/lightning_web/controllers/fallback_controller.ex b/lib/lightning_web/controllers/fallback_controller.ex
index 6e3f3c590bf..fbf777d0f3a 100644
--- a/lib/lightning_web/controllers/fallback_controller.ex
+++ b/lib/lightning_web/controllers/fallback_controller.ex
@@ -28,6 +28,15 @@ defmodule LightningWeb.FallbackController do
|> render(:"401")
end
+ def call(conn, {:error, :conflict}) do
+ conn
+ |> put_status(:conflict)
+ |> json(%{
+ error:
+ "Multiple collections found with this name. Use API v2 with a project_id."
+ })
+ end
+
def call(conn, {:error, :forbidden}) do
conn
|> put_status(:forbidden)
diff --git a/lib/lightning_web/live/project_live/collections_component.ex b/lib/lightning_web/live/project_live/collections_component.ex
index 534dbca1b1f..92a1cd6ff02 100644
--- a/lib/lightning_web/live/project_live/collections_component.ex
+++ b/lib/lightning_web/live/project_live/collections_component.ex
@@ -45,7 +45,8 @@ defmodule LightningWeb.ProjectLive.CollectionsComponent do
socket
) do
with :ok <- can_create_collection(socket) do
- {:ok, collection} = Collections.get_collection(collection_name)
+ project_id = socket.assigns.project.id
+ {:ok, collection} = Collections.get_collection(project_id, collection_name)
changeset =
Collection.form_changeset(collection, %{raw_name: collection.name})
@@ -65,7 +66,8 @@ defmodule LightningWeb.ProjectLive.CollectionsComponent do
socket
) do
with :ok <- can_create_collection(socket) do
- {:ok, collection} = Collections.get_collection(collection_name)
+ project_id = socket.assigns.project.id
+ {:ok, collection} = Collections.get_collection(project_id, collection_name)
{:noreply, assign(socket, collection: collection, action: :delete)}
end
@@ -207,7 +209,9 @@ defmodule LightningWeb.ProjectLive.CollectionsComponent do
Download
diff --git a/lib/lightning_web/live/sandbox_live/components.ex b/lib/lightning_web/live/sandbox_live/components.ex
index f6e20e2fe61..44d4713bd0e 100644
--- a/lib/lightning_web/live/sandbox_live/components.ex
+++ b/lib/lightning_web/live/sandbox_live/components.ex
@@ -382,6 +382,7 @@ defmodule LightningWeb.SandboxLive.Components do
>
<:message>
Sandbox merging is in beta. For production projects, use the CLI to merge locally and preview changes first.
+ Collection names will be synced: new collections are added (empty) to the target, and collections missing from the sandbox are removed from the target. Collection data is never merged.
diff --git a/lib/lightning_web/live/sandbox_live/index.ex b/lib/lightning_web/live/sandbox_live/index.ex
index 33110a574d3..17e67fb9866 100644
--- a/lib/lightning_web/live/sandbox_live/index.ex
+++ b/lib/lightning_web/live/sandbox_live/index.ex
@@ -6,10 +6,13 @@ defmodule LightningWeb.SandboxLive.Index do
alias Lightning.Projects
alias Lightning.Projects.MergeProjects
alias Lightning.Projects.ProjectLimiter
+ alias Lightning.Projects.Sandboxes
alias Lightning.Repo
alias Lightning.VersionControl
alias LightningWeb.SandboxLive.Components
+ require Logger
+
defmodule MergeWorkflow do
defstruct [:id, :name, :is_diverged, :is_new, :is_deleted]
end
@@ -795,16 +798,7 @@ defmodule LightningWeb.SandboxLive.Index do
%{}
end
- result =
- source
- |> MergeProjects.merge_project(target, opts)
- |> then(
- &Lightning.Projects.Provisioner.import_document(target, actor, &1,
- allow_stale: true
- )
- )
-
- case result do
+ case Sandboxes.merge(source, target, actor, opts) do
{:ok, _updated_target} = success ->
maybe_commit_to_github(target, "Merged sandbox #{source.name}")
success
@@ -887,5 +881,6 @@ defmodule LightningWeb.SandboxLive.Index do
end
defp format_merge_error(%{text: text}), do: text
+ defp format_merge_error(reason) when is_binary(reason), do: reason
defp format_merge_error(reason), do: "Failed to merge: #{inspect(reason)}"
end
diff --git a/lib/lightning_web/plugs/collections_router.ex b/lib/lightning_web/plugs/collections_router.ex
new file mode 100644
index 00000000000..8d25732d018
--- /dev/null
+++ b/lib/lightning_web/plugs/collections_router.ex
@@ -0,0 +1,154 @@
+defmodule LightningWeb.Plugs.CollectionsRouter do
+ @moduledoc """
+ Versioned routing plug for the Collections API.
+
+ Mounted via `forward` in the Phoenix router, this plug resolves the
+ API version from the `x-api-version` header and dispatches to the
+ appropriate controller action based on version, HTTP method, and path
+ segments.
+
+ ## Version resolution
+
+ The version is read from the `x-api-version` request header:
+
+ * Missing or `"1"` -> v1
+ * `"2"` -> v2
+ * Any other value or multiple headers -> 400 Bad Request
+
+ ## Routes
+
+ * **V1** (name-scoped): `/:name`, `/:name/:key`
+ * **V2** (project-scoped): `/:project_id/:name`, `/:project_id/:name/:key`
+
+ Controller actions may return a `%Plug.Conn{}` (rendered directly) or
+ an error tuple like `{:error, :not_found}`, which is passed to the
+ fallback controller.
+ """
+ use Phoenix.Controller
+ import Plug.Conn
+
+ alias LightningWeb.CollectionsController, as: C
+ alias LightningWeb.FallbackController
+
+ @supported_versions ~w(1 2)
+
+ def init(opts), do: opts
+
+ def call(conn, _opts) do
+ case resolve_version(conn) do
+ {:ok, conn} ->
+ case route(conn, conn.assigns.api_version, conn.method, conn.path_info) do
+ %Plug.Conn{} = conn -> conn
+ error -> FallbackController.call(conn, error)
+ end
+
+ {:error, conn} ->
+ conn
+ end
+ end
+
+ # -- Version resolution --------------------------------------------------
+
+ defp resolve_version(conn) do
+ case get_req_header(conn, "x-api-version") do
+ [] -> {:ok, assign(conn, :api_version, :v1)}
+ ["1"] -> {:ok, assign(conn, :api_version, :v1)}
+ ["2"] -> {:ok, assign(conn, :api_version, :v2)}
+ [value] -> {:error, reject_version(conn, value)}
+ _many -> {:error, reject_version(conn, "multiple")}
+ end
+ end
+
+ defp reject_version(conn, value) do
+ conn
+ |> put_status(:bad_request)
+ |> json(%{
+ error:
+ "Unsupported API version: #{inspect(value)}. " <>
+ "Supported versions: #{Enum.join(@supported_versions, ", ")}."
+ })
+ |> halt()
+ end
+
+ # -- V1: name-scoped -----------------------------------------------------
+
+ defp route(conn, :v1, "GET", [name]),
+ do: C.stream(conn, %{"name" => name})
+
+ defp route(conn, :v1, "GET", [name, key]),
+ do: C.get(conn, %{"name" => name, "key" => key})
+
+ defp route(conn, :v1, "PUT", [name, key]),
+ do: C.put(conn, body_with(conn, %{"name" => name, "key" => key}))
+
+ defp route(conn, :v1, "POST", [name]),
+ do: C.put_all(conn, body_with(conn, %{"name" => name}))
+
+ defp route(conn, :v1, "DELETE", [name, key]),
+ do: C.delete(conn, %{"name" => name, "key" => key})
+
+ defp route(conn, :v1, "DELETE", [name]),
+ do: C.delete_all(conn, all_params(conn, %{"name" => name}))
+
+ # -- V2: project-scoped --------------------------------------------------
+
+ defp route(conn, :v2, "GET", [project_id, name]),
+ do: C.stream(conn, %{"project_id" => project_id, "name" => name})
+
+ defp route(conn, :v2, "GET", [project_id, name, key]),
+ do:
+ C.get(conn, %{
+ "project_id" => project_id,
+ "name" => name,
+ "key" => key
+ })
+
+ defp route(conn, :v2, "PUT", [project_id, name, key]),
+ do:
+ C.put(
+ conn,
+ body_with(conn, %{
+ "project_id" => project_id,
+ "name" => name,
+ "key" => key
+ })
+ )
+
+ defp route(conn, :v2, "POST", [project_id, name]),
+ do:
+ C.put_all(
+ conn,
+ body_with(conn, %{"project_id" => project_id, "name" => name})
+ )
+
+ defp route(conn, :v2, "DELETE", [project_id, name, key]),
+ do:
+ C.delete(conn, %{
+ "project_id" => project_id,
+ "name" => name,
+ "key" => key
+ })
+
+ defp route(conn, :v2, "DELETE", [project_id, name]),
+ do:
+ C.delete_all(
+ conn,
+ all_params(conn, %{"project_id" => project_id, "name" => name})
+ )
+
+ # -- Fallback -------------------------------------------------------------
+
+ defp route(conn, _version, _method, _path) do
+ conn
+ |> put_resp_content_type("application/json")
+ |> send_resp(404, Jason.encode!(%{error: "Not Found"}))
+ end
+
+ # -- Helpers --------------------------------------------------------------
+
+ defp body_with(conn, extra), do: Map.merge(conn.body_params, extra)
+
+ defp all_params(conn, extra) do
+ conn.query_params |> Map.merge(conn.body_params) |> Map.merge(extra)
+ end
+end
diff --git a/lib/lightning_web/router.ex b/lib/lightning_web/router.ex
index ac2ce518878..179eafb91b2 100644
--- a/lib/lightning_web/router.ex
+++ b/lib/lightning_web/router.ex
@@ -112,15 +112,9 @@ defmodule LightningWeb.Router do
end
## Collections
- scope "/collections", LightningWeb do
+ scope "/collections" do
pipe_through [:authenticated_api]
-
- get "/:name", CollectionsController, :stream
- get "/:name/:key", CollectionsController, :get
- put "/:name/:key", CollectionsController, :put
- post "/:name", CollectionsController, :put_all
- delete "/:name/:key", CollectionsController, :delete
- delete "/:name", CollectionsController, :delete_all
+ forward "/", LightningWeb.Plugs.CollectionsRouter
end
## Authentication routes
@@ -145,7 +139,11 @@ defmodule LightningWeb.Router do
post "/users/two-factor", UserTOTPController, :create
get "/setup_vcs", VersionControlController, :index
get "/download/yaml", DownloadsController, :download_project_yaml
- get "/download/collections/:name", CollectionsController, :download
+
+ get "/download/collections/:project_id/:name",
+ CollectionsController,
+ :download
+
get "/dataclip/body/:id", DataclipController, :show
get "/projects/:project_id/jobs/:job_id/dataclips",
diff --git a/priv/repo/migrations/20260410000000_scope_collection_name_uniqueness_to_project.exs b/priv/repo/migrations/20260410000000_scope_collection_name_uniqueness_to_project.exs
new file mode 100644
index 00000000000..cc21e88141f
--- /dev/null
+++ b/priv/repo/migrations/20260410000000_scope_collection_name_uniqueness_to_project.exs
@@ -0,0 +1,23 @@
+defmodule Lightning.Repo.Migrations.ScopeCollectionNameUniquenessToProject do
+ use Ecto.Migration
+
+ def up do
+ drop_if_exists unique_index(:collections, [:name])
+ create unique_index(:collections, [:project_id, :name])
+ end
+
+ def down do
+ dupes =
+ repo().query!("SELECT name FROM collections GROUP BY name HAVING count(*) > 1")
+
+ if dupes.num_rows > 0 do
+ raise Ecto.MigrationError,
+ message:
+ "Cannot rollback: #{dupes.num_rows} collection name(s) exist in multiple projects. " <>
+ "Remove duplicates before rolling back."
+ end
+
+ drop_if_exists unique_index(:collections, [:project_id, :name])
+ create unique_index(:collections, [:name])
+ end
+end
diff --git a/test/lightning/collections_test.exs b/test/lightning/collections_test.exs
index 02b35505a0d..beb99d97ac4 100644
--- a/test/lightning/collections_test.exs
+++ b/test/lightning/collections_test.exs
@@ -17,6 +17,37 @@ defmodule Lightning.CollectionsTest do
assert {:error, :not_found} =
Collections.get_collection("nonexistent")
end
+
+ test "returns a conflict error when the same name exists in multiple projects" do
+ name = "shared-name"
+ insert(:collection, name: name)
+ insert(:collection, name: name)
+
+ assert {:error, :conflict} = Collections.get_collection(name)
+ end
+ end
+
+ describe "get_collection/2" do
+ test "returns the collection for the given project" do
+ name = "shared-name"
+ %{id: project_id_1} = project_1 = insert(:project)
+ %{id: project_id_2} = project_2 = insert(:project)
+ %{id: id_1} = insert(:collection, name: name, project: project_1)
+ %{id: id_2} = insert(:collection, name: name, project: project_2)
+
+ assert {:ok, %Collection{id: ^id_1}} =
+ Collections.get_collection(project_id_1, name)
+
+ assert {:ok, %Collection{id: ^id_2}} =
+ Collections.get_collection(project_id_2, name)
+ end
+
+ test "returns not_found when the collection does not exist in the project" do
+ %{id: project_id} = insert(:project)
+
+ assert {:error, :not_found} =
+ Collections.get_collection(project_id, "nonexistent")
+ end
end
describe "create_collection/2" do
@@ -48,7 +79,7 @@ defmodule Lightning.CollectionsTest do
{"A collection with this name already exists",
[
constraint: :unique,
- constraint_name: "collections_name_index"
+ constraint_name: "collections_project_id_name_index"
]}
]
}} =
diff --git a/test/lightning/sandboxes_test.exs b/test/lightning/sandboxes_test.exs
index aaceac714af..3d7fd057566 100644
--- a/test/lightning/sandboxes_test.exs
+++ b/test/lightning/sandboxes_test.exs
@@ -547,6 +547,228 @@ defmodule Lightning.Projects.SandboxesTest do
end
end
+ describe "collections provisioning" do
+ test "clones empty collection records from parent into sandbox" do
+ actor = insert(:user)
+ parent = insert(:project)
+ ensure_member!(parent, actor, :owner)
+
+ insert(:collection, project: parent, name: "col-a")
+
+ insert(:collection,
+ project: parent,
+ name: "col-b",
+ items: [%{key: "k", value: "v"}]
+ )
+
+ {:ok, sandbox} = Sandboxes.provision(parent, actor, %{name: "sandbox-x"})
+
+ sandbox_collections =
+ Lightning.Collections.list_project_collections(sandbox)
+
+ assert Enum.map(sandbox_collections, & &1.name) |> Enum.sort() == [
+ "col-a",
+ "col-b"
+ ]
+
+ # Items are not copied
+ Enum.each(sandbox_collections, fn col ->
+ assert Lightning.Collections.get_all(
+ col,
+ %{cursor: nil, limit: 100},
+ nil
+ ) ==
+ []
+ end)
+ end
+
+ test "provisioning a parent with no collections creates no sandbox collections" do
+ actor = insert(:user)
+ parent = insert(:project)
+ ensure_member!(parent, actor, :owner)
+
+ {:ok, sandbox} = Sandboxes.provision(parent, actor, %{name: "sandbox-x"})
+
+ assert Lightning.Collections.list_project_collections(sandbox) == []
+ end
+
+ test "each sandbox gets its own copy of parent collections" do
+ actor = insert(:user)
+ parent = insert(:project)
+ ensure_member!(parent, actor, :owner)
+
+ insert(:collection, project: parent, name: "col-a")
+
+ {:ok, sandbox_1} = Sandboxes.provision(parent, actor, %{name: "sandbox-1"})
+ {:ok, sandbox_2} = Sandboxes.provision(parent, actor, %{name: "sandbox-2"})
+
+ assert length(Lightning.Collections.list_project_collections(sandbox_1)) ==
+ 1
+
+ assert length(Lightning.Collections.list_project_collections(sandbox_2)) ==
+ 1
+ end
+ end
+
+ describe "sync_collections/2" do
+ test "creates collections in target that exist in source but not target" do
+ source = insert(:project)
+ target = insert(:project)
+
+ insert(:collection, project: source, name: "shared")
+ insert(:collection, project: source, name: "only-in-source")
+ insert(:collection, project: target, name: "shared")
+
+ assert {:ok, %{created: 1, deleted: 0}} =
+ Sandboxes.sync_collections(source, target)
+
+ target_names =
+ target
+ |> Lightning.Collections.list_project_collections()
+ |> Enum.map(& &1.name)
+ |> Enum.sort()
+
+ assert target_names == ["only-in-source", "shared"]
+ end
+
+ test "deletes collections in target that are missing from source, including items" do
+ source = insert(:project)
+ target = insert(:project)
+
+ insert(:collection, project: source, name: "shared")
+ insert(:collection, project: target, name: "shared")
+
+ dropped =
+ insert(:collection,
+ project: target,
+ name: "only-in-target",
+ items: [%{key: "k", value: "v"}]
+ )
+
+ assert {:ok, %{created: 0, deleted: 1}} =
+ Sandboxes.sync_collections(source, target)
+
+ refute Lightning.Repo.get(Lightning.Collections.Collection, dropped.id)
+
+ # items belonging to the deleted collection are removed with it
+ assert Lightning.Repo.all(
+ from i in Lightning.Collections.Item,
+ where: i.collection_id == ^dropped.id
+ ) == []
+ end
+
+ test "is a no-op when both projects have the same collections" do
+ source = insert(:project)
+ target = insert(:project)
+
+ insert(:collection, project: source, name: "a")
+ insert(:collection, project: target, name: "a")
+
+ assert {:ok, %{created: 0, deleted: 0}} =
+ Sandboxes.sync_collections(source, target)
+ end
+
+ test "does not copy collection data across" do
+ source = insert(:project)
+ target = insert(:project)
+
+ insert(:collection,
+ project: source,
+ name: "with-data",
+ items: [%{key: "k", value: "v"}]
+ )
+
+ assert {:ok, %{created: 1, deleted: 0}} =
+ Sandboxes.sync_collections(source, target)
+
+ [new_collection] = Lightning.Collections.list_project_collections(target)
+
+ assert Lightning.Collections.get_all(
+ new_collection,
+ %{cursor: nil, limit: 100},
+ nil
+ ) == []
+ end
+
+ test "runs in a single transaction -- either everything or nothing" do
+ source = insert(:project)
+ target = insert(:project)
+
+ insert(:collection, project: source, name: "to-create")
+ insert(:collection, project: target, name: "to-delete")
+
+      # Simulate a mid-transaction failure by wrapping the call in a parent
+      # transaction and forcing a rollback after sync_collections/2 has done
+      # its work. Because the sync runs inside this outer transaction, all of
+      # its inserts and deletes must be rolled back together.
+ result =
+ Lightning.Repo.transaction(fn ->
+ {:ok, _summary} =
+ Sandboxes.sync_collections(source, target)
+
+ Lightning.Repo.rollback(:simulated_failure)
+ end)
+
+ assert result == {:error, :simulated_failure}
+
+ # Target state must be unchanged
+ target_names =
+ target
+ |> Lightning.Collections.list_project_collections()
+ |> Enum.map(& &1.name)
+
+ assert target_names == ["to-delete"]
+ end
+ end
+
+ describe "merge/4" do
+ test "imports the merge document and syncs collections" do
+ actor = insert(:user)
+ parent = insert(:project)
+ ensure_member!(parent, actor, :owner)
+
+ insert(:simple_workflow, project: parent)
+
+ sandbox =
+ insert(:project,
+ parent: parent,
+ project_users: [%{user: actor, role: :owner}]
+ )
+
+ insert(:simple_workflow, project: sandbox)
+
+ insert(:collection, project: sandbox, name: "new-col")
+
+ assert {:ok, _updated} = Sandboxes.merge(sandbox, parent, actor)
+
+ parent_names =
+ parent
+ |> Lightning.Collections.list_project_collections()
+ |> Enum.map(& &1.name)
+
+ assert "new-col" in parent_names
+ end
+
+ test "defaults opts to empty map" do
+ actor = insert(:user)
+ parent = insert(:project)
+ ensure_member!(parent, actor, :owner)
+
+ insert(:simple_workflow, project: parent)
+
+ sandbox =
+ insert(:project,
+ parent: parent,
+ project_users: [%{user: actor, role: :owner}]
+ )
+
+ insert(:simple_workflow, project: sandbox)
+
+ # Calling with 3 args exercises the \\ %{} default
+ assert {:ok, _updated} = Sandboxes.merge(sandbox, parent, actor)
+ end
+ end
+
describe "keychains" do
test "clones only used keychains and rewires jobs to cloned keychains" do
%{
diff --git a/test/lightning_web/collections_controller_test.exs b/test/lightning_web/collections_controller_test.exs
index 4451f58d2fe..039190dfb07 100644
--- a/test/lightning_web/collections_controller_test.exs
+++ b/test/lightning_web/collections_controller_test.exs
@@ -996,7 +996,7 @@ defmodule LightningWeb.API.CollectionsControllerTest do
end
end
- describe "GET /download/collections/:name" do
+ describe "GET /download/collections/:project_id/:name" do
setup :register_and_log_in_user
setup :create_project_for_current_user
@@ -1022,7 +1022,8 @@ defmodule LightningWeb.API.CollectionsControllerTest do
value: ~s({"name": "Bob"})
)
- response = get(conn, ~p"/download/collections/#{collection.name}")
+ response =
+ get(conn, ~p"/download/collections/#{project.id}/#{collection.name}")
assert response.status == 200
@@ -1049,7 +1050,8 @@ defmodule LightningWeb.API.CollectionsControllerTest do
} do
collection = insert(:collection, project: project)
- response = get(conn, ~p"/download/collections/#{collection.name}")
+ response =
+ get(conn, ~p"/download/collections/#{project.id}/#{collection.name}")
assert response.status == 200
assert Jason.decode!(response.resp_body) == []
@@ -1059,18 +1061,433 @@ defmodule LightningWeb.API.CollectionsControllerTest do
other_project = insert(:project)
collection = insert(:collection, project: other_project)
- response = get(conn, ~p"/download/collections/#{collection.name}")
+ response =
+ get(
+ conn,
+ ~p"/download/collections/#{other_project.id}/#{collection.name}"
+ )
assert response.status == 401
end
- test "returns 404 for non-existent collection", %{conn: conn} do
- response = get(conn, ~p"/download/collections/non-existent")
+ test "returns 404 for non-existent collection", %{
+ conn: conn,
+ project: project
+ } do
+ response = get(conn, ~p"/download/collections/#{project.id}/non-existent")
assert response.status == 404
end
end
+ describe "api version header" do
+ setup %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+ token = Lightning.Accounts.generate_api_token(user)
+ conn = assign_bearer(conn, token)
+ {:ok, conn: conn, project: project, collection: collection}
+ end
+
+ test "returns 400 for an unsupported version", %{
+ conn: conn,
+ collection: collection
+ } do
+ conn =
+ conn
+ |> put_req_header("x-api-version", "99")
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 400)["error"] =~ "Unsupported API version"
+ end
+
+ test "returns 400 for a garbage version value", %{
+ conn: conn,
+ collection: collection
+ } do
+ conn =
+ conn
+ |> put_req_header("x-api-version", "not-a-version")
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 400)["error"] =~ "Unsupported API version"
+ end
+
+ test "treats no header as v1", %{conn: conn, collection: collection} do
+ conn = get(conn, ~p"/collections/#{collection.name}")
+ assert json_response(conn, 200)["items"] == []
+ end
+
+ test "treats an explicit v1 header as v1", %{
+ conn: conn,
+ collection: collection
+ } do
+ conn =
+ conn
+ |> put_req_header("x-api-version", "1")
+ |> get(~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 200)["items"] == []
+ end
+
+ test "returns 400 when multiple x-api-version headers are sent", %{
+ conn: conn,
+ collection: collection
+ } do
+      # Plug.Conn.put_req_header/3 replaces any existing value, so append to
+      # req_headers directly to simulate a client sending the header twice.
+ conn =
+ update_in(conn.req_headers, fn headers ->
+ headers ++ [{"x-api-version", "1"}, {"x-api-version", "2"}]
+ end)
+
+ conn = get(conn, ~p"/collections/#{collection.name}")
+
+ assert json_response(conn, 400)["error"] =~ "Unsupported API version"
+ end
+ end
+
+ describe "unsupported paths and methods" do
+ setup %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+ token = Lightning.Accounts.generate_api_token(user)
+ conn = assign_bearer(conn, token)
+ {:ok, conn: conn, project: project, collection: collection}
+ end
+
+ test "v1 returns 404 for unsupported HTTP methods", %{
+ conn: conn,
+ collection: collection
+ } do
+ conn = patch(conn, ~p"/collections/#{collection.name}")
+ assert json_response(conn, 404)["error"] == "Not Found"
+ end
+
+ test "v1 returns 404 for unknown path shapes", %{conn: conn} do
+ conn = get(conn, "/collections/a/b/c/d")
+ assert json_response(conn, 404)["error"] == "Not Found"
+ end
+
+ test "v2 returns 404 for unsupported HTTP methods", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn =
+ conn
+ |> put_req_header("x-api-version", "2")
+ |> patch(~p"/collections/#{project.id}/#{collection.name}")
+
+ assert json_response(conn, 404)["error"] == "Not Found"
+ end
+
+ test "v2 returns 404 for unknown path shapes", %{conn: conn} do
+ conn =
+ conn
+ |> put_req_header("x-api-version", "2")
+ |> get("/collections/a/b/c/d/e")
+
+ assert json_response(conn, 404)["error"] == "Not Found"
+ end
+ end
+
+ describe "malformed request bodies" do
+ setup %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+ collection = insert(:collection, project: project)
+ token = Lightning.Accounts.generate_api_token(user)
+ conn = assign_bearer(conn, token)
+ {:ok, conn: conn, project: project, collection: collection}
+ end
+
+ test "PUT with no value returns 422", %{conn: conn, collection: collection} do
+ conn = put(conn, ~p"/collections/#{collection.name}/some-key", %{})
+
+ assert json_response(conn, 422)["error"] =~ "Missing required field: value"
+ end
+
+ test "POST with no items returns 422", %{conn: conn, collection: collection} do
+ conn = post(conn, ~p"/collections/#{collection.name}", %{})
+
+ assert json_response(conn, 422)["error"] =~ "Missing required field: items"
+ end
+ end
+
+ describe "v1 conflict — same collection name in multiple projects" do
+ setup %{conn: conn} do
+ user = insert(:user)
+ project_1 = insert(:project, project_users: [%{user: user}])
+ project_2 = insert(:project, project_users: [%{user: user}])
+ name = "shared-collection"
+ insert(:collection, name: name, project: project_1)
+ insert(:collection, name: name, project: project_2)
+ token = Lightning.Accounts.generate_api_token(user)
+ conn = assign_bearer(conn, token)
+ {:ok, conn: conn, name: name}
+ end
+
+ test "GET /:name returns 409", %{conn: conn, name: name} do
+ conn = get(conn, ~p"/collections/#{name}")
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+
+ test "GET /:name/:key returns 409", %{conn: conn, name: name} do
+ conn = get(conn, ~p"/collections/#{name}/some-key")
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+
+ test "PUT /:name/:key returns 409", %{conn: conn, name: name} do
+ conn = put(conn, ~p"/collections/#{name}/some-key", value: "val")
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+
+ test "POST /:name returns 409", %{conn: conn, name: name} do
+ conn = post(conn, ~p"/collections/#{name}", items: [])
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+
+ test "DELETE /:name/:key returns 409", %{conn: conn, name: name} do
+ conn = delete(conn, ~p"/collections/#{name}/some-key")
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+
+ test "DELETE /:name returns 409", %{conn: conn, name: name} do
+ conn = delete(conn, ~p"/collections/#{name}")
+ assert json_response(conn, 409)["error"] =~ "Use API v2"
+ end
+ end
+
+ describe "v2 API" do
+ setup %{conn: conn} do
+ user = insert(:user)
+ project = insert(:project, project_users: [%{user: user}])
+
+ collection =
+ insert(:collection,
+ project: project,
+ items: [%{key: "foo", value: "bar"}]
+ )
+
+ token = Lightning.Accounts.generate_api_token(user)
+ conn = assign_bearer(conn, token) |> put_req_header("x-api-version", "2")
+ {:ok, conn: conn, project: project, collection: collection}
+ end
+
+ test "GET /:project_id/:name/:key returns the item", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ item = hd(collection.items)
+
+ conn = get(conn, ~p"/collections/#{project.id}/#{collection.name}/foo")
+
+ assert json_response(conn, 200) == %{
+ "key" => item.key,
+ "value" => item.value,
+ "created" => DateTime.to_iso8601(item.inserted_at),
+ "updated" => DateTime.to_iso8601(item.updated_at)
+ }
+ end
+
+ test "GET /:project_id/:name/:key returns 204 when item not found", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn =
+ get(conn, ~p"/collections/#{project.id}/#{collection.name}/nonexistent")
+
+ assert response(conn, 204) == ""
+ end
+
+ test "GET /:project_id/:name/:key returns 404 when collection not found", %{
+ conn: conn,
+ project: project
+ } do
+ conn = get(conn, ~p"/collections/#{project.id}/nonexistent-collection/foo")
+ assert json_response(conn, 404)
+ end
+
+ test "GET /:project_id/:name streams items", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn = get(conn, ~p"/collections/#{project.id}/#{collection.name}")
+ body = json_response(conn, 200)
+ assert length(body["items"]) == 1
+ assert hd(body["items"])["key"] == "foo"
+ end
+
+ test "PUT /:project_id/:name/:key upserts an item", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn =
+ put(conn, ~p"/collections/#{project.id}/#{collection.name}/new-key",
+ value: "new-val"
+ )
+
+ assert json_response(conn, 200) == %{"upserted" => 1, "error" => nil}
+ end
+
+ test "POST /:project_id/:name upserts multiple items", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn =
+ post(conn, ~p"/collections/#{project.id}/#{collection.name}",
+ items: [%{key: "a", value: "1"}, %{key: "b", value: "2"}]
+ )
+
+ assert json_response(conn, 200) == %{"upserted" => 2, "error" => nil}
+ end
+
+ test "DELETE /:project_id/:name/:key deletes an item", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn = delete(conn, ~p"/collections/#{project.id}/#{collection.name}/foo")
+
+ assert json_response(conn, 200) == %{
+ "key" => "foo",
+ "deleted" => 1,
+ "error" => nil
+ }
+ end
+
+ test "DELETE /:project_id/:name deletes all items", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ conn = delete(conn, ~p"/collections/#{project.id}/#{collection.name}")
+ assert json_response(conn, 200)["deleted"] == 1
+ end
+
+ test "returns 401 when user does not have access to the project", %{
+ conn: conn
+ } do
+ other_project = insert(:project)
+ other_collection = insert(:collection, project: other_project)
+
+ conn =
+ get(
+ conn,
+ ~p"/collections/#{other_project.id}/#{other_collection.name}/foo"
+ )
+
+ assert json_response(conn, 401)
+ end
+
+ test "resolves correctly by project_id when same name exists in multiple projects",
+ %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ other_project = insert(:project, project_users: [])
+ insert(:collection, name: collection.name, project: other_project)
+
+      # v2 scopes lookups by project_id, so the duplicate name elsewhere must not interfere
+ conn = get(conn, ~p"/collections/#{project.id}/#{collection.name}/foo")
+ assert json_response(conn, 200)["key"] == "foo"
+ end
+
+ test "with a valid run token, can access own project's collection", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ workflow = insert(:simple_workflow, project: project)
+
+ workorder =
+ insert(:workorder, workflow: workflow, dataclip: insert(:dataclip))
+
+ run =
+ insert(:run,
+ work_order: workorder,
+ dataclip: workorder.dataclip,
+ starting_trigger: hd(workflow.triggers)
+ )
+
+ token = Lightning.Workers.generate_run_token(run)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{project.id}/#{collection.name}/foo")
+
+ assert json_response(conn, 200)["key"] == "foo"
+ end
+
+ test "with a valid run token, can write to own project's collection", %{
+ conn: conn,
+ project: project,
+ collection: collection
+ } do
+ workflow = insert(:simple_workflow, project: project)
+
+ workorder =
+ insert(:workorder, workflow: workflow, dataclip: insert(:dataclip))
+
+ run =
+ insert(:run,
+ work_order: workorder,
+ dataclip: workorder.dataclip,
+ starting_trigger: hd(workflow.triggers)
+ )
+
+ token = Lightning.Workers.generate_run_token(run)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> put(
+ ~p"/collections/#{project.id}/#{collection.name}/new-key",
+ value: "new-val"
+ )
+
+ assert json_response(conn, 200) == %{"upserted" => 1, "error" => nil}
+ end
+
+ test "with a run token, cannot access a different project's collection", %{
+ conn: conn
+ } do
+ other_project = insert(:project)
+ other_collection = insert(:collection, project: other_project)
+
+ workflow = insert(:simple_workflow)
+
+ workorder =
+ insert(:workorder, workflow: workflow, dataclip: insert(:dataclip))
+
+ run =
+ insert(:run,
+ work_order: workorder,
+ dataclip: workorder.dataclip,
+ starting_trigger: hd(workflow.triggers)
+ )
+
+ token = Lightning.Workers.generate_run_token(run)
+
+ conn =
+ conn
+ |> assign_bearer(token)
+ |> get(~p"/collections/#{other_project.id}/#{other_collection.name}/foo")
+
+ assert json_response(conn, 401)
+ end
+ end
+
defp encode_decode(item) do
item
|> Jason.encode!()
diff --git a/test/lightning_web/live/sandbox_live/index_test.exs b/test/lightning_web/live/sandbox_live/index_test.exs
index 75a1e038430..2163e501ae3 100644
--- a/test/lightning_web/live/sandbox_live/index_test.exs
+++ b/test/lightning_web/live/sandbox_live/index_test.exs
@@ -1482,6 +1482,154 @@ defmodule LightningWeb.SandboxLive.IndexTest do
end
end
+ describe "collection sync on merge" do
+ setup :register_and_log_in_user
+
+ setup %{user: user} do
+ root =
+ insert(:project,
+ name: "root",
+ project_users: [%{user: user, role: :owner}]
+ )
+
+ sandbox =
+ insert(:project,
+ name: "sandbox",
+ parent: root,
+ project_users: [%{user: user, role: :owner}]
+ )
+
+ {:ok, root: root, sandbox: sandbox}
+ end
+
+ defp mock_provisioner_ok(target) do
+ Mimic.expect(Lightning.Projects.MergeProjects, :merge_project, fn _src,
+ _tgt,
+ _opts ->
+ "merged_yaml"
+ end)
+
+ Mimic.expect(Lightning.Projects.Provisioner, :import_document, fn _tgt,
+ _actor,
+ _yaml,
+ _opts ->
+ {:ok, target}
+ end)
+
+ Mimic.expect(Lightning.Projects, :delete_sandbox, fn source, _actor ->
+ {:ok, source}
+ end)
+ end
+
+ test "new collections in sandbox are created in parent on merge", %{
+ conn: conn,
+ root: root,
+ sandbox: sandbox
+ } do
+ insert(:collection, project: sandbox, name: "new-col")
+
+ {:ok, view, _} = live(conn, ~p"/projects/#{root.id}/sandboxes")
+ mock_provisioner_ok(root)
+
+ Mimic.allow(Lightning.Projects.MergeProjects, self(), view.pid)
+ Mimic.allow(Lightning.Projects.Provisioner, self(), view.pid)
+ Mimic.allow(Lightning.Projects, self(), view.pid)
+
+ view
+ |> element("#branch-rewire-sandbox-#{sandbox.id} button")
+ |> render_click()
+
+ view |> form("#merge-sandbox-modal form") |> render_submit()
+
+ parent_names =
+ Lightning.Collections.list_project_collections(root)
+ |> Enum.map(& &1.name)
+
+ assert "new-col" in parent_names
+ end
+
+ test "collections deleted from sandbox are removed from parent on merge", %{
+ conn: conn,
+ root: root,
+ sandbox: sandbox
+ } do
+ # Parent has a collection, sandbox does not
+ insert(:collection, project: root, name: "to-delete")
+
+ {:ok, view, _} = live(conn, ~p"/projects/#{root.id}/sandboxes")
+ mock_provisioner_ok(root)
+
+ Mimic.allow(Lightning.Projects.MergeProjects, self(), view.pid)
+ Mimic.allow(Lightning.Projects.Provisioner, self(), view.pid)
+ Mimic.allow(Lightning.Projects, self(), view.pid)
+
+ view
+ |> element("#branch-rewire-sandbox-#{sandbox.id} button")
+ |> render_click()
+
+ view |> form("#merge-sandbox-modal form") |> render_submit()
+
+ parent_names =
+ Lightning.Collections.list_project_collections(root)
+ |> Enum.map(& &1.name)
+
+ refute "to-delete" in parent_names
+ end
+
+ test "collections present in both are unchanged after merge", %{
+ conn: conn,
+ root: root,
+ sandbox: sandbox
+ } do
+ insert(:collection, project: root, name: "shared")
+ insert(:collection, project: sandbox, name: "shared")
+
+ {:ok, view, _} = live(conn, ~p"/projects/#{root.id}/sandboxes")
+ mock_provisioner_ok(root)
+
+ Mimic.allow(Lightning.Projects.MergeProjects, self(), view.pid)
+ Mimic.allow(Lightning.Projects.Provisioner, self(), view.pid)
+ Mimic.allow(Lightning.Projects, self(), view.pid)
+
+ view
+ |> element("#branch-rewire-sandbox-#{sandbox.id} button")
+ |> render_click()
+
+ view |> form("#merge-sandbox-modal form") |> render_submit()
+
+ parent_collections = Lightning.Collections.list_project_collections(root)
+ assert length(parent_collections) == 1
+ assert hd(parent_collections).name == "shared"
+ end
+
+ test "merge fails with flash error when collection sync fails", %{
+ conn: conn,
+ root: root,
+ sandbox: sandbox
+ } do
+ {:ok, view, _} = live(conn, ~p"/projects/#{root.id}/sandboxes")
+
+ Mimic.expect(Lightning.Projects.Sandboxes, :merge, fn _src,
+ _tgt,
+ _actor,
+ _opts ->
+ {:error, "Failed to sync collections: :boom"}
+ end)
+
+ Mimic.allow(Lightning.Projects.Sandboxes, self(), view.pid)
+
+ view
+ |> element("#branch-rewire-sandbox-#{sandbox.id} button")
+ |> render_click()
+
+ view |> form("#merge-sandbox-modal form") |> render_submit()
+
+ html = render(view)
+ assert html =~ "Failed to sync collections"
+ refute has_element?(view, "#merge-sandbox-modal")
+ end
+ end
+
describe "Merge modal authorization" do
setup :register_and_log_in_user