diff --git a/.gitignore b/.gitignore
index f8ff37ac20..f32ebcacfa 100644
--- a/.gitignore
+++ b/.gitignore
@@ -81,6 +81,7 @@ priv/openfn
.dev.env
.dev.override.env
.test.override.env
+.envrc
worktrees
.docker-cache
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f19e179e67..0bf7bfd791 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -39,6 +39,15 @@ and this project adheres to
- Ability to filter work orders and runs via REST API by UUIDs or status; added
example curl requests to REST API docs.
[#4552](https://github.com/OpenFn/lightning/issues/4552)
+- Channel request detail page, reached by clicking a row in the channel history
+ table. Shows a client / destination / timing summary, a nested timing
+ visualization with per-phase breakdown and TTFB marker, foldable request and
+ response headers and body, and humanized transport and credential errors.
+ Captures richer request metadata (query string, body sizes, per-direction
+ durations, Finch phase timings) and attributes both the matched client webhook
+ auth method and the destination project credential on every proxied request.
+ Feature-gated behind experimental features.
+ [#4541](https://github.com/OpenFn/lightning/issues/4541)
### Changed
@@ -49,6 +58,10 @@ and this project adheres to
- Worker plan payload now includes `project_id` so workers can scope callbacks
(e.g. the collections API) to the project that owns the run.
- bumped local worker to 1.24.0
+- Channel timing fields are now stored in microseconds (previously milliseconds)
+ and request and response headers are stored as native jsonb on
+ `channel_events`. Handler adapted to Philter 0.3.0 timing map.
+ [#4541](https://github.com/OpenFn/lightning/issues/4541)
### Fixed
diff --git a/benchmarking/channels/mock_destination.exs b/benchmarking/channels/mock_destination.exs
index fc2d7b8628..3ca4d2ba35 100644
--- a/benchmarking/channels/mock_destination.exs
+++ b/benchmarking/channels/mock_destination.exs
@@ -143,21 +143,33 @@ defmodule MockDestination.Body do
"""
def generate(body_size) when body_size <= 1024 do
+ # Build the envelope once with an empty padding to measure overhead.
+ envelope =
+ Jason.encode!(%{
+ ok: true,
+ server: "mock_destination",
+ timestamp: DateTime.to_iso8601(DateTime.utc_now()),
+ padding: ""
+ })
+
+ overhead = byte_size(envelope)
+ padding_len = max(body_size - overhead, 0)
+
json =
Jason.encode!(%{
ok: true,
server: "mock_destination",
timestamp: DateTime.to_iso8601(DateTime.utc_now()),
- padding: String.duplicate("x", max(body_size - 80, 0))
+ padding: String.duplicate("x", padding_len)
})
- # Trim or pad to reach the target size exactly.
- byte_size = byte_size(json)
+ # Fine-tune to the exact target size.
+ actual = byte_size(json)
cond do
- byte_size == body_size -> json
- byte_size > body_size -> binary_part(json, 0, body_size)
- true -> json <> String.duplicate(" ", body_size - byte_size)
+ actual == body_size -> json
+ actual > body_size -> binary_part(json, 0, body_size)
+ true -> json <> String.duplicate(" ", body_size - actual)
end
end
diff --git a/lib/lightning/channels.ex b/lib/lightning/channels.ex
index 904dc1597b..ed1937ebe7 100644
--- a/lib/lightning/channels.ex
+++ b/lib/lightning/channels.ex
@@ -194,8 +194,11 @@ defmodule Lightning.Channels do
Multi.new()
|> Multi.insert(:channel, changeset)
- |> Multi.insert(:audit, fn %{channel: channel} ->
- Audit.event("created", channel.id, actor, changeset)
+ |> Multi.run(:audit, fn _repo, %{channel: channel} ->
+ case Audit.event("created", channel.id, actor, changeset) do
+ :no_changes -> {:ok, :no_changes}
+ %Ecto.Changeset{} = audit_cs -> Repo.insert(audit_cs)
+ end
end)
|> Audit.audit_auth_method_changes(changeset, actor)
|> Repo.transaction()
@@ -215,8 +218,11 @@ defmodule Lightning.Channels do
Multi.new()
|> Multi.update(:channel, changeset, stale_error_field: :lock_version)
- |> Multi.insert(:audit, fn %{channel: updated} ->
- Audit.event("updated", updated.id, actor, changeset)
+ |> Multi.run(:audit, fn _repo, %{channel: updated} ->
+ case Audit.event("updated", updated.id, actor, changeset) do
+ :no_changes -> {:ok, :no_changes}
+ %Ecto.Changeset{} = audit_cs -> Repo.insert(audit_cs)
+ end
end)
|> Audit.audit_auth_method_changes(changeset, actor)
|> Repo.transaction()
@@ -441,4 +447,38 @@ defmodule Lightning.Channels do
{total, nil}
end
+
+ @doc """
+ Returns a channel request with preloads, scoped to the given project.
+
+ Returns `nil` if the request doesn't exist, belongs to a different project,
+ or the ID is not a valid UUID.
+
+ Preloads: `channel_events`, `channel`, `channel_snapshot`,
+ `client_webhook_auth_method`, and `destination_credential` (with its
+ `credential` for display).
+ """
+ @spec get_channel_request_for_project(Ecto.UUID.t(), String.t()) ::
+ ChannelRequest.t() | nil
+ def get_channel_request_for_project(project_id, request_id) do
+ case Ecto.UUID.cast(request_id) do
+ {:ok, uuid} ->
+ from(cr in ChannelRequest,
+ join: c in Channel,
+ on: cr.channel_id == c.id,
+ where: cr.id == ^uuid and c.project_id == ^project_id,
+ preload: [
+ :channel_events,
+ :channel,
+ :channel_snapshot,
+ :client_webhook_auth_method,
+ destination_credential: :credential
+ ]
+ )
+ |> Repo.one()
+
+ :error ->
+ nil
+ end
+ end
end
diff --git a/lib/lightning/channels/channel_event.ex b/lib/lightning/channels/channel_event.ex
index 16db6d5104..0d583a4665 100644
--- a/lib/lightning/channels/channel_event.ex
+++ b/lib/lightning/channels/channel_event.ex
@@ -23,15 +23,23 @@ defmodule Lightning.Channels.ChannelEvent do
type: :destination_response | :error,
request_method: String.t() | nil,
request_path: String.t() | nil,
- request_headers: String.t() | nil,
+ request_query_string: String.t() | nil,
+ request_headers: list() | nil,
request_body_preview: String.t() | nil,
request_body_hash: String.t() | nil,
+ request_body_size: integer() | nil,
response_status: integer() | nil,
- response_headers: String.t() | nil,
+ response_headers: list() | nil,
response_body_preview: String.t() | nil,
response_body_hash: String.t() | nil,
- latency_ms: integer() | nil,
- ttfb_ms: integer() | nil,
+ response_body_size: integer() | nil,
+ latency_us: integer() | nil,
+ ttfb_us: integer() | nil,
+ request_send_us: integer() | nil,
+ response_duration_us: integer() | nil,
+ queue_us: integer() | nil,
+ connect_us: integer() | nil,
+ reused_connection: boolean() | nil,
error_message: String.t() | nil,
inserted_at: DateTime.t()
}
@@ -41,17 +49,25 @@ defmodule Lightning.Channels.ChannelEvent do
field :request_method, :string
field :request_path, :string
- field :request_headers, :string
+ field :request_query_string, :string
+ field :request_headers, {:array, {:array, :string}}
field :request_body_preview, :string
field :request_body_hash, :string
+ field :request_body_size, :integer
field :response_status, :integer
- field :response_headers, :string
+ field :response_headers, {:array, {:array, :string}}
field :response_body_preview, :string
field :response_body_hash, :string
+ field :response_body_size, :integer
- field :latency_ms, :integer
- field :ttfb_ms, :integer
+ field :latency_us, :integer
+ field :ttfb_us, :integer
+ field :request_send_us, :integer
+ field :response_duration_us, :integer
+ field :queue_us, :integer
+ field :connect_us, :integer
+ field :reused_connection, :boolean
field :error_message, :string
belongs_to :channel_request, ChannelRequest
@@ -61,22 +77,34 @@ defmodule Lightning.Channels.ChannelEvent do
def changeset(event, attrs) do
event
- |> cast(attrs, [
- :channel_request_id,
- :type,
- :request_method,
- :request_path,
- :request_headers,
- :request_body_preview,
- :request_body_hash,
- :response_status,
- :response_headers,
- :response_body_preview,
- :response_body_hash,
- :latency_ms,
- :ttfb_ms,
- :error_message
- ])
+ |> cast(
+ attrs,
+ [
+ :channel_request_id,
+ :type,
+ :request_method,
+ :request_path,
+ :request_query_string,
+ :request_headers,
+ :request_body_preview,
+ :request_body_hash,
+ :request_body_size,
+ :response_status,
+ :response_headers,
+ :response_body_preview,
+ :response_body_hash,
+ :response_body_size,
+ :latency_us,
+ :ttfb_us,
+ :request_send_us,
+ :response_duration_us,
+ :queue_us,
+ :connect_us,
+ :reused_connection,
+ :error_message
+ ],
+ empty_values: []
+ )
|> validate_required([:channel_request_id, :type])
|> assoc_constraint(:channel_request)
end
diff --git a/lib/lightning/channels/channel_request.ex b/lib/lightning/channels/channel_request.ex
index 97cf998971..ec84fe7670 100644
--- a/lib/lightning/channels/channel_request.ex
+++ b/lib/lightning/channels/channel_request.ex
@@ -8,6 +8,8 @@ defmodule Lightning.Channels.ChannelRequest do
alias Lightning.Channels.Channel
alias Lightning.Channels.ChannelEvent
alias Lightning.Channels.ChannelSnapshot
+ alias Lightning.Projects.ProjectCredential
+ alias Lightning.Workflows.WebhookAuthMethod
@type t :: %__MODULE__{
id: Ecto.UUID.t(),
@@ -15,6 +17,9 @@ defmodule Lightning.Channels.ChannelRequest do
channel_snapshot_id: Ecto.UUID.t(),
request_id: String.t(),
client_identity: String.t() | nil,
+ client_webhook_auth_method_id: Ecto.UUID.t() | nil,
+ client_auth_type: String.t() | nil,
+ destination_credential_id: Ecto.UUID.t() | nil,
state: :pending | :success | :failed | :timeout | :error,
started_at: DateTime.t(),
completed_at: DateTime.t() | nil
@@ -23,6 +28,7 @@ defmodule Lightning.Channels.ChannelRequest do
schema "channel_requests" do
field :request_id, :string
field :client_identity, :string
+ field :client_auth_type, :string
field :state, Ecto.Enum,
values: [:pending, :success, :failed, :timeout, :error]
@@ -32,6 +38,8 @@ defmodule Lightning.Channels.ChannelRequest do
belongs_to :channel, Channel
belongs_to :channel_snapshot, ChannelSnapshot
+ belongs_to :client_webhook_auth_method, WebhookAuthMethod
+ belongs_to :destination_credential, ProjectCredential
has_many :channel_events, ChannelEvent
end
@@ -43,6 +51,9 @@ defmodule Lightning.Channels.ChannelRequest do
:channel_snapshot_id,
:request_id,
:client_identity,
+ :client_webhook_auth_method_id,
+ :client_auth_type,
+ :destination_credential_id,
:state,
:started_at,
:completed_at
diff --git a/lib/lightning/channels/handler.ex b/lib/lightning/channels/handler.ex
index 4d62f01af2..c7eb8a220f 100644
--- a/lib/lightning/channels/handler.ex
+++ b/lib/lightning/channels/handler.ex
@@ -60,6 +60,10 @@ defmodule Lightning.Channels.Handler do
channel_snapshot_id: state.snapshot.id,
request_id: state.request_id,
client_identity: state.client_identity,
+ client_webhook_auth_method_id:
+ Map.get(state, :client_webhook_auth_method_id),
+ client_auth_type: Map.get(state, :client_auth_type),
+ destination_credential_id: Map.get(state, :destination_credential_id),
state: :pending,
started_at: state.started_at
}
@@ -105,15 +109,23 @@ defmodule Lightning.Channels.Handler do
type: event_type,
request_method: state.request_method,
request_path: state.request_path,
+ request_query_string: Map.get(state, :query_string),
request_headers: encode_headers(state.request_headers),
request_body_preview: get_in(result, [:request_observation, :preview]),
request_body_hash: get_in(result, [:request_observation, :hash]),
+ request_body_size: get_in(result, [:request_observation, :size]),
response_status: result.status,
response_headers: encode_headers(Map.get(state, :response_headers)),
response_body_preview: get_in(result, [:response_observation, :preview]),
response_body_hash: get_in(result, [:response_observation, :hash]),
- latency_ms: div(result.duration_us, 1000),
- ttfb_ms: state |> Map.get(:ttfb_us) |> maybe_div(1000),
+ response_body_size: get_in(result, [:response_observation, :size]),
+ latency_us: result.timing.total_us,
+ ttfb_us: Map.get(state, :ttfb_us),
+ request_send_us: get_in(result, [:timing, :send_us]),
+ response_duration_us: get_in(result, [:timing, :recv_us]),
+ queue_us: get_in(result, [:timing, :queue_us]),
+ connect_us: get_in(result, [:timing, :connect_us]),
+ reused_connection: get_in(result, [:timing, :reused_connection]),
error_message: if(result.error, do: classify_error(result.error))
}
@@ -175,12 +187,11 @@ defmodule Lightning.Channels.Handler do
defp encode_headers(nil), do: nil
- # Encodes as array-of-pairs rather than a map because HTTP allows
+ # Returns as array-of-pairs rather than a map because HTTP allows
# duplicate header keys (e.g. multiple Set-Cookie headers).
+ # Stored as native jsonb in the database.
defp encode_headers(headers) do
- headers
- |> Enum.map(fn {k, v} -> [k, v] end)
- |> Jason.encode!()
+ Enum.map(headers, fn {k, v} -> [k, v] end)
end
defp classify_error({:timeout, :connect_timeout}), do: "connect_timeout"
@@ -192,7 +203,4 @@ defmodule Lightning.Channels.Handler do
do: Atom.to_string(reason)
defp classify_error(error), do: inspect(error)
-
- defp maybe_div(nil, _), do: nil
- defp maybe_div(us, divisor), do: div(us, divisor)
end
diff --git a/lib/lightning_web/live/channel_live/form_component.ex b/lib/lightning_web/live/channel_live/form_component.ex
index 445756f7f0..7042271f8c 100644
--- a/lib/lightning_web/live/channel_live/form_component.ex
+++ b/lib/lightning_web/live/channel_live/form_component.ex
@@ -132,6 +132,8 @@ defmodule LightningWeb.ChannelLive.FormComponent do
<.input field={f[:name]} label="Name" type="text" phx-debounce="300" />
+ <.input field={f[:enabled]} label="Enabled" type="toggle" />
+
<.input
field={f[:destination_url]}
@@ -176,8 +178,6 @@ defmodule LightningWeb.ChannelLive.FormComponent do
- <.input field={f[:enabled]} label="Enabled" type="toggle" />
-
diff --git a/lib/lightning_web/live/channel_request_live/components.ex b/lib/lightning_web/live/channel_request_live/components.ex
new file mode 100644
index 0000000000..5036accc25
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/components.ex
@@ -0,0 +1,368 @@
+defmodule LightningWeb.ChannelRequestLive.Components do
+ @moduledoc """
+ Reusable function components for the channel request detail page.
+
+ Provides layout primitives (disclosure sections), HTTP display atoms
+ (method badges, status codes), and content viewers (headers, body)
+ used across multiple sections.
+ """
+
+ use LightningWeb, :component
+
+ import LightningWeb.RunLive.Components, only: [channel_state_pill: 1]
+
+ alias LightningWeb.ChannelRequestLive.Helpers
+ alias Phoenix.LiveView.JS
+
+ # --- Layout primitives ---
+
+ def disclosure_section(assigns) do
+ assigns =
+ assigns
+ |> assign_new(:title_right, fn -> [] end)
+ |> assign_new(:padded, fn -> true end)
+
+ ~H"""
+
+
+
+ {render_slot(@inner_block)}
+
+
+ """
+ end
+
+ def sub_section(assigns) do
+ assigns = assign_new(assigns, :title_right, fn -> [] end)
+
+ ~H"""
+
+
+
+ {render_slot(@inner_block)}
+
+
+ """
+ end
+
+ # --- HTTP display atoms ---
+
+ def method_badge(assigns) do
+ color_class =
+ case assigns.method do
+ "GET" -> "bg-blue-100 text-blue-800"
+ "POST" -> "bg-green-100 text-green-800"
+ "PUT" -> "bg-amber-100 text-amber-800"
+ "PATCH" -> "bg-amber-100 text-amber-800"
+ "DELETE" -> "bg-red-100 text-red-800"
+ _ -> "bg-secondary-100 text-secondary-800"
+ end
+
+ assigns = assign(assigns, color_class: color_class)
+
+ ~H"""
+
+ {@method || "—"}
+
+ """
+ end
+
+ def request_path_display(assigns) do
+ ~H"""
+
+ {@event && @event.request_path}
+
+ ?{@event.request_query_string}
+
+
+ """
+ end
+
+ def status_code_display(assigns) do
+ color_class =
+ cond do
+ is_nil(assigns.status) -> "text-secondary-400"
+ assigns.status >= 500 -> "text-red-700 bg-red-50"
+ assigns.status >= 400 -> "text-amber-700 bg-amber-50"
+ assigns.status >= 300 -> "text-blue-700 bg-blue-50"
+ assigns.status >= 200 -> "text-green-700 bg-green-50"
+ true -> "text-secondary-400"
+ end
+
+ assigns = assign(assigns, color_class: color_class)
+
+ ~H"""
+
+ {if @status, do: to_string(@status), else: "—"}
+
+ """
+ end
+
+ def status_code_badge(assigns) do
+ color_class =
+ case assigns.status do
+ s when s >= 200 and s < 300 -> "bg-green-100 text-green-700"
+ s when s >= 300 and s < 400 -> "bg-blue-100 text-blue-700"
+ s when s >= 400 and s < 500 -> "bg-amber-100 text-amber-700"
+ s when s >= 500 -> "bg-red-100 text-red-700"
+ _ -> "bg-secondary-100 text-secondary-700"
+ end
+
+ assigns = assign(assigns, color_class: color_class)
+
+ ~H"""
+
+ {@status}
+
+ """
+ end
+
+ def state_pill_with_tooltip(assigns) do
+ ~H"""
+ <%= if @state == :timeout and @error_message do %>
+
+ <.channel_state_pill state={@state} />
+
+ <% else %>
+ <.channel_state_pill state={@state} />
+ <% end %>
+ """
+ end
+
+ def response_empty(assigns) do
+ {icon, label} =
+ case assigns.type do
+ :transport ->
+ {"hero-exclamation-triangle", "No response received"}
+
+ :credential ->
+ {"hero-lock-closed", "Request not sent — credential error"}
+ end
+
+ assigns = assign(assigns, icon: icon, label: label)
+
+ ~H"""
+
+
+ <.icon name={@icon} class="h-8 w-8 mb-3 text-secondary-400" />
+
{@label}
+
{@human_message}
+
+ {@error_code}
+
+
+
+ """
+ end
+
+ # --- Content display ---
+
+ def headers_table(assigns) do
+ ~H"""
+
+
+
+ |
+ {name}
+ |
+
+ {value}
+ |
+
+
+
+ """
+ end
+
+ def body_viewer(assigns) do
+ content_type = Helpers.extract_content_type(assigns.headers)
+ is_binary_content = content_type && !Helpers.text_content_type?(content_type)
+
+ no_body =
+ assigns.body_size == 0 and
+ (is_nil(assigns.body_preview) or assigns.body_preview == "")
+
+ assigns =
+ assign(assigns,
+ content_type: content_type,
+ is_binary_content: is_binary_content,
+ no_body: no_body
+ )
+
+ ~H"""
+ <%= cond do %>
+ <% @no_body -> %>
+
+ <.icon name="hero-document" class="h-6 w-6 mb-1 text-secondary-300" />
+ No body
+
+ <% @is_binary_content -> %>
+
+
+ {Helpers.format_content_type_label(@content_type)}
+
+
+ {Helpers.format_bytes(@body_size)}
+
+
+ SHA256: {@body_hash}
+
+
+ <% is_nil(@body_preview) -> %>
+
+ Body not captured
+
+ ({Helpers.format_bytes(@body_size)})
+
+
+ <% true -> %>
+
+
+
+
+ {Helpers.format_content_type_label(@content_type)}
+
+ <.copy_icon_button
+ id={"#{@id}-copy"}
+ value={@body_preview}
+ title="Copy body"
+ size={3}
+ class="p-1 bg-white/80 rounded"
+ />
+
+
{@body_preview}
+
+
+
+ SHA256: {String.slice(@body_hash, 0..15)}...
+
+ <.copy_icon_button
+ id={"#{@id}-hash-copy"}
+ value={@body_hash}
+ title="Copy hash"
+ size={3}
+ />
+
+
byte_size(@body_preview)
+ }
+ class="mt-1 text-[11px] text-secondary-400"
+ >
+ Preview: {Helpers.format_bytes(byte_size(@body_preview))} of {Helpers.format_bytes(
+ @body_size
+ )}
+
+
+ <% end %>
+ """
+ end
+
+ attr :id, :string, required: true
+ attr :value, :string, required: true
+ attr :title, :string, default: "Copy"
+ attr :size, :integer, default: 4
+ attr :class, :string, default: nil
+
+ def copy_icon_button(assigns) do
+ ~H"""
+
+ """
+ end
+
+ def section_size_badge(assigns) do
+ ~H"""
+
+ {Helpers.format_bytes(@size)}
+
+ """
+ end
+end
diff --git a/lib/lightning_web/live/channel_request_live/helpers.ex b/lib/lightning_web/live/channel_request_live/helpers.ex
new file mode 100644
index 0000000000..d5b380a811
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/helpers.ex
@@ -0,0 +1,195 @@
+defmodule LightningWeb.ChannelRequestLive.Helpers do
+ @moduledoc """
+ Shared helper functions for the channel request detail page.
+
+ Pure functions only — no templates. Provides error humanization,
+ formatting utilities, and data extraction used across multiple
+ component modules.
+ """
+
+ # --- Error humanization ---
+
+ @transport_errors %{
+ "nxdomain" =>
+ "DNS lookup failed — the destination hostname could not be resolved",
+ "econnrefused" =>
+ "Connection refused — the destination server is not accepting connections on this port",
+ "ehostunreach" => "Host unreachable — no route to the destination server",
+ "enetunreach" => "Network unreachable — no network path to the destination",
+ "closed" => "Connection closed unexpectedly by the destination",
+ "econnreset" => "Connection reset — the destination dropped the connection",
+ "econnaborted" => "Connection aborted by the destination",
+ "epipe" =>
+ "Broken pipe — the destination closed the connection while data was being sent",
+ "connect_timeout" =>
+ "Connection timed out — the destination server did not respond to the connection attempt",
+ "response_timeout" =>
+ "Response timed out — the destination accepted the connection but did not send a response in time",
+ "timeout" => "Request timed out"
+ }
+
+ @credential_errors %{
+ "credential_missing_auth_fields" =>
+ "The configured credential is missing required authentication fields",
+ "credential_environment_not_found" =>
+ "The credential environment could not be found",
+ "oauth_refresh_failed" =>
+ "OAuth token refresh failed — the destination credential could not be renewed",
+ "oauth_reauthorization_required" =>
+ "OAuth credential needs to be re-authorized by a user"
+ }
+
+ @doc """
+ Converts a classified error code into a human-readable description.
+ Unknown codes pass through unchanged.
+ """
+ @spec humanize_error(String.t()) :: String.t()
+ def humanize_error(code) when is_binary(code) do
+ cond do
+ Map.has_key?(@transport_errors, code) ->
+ Map.fetch!(@transport_errors, code)
+
+ Map.has_key?(@credential_errors, code) ->
+ Map.fetch!(@credential_errors, code)
+
+ String.starts_with?(code, "unsupported_credential_schema:") ->
+ name = String.replace_prefix(code, "unsupported_credential_schema:", "")
+
+ "Unsupported credential type \"#{name}\" — this credential schema cannot be used for destination auth"
+
+ true ->
+ code
+ end
+ end
+
+ @doc """
+ Classifies an error code as `:transport`, `:credential`, or `nil` (unknown).
+ """
+ @spec error_category(String.t()) :: :transport | :credential | nil
+ def error_category(code) when is_binary(code) do
+ cond do
+ Map.has_key?(@transport_errors, code) ->
+ :transport
+
+ Map.has_key?(@credential_errors, code) ->
+ :credential
+
+ String.starts_with?(code, "unsupported_credential_schema:") ->
+ :credential
+
+ true ->
+ nil
+ end
+ end
+
+ # --- Data extraction ---
+
+ @doc """
+ Extracts the primary event from a channel request's events list.
+ Prefers `:destination_response`, falls back to `:error`.
+ """
+ def primary_event(channel_request) do
+ channel_request.channel_events
+ |> Enum.find(&(&1.type == :destination_response)) ||
+ Enum.find(channel_request.channel_events, &(&1.type == :error))
+ end
+
+ # --- Formatting ---
+
+ def format_auth_type(nil), do: "None"
+ def format_auth_type("api"), do: "API key"
+ def format_auth_type("basic"), do: "Basic auth"
+ def format_auth_type(type), do: type
+
+ @doc """
+ Formats the client auth method used for a channel request for display.
+
+  Returns `"<name> (<auth type>)"` when the matched auth method is
+  present, `"(deleted) (<auth type>)"` when the id is set but the
+  association is `nil` after preload (method deleted after the request
+  ran), and the raw auth type / `"None"` when no client auth was
+  configured for the request.
+ """
+ def format_client_auth(%{client_webhook_auth_method_id: nil} = channel_request) do
+ format_auth_type(channel_request.client_auth_type)
+ end
+
+ def format_client_auth(
+ %{client_webhook_auth_method: %{name: name}} = channel_request
+ ) do
+ "#{name} (#{format_auth_type(channel_request.client_auth_type)})"
+ end
+
+ def format_client_auth(channel_request) do
+ "(deleted) (#{format_auth_type(channel_request.client_auth_type)})"
+ end
+
+ @doc """
+ Formats the destination credential used for a channel request for display.
+
+ Returns the credential name when present, `"(deleted)"` when the id is
+ still set but the credential has been deleted, and `"None"` when no
+ destination credential was configured.
+ """
+ def format_destination_auth(%{destination_credential_id: nil}), do: "None"
+
+ def format_destination_auth(%{
+ destination_credential: %{credential: %{name: name}}
+ }),
+ do: name
+
+ def format_destination_auth(_channel_request), do: "(deleted)"
+
+ def format_bytes(nil), do: "—"
+
+ def format_bytes(bytes) when bytes < 1024,
+ do: "#{bytes} B"
+
+ def format_bytes(bytes) when bytes < 1_048_576,
+ do: "#{Float.round(bytes / 1024, 1)} KB"
+
+ def format_bytes(bytes),
+ do: "#{Float.round(bytes / 1_048_576, 1)} MB"
+
+ def format_us(nil), do: "—"
+
+ def format_us(us) when is_number(us) do
+ ms = us / 1000
+
+ if ms == Float.round(ms),
+ do: trunc(ms) |> to_string(),
+ else: Float.round(ms, 1) |> to_string()
+ end
+
+ # --- Content type utilities ---
+
+ def extract_content_type(nil), do: nil
+
+ def extract_content_type(headers) do
+ headers
+ |> Enum.find(fn [name, _] -> String.downcase(name) == "content-type" end)
+ |> case do
+ [_, value] -> value
+ nil -> nil
+ end
+ end
+
+ def text_content_type?(ct) do
+ String.contains?(ct, "text/") or
+ String.contains?(ct, "json") or
+ String.contains?(ct, "xml") or
+ String.contains?(ct, "javascript") or
+ String.contains?(ct, "html")
+ end
+
+ def format_content_type_label(ct) when is_binary(ct) do
+ cond do
+ String.contains?(ct, "json") -> "JSON"
+ String.contains?(ct, "xml") -> "XML"
+ String.contains?(ct, "html") -> "HTML"
+ String.contains?(ct, "text/") -> "TEXT"
+ true -> ct
+ end
+ end
+
+ def format_content_type_label(_), do: nil
+end
diff --git a/lib/lightning_web/live/channel_request_live/show.ex b/lib/lightning_web/live/channel_request_live/show.ex
new file mode 100644
index 0000000000..22184c9ff5
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/show.ex
@@ -0,0 +1,399 @@
+defmodule LightningWeb.ChannelRequestLive.Show do
+ use LightningWeb, :live_view
+
+ import LightningWeb.ChannelRequestLive.Components
+
+ import LightningWeb.ChannelRequestLive.Timing,
+ only: [timing_section: 1]
+
+ alias Lightning.Channels
+ alias LightningWeb.ChannelRequestLive.Helpers
+
+ on_mount {LightningWeb.Hooks, :project_scope}
+
+ @impl true
+ def mount(%{"id" => id}, _session, socket) do
+ %{current_user: current_user, project: project} = socket.assigns
+
+ if Lightning.Accounts.experimental_features_enabled?(current_user) do
+ case Channels.get_channel_request_for_project(project.id, id) do
+ nil ->
+ {:ok, redirect(socket, to: ~p"/projects/#{project}/history")}
+
+ channel_request ->
+ {:ok,
+ assign(socket,
+ active_menu_item: :runs,
+ page_title: "Channel Request",
+ request_id: id,
+ channel_request: channel_request
+ )}
+ end
+ else
+ {:ok, redirect(socket, to: ~p"/projects/#{project}/history")}
+ end
+ end
+
+ @impl true
+ def render(assigns) do
+ ~H"""
+
+ <:header>
+
+ <:breadcrumbs>
+
+
+
+
+ <:label>
+ Channel Request
+
+ {display_short_uuid(@request_id)}
+
+
+
+
+
+
+
+
+
+ <% cr = @channel_request %>
+ <% event = Helpers.primary_event(cr) %>
+ <% error_cat =
+ event && event.error_message && Helpers.error_category(event.error_message) %>
+
+ <.summary_card
+ channel_request={cr}
+ event={event}
+ channel={cr.channel}
+ error_category={error_cat}
+ />
+
+ <.timing_section :if={error_cat != :credential} event={event} />
+
+
+ <.request_section event={event} />
+ <.response_section event={event} error_category={error_cat} />
+
+
+ <.context_section
+ channel_request={cr}
+ snapshot={cr.channel_snapshot}
+ channel={cr.channel}
+ />
+
+
+
+ """
+ end
+
+ # --- Summary Card ---
+
+ defp summary_card(assigns) do
+ ~H"""
+
+
+ <.method_badge method={@event && @event.request_method} />
+ <.request_path_display event={@event} />
+ <.status_code_display status={@event && @event.response_status} />
+ <.state_pill_with_tooltip
+ state={@channel_request.state}
+ error_message={@event && @event.error_message}
+ />
+
+
+
+ Channel
+ <.link
+ navigate={
+ ~p"/projects/#{@channel.project_id}/channels/#{@channel.id}/edit"
+ }
+ class="text-primary-600 hover:text-primary-800"
+ >
+ {@channel.name}
+
+
+
+ Request
+
+ {String.slice(@channel_request.id, 0..7)}
+
+ <.copy_icon_button
+ id="copy-request-id"
+ value={@channel_request.id}
+ title="Copy request ID"
+ size={3}
+ />
+
+
+
+
+
+
+
+ Client
+
+
+
+
-
+ IP
+
+ -
+ {@channel_request.client_identity || "—"}
+
+
+
+
-
+ Auth
+
+ -
+ <.icon
+ name="hero-shield-check"
+ class="h-4 w-4 shrink-0 text-secondary-400 mt-0.5"
+ />
+ {Helpers.format_client_auth(@channel_request)}
+
+
+
+
+
+
+
+ Destination
+
+
+
+
-
+ URL
+
+ -
+ {@channel.destination_url}
+
+
+
+
-
+ Auth
+
+ -
+ <.icon
+ name="hero-key"
+ class="h-4 w-4 shrink-0 text-secondary-400 mt-0.5"
+ />
+ {Helpers.format_destination_auth(@channel_request)}
+
+
+
+
+
+
+
+ Timing
+
+
+
+
-
+ Started
+
+ -
+
+
+
+
+
-
+ Completed
+
+ -
+
+
+
+
+
-
+ Latency
+
+ -
+ {if @event && @event.latency_us,
+ do: "#{Helpers.format_us(@event.latency_us)} ms",
+ else: "—"}
+
+
+
+
+
+
+ """
+ end
+
+ # --- Request Section ---
+
+ defp request_section(assigns) do
+ event = assigns.event
+
+ show_body =
+ event &&
+ not (is_nil(event.request_body_preview) and
+ is_nil(event.request_body_size))
+
+ assigns = assign(assigns, show_body: show_body)
+
+ ~H"""
+ <.disclosure_section
+ id="request-section"
+ title="Request"
+ open={true}
+ padded={false}
+ >
+ <:title_right>
+ <.section_size_badge
+ :if={@event && @event.request_body_size && @event.request_body_size > 0}
+ size={@event.request_body_size}
+ id="request-size-badge"
+ />
+
+ <%= if @event do %>
+ <.sub_section
+ :if={@event.request_headers}
+ id="req-headers"
+ title="Headers"
+ open={true}
+ >
+ <.headers_table headers={@event.request_headers} id="request-headers" />
+
+ <.sub_section :if={@show_body} id="req-body" title="Body" open={true}>
+ <:title_right>
+ 0}
+ class="text-[11px] text-secondary-400 font-mono"
+ >
+ {Helpers.format_bytes(@event.request_body_size)}
+
+
+ <.body_viewer
+ id="request-body"
+ body_preview={@event.request_body_preview}
+ body_hash={@event.request_body_hash}
+ body_size={@event.request_body_size}
+ headers={@event.request_headers}
+ />
+
+ <% end %>
+
+ """
+ end
+
+ # --- Response Section ---
+
+ defp response_section(assigns) do
+ event = assigns.event
+
+ show_body =
+ event && is_nil(assigns.error_category) &&
+ not (is_nil(event.response_body_preview) and
+ is_nil(event.response_body_size))
+
+ assigns = assign(assigns, show_body: show_body)
+
+ ~H"""
+ <.disclosure_section
+ id="response-section"
+ title="Response"
+ open={true}
+ padded={false}
+ >
+ <:title_right>
+ <.status_code_badge
+ :if={@event && @event.response_status}
+ status={@event.response_status}
+ />
+ <.section_size_badge
+ :if={@event && @event.response_body_size && @event.response_body_size > 0}
+ size={@event.response_body_size}
+ id="response-size-badge"
+ />
+
+ <%= if @error_category in [:transport, :credential] do %>
+ <.response_empty
+ type={@error_category}
+ error_code={@event.error_message}
+ human_message={Helpers.humanize_error(@event.error_message)}
+ />
+ <% else %>
+ <%= if @event do %>
+ <.sub_section
+ :if={@event.response_headers}
+ id="resp-headers"
+ title="Headers"
+ open={true}
+ >
+ <.headers_table headers={@event.response_headers} id="response-headers" />
+
+ <.sub_section :if={@show_body} id="resp-body" title="Body" open={true}>
+ <:title_right>
+ 0}
+ class="text-[11px] text-secondary-400 font-mono"
+ >
+ {Helpers.format_bytes(@event.response_body_size)}
+
+
+ <.body_viewer
+ id="response-body"
+ body_preview={@event.response_body_preview}
+ body_hash={@event.response_body_hash}
+ body_size={@event.response_body_size}
+ headers={@event.response_headers}
+ />
+
+ <% end %>
+ <% end %>
+
+ """
+ end
+
+ # --- Context Section ---
+
+ defp context_section(assigns) do
+ config_changed =
+ assigns.snapshot.lock_version != assigns.channel.lock_version
+
+ assigns = assign(assigns, config_changed: config_changed)
+
+ ~H"""
+ <.disclosure_section id="context-section" title="Context" open={false}>
+
+
+
+ Destination URL
+
+
+ {@snapshot.destination_url}
+
+
+
+
+ Channel Name
+
+ {@snapshot.name}
+
+
+
+ Config Version
+
+
+ {@snapshot.lock_version}
+
+ Config changed
+
+
+
+
+
+ """
+ end
+end
diff --git a/lib/lightning_web/live/channel_request_live/timing.ex b/lib/lightning_web/live/channel_request_live/timing.ex
new file mode 100644
index 0000000000..63efeee020
--- /dev/null
+++ b/lib/lightning_web/live/channel_request_live/timing.ex
@@ -0,0 +1,396 @@
+defmodule LightningWeb.ChannelRequestLive.Timing do
+ @moduledoc """
+ Timing visualization components for the channel request detail page.
+
+ Renders a segmented timing bar with TTFB marker and legend,
+ computing phase breakdowns from Finch timing metrics.
+ """
+
+ use LightningWeb, :component
+
+ import LightningWeb.ChannelRequestLive.Components,
+ only: [disclosure_section: 1]
+
+ alias LightningWeb.ChannelRequestLive.Helpers
+
+  # --- Public components ---
+
+  # Renders the "Timing" disclosure section of the request detail page.
+  # Segment/TTFB data is derived from the event up front (a nil event
+  # yields nil timing_data) so the template only handles presentation.
+  #
+  # NOTE(review): the HEEx body below appears to have lost markup during
+  # extraction — verify against the original template before editing it.
+  def timing_section(assigns) do
+    event = assigns.event
+
+    # nil-safe: history rows without a destination event render the section
+    # with no bar/legend data.
+    timing_data =
+      if event do
+        compute_timing_segments(event)
+      else
+        nil
+      end
+
+    assigns = assign(assigns, timing_data: timing_data, event: event)
+
+    ~H"""
+
+    <.disclosure_section id="timing-section-disclosure" title="Timing" open={true}>
+
+    <.timing_bar timing_data={@timing_data} />
+    <.timing_legend timing_data={@timing_data} />
+
+
+
+    """
+  end
+
+  # --- Timing bar ---
+
+  # CSS background shared by the outer timing bar and the legend swatch:
+  # a diagonal gray hatch that reads as unattributed time (proxy overhead).
+  # Assembled once at compile time into a single binary.
+  @hatch_gradient_style IO.iodata_to_binary([
+                          "background: repeating-linear-gradient(",
+                          "-45deg, ",
+                          "rgba(156, 163, 175, 0.18) 0px, ",
+                          "rgba(156, 163, 175, 0.18) 3px, ",
+                          "rgba(209, 213, 219, 0.55) 3px, ",
+                          "rgba(209, 213, 219, 0.55) 6px)"
+                        ])
+
+  # Renders the segmented timing bar: each phase becomes a proportional
+  # colored segment, with a TTFB marker and 0/total axis labels.
+  #
+  # NOTE(review): the HEEx body below appears to have lost markup during
+  # extraction — verify against the original template before editing it.
+  defp timing_bar(assigns) do
+    segments = assigns.timing_data.segments
+    total_us = assigns.timing_data.total_us
+    ttfb_us = assigns.timing_data.ttfb_us
+
+    # Sum of the phase segments; may be less than total_us when there is
+    # unattributed proxy overhead.
+    inner_total =
+      Enum.reduce(segments, 0, fn s, acc -> acc + s.us end)
+
+    # Guard against division by zero below when every segment is 0.
+    inner_total = if inner_total == 0, do: 1, else: inner_total
+
+    # Each segment's width as a percentage of the inner span, floored at
+    # 0.5% so tiny phases stay visible.
+    segments_with_pct =
+      Enum.map(segments, fn s ->
+        Map.put(
+          s,
+          :pct,
+          max(Float.round(s.us / inner_total * 100, 1), 0.5)
+        )
+      end)
+
+    # Horizontal position of the TTFB marker, or nil when TTFB is unknown.
+    ttfb_pct =
+      if ttfb_us && ttfb_us > 0 && inner_total > 0 do
+        Float.round(ttfb_us / inner_total * 100, 1)
+      else
+        nil
+      end
+
+    tier = assigns.timing_data.tier
+    # The hatched overhead gutters are only meaningful for the :full tier,
+    # where per-phase Finch timings exist.
+    show_overhead = tier == :full
+    seg_count = length(segments_with_pct)
+
+    assigns =
+      assign(assigns,
+        segments: segments_with_pct,
+        seg_count: seg_count,
+        total_us: total_us,
+        ttfb_us: ttfb_us,
+        ttfb_pct: ttfb_pct,
+        show_overhead: show_overhead,
+        hatch_style: @hatch_gradient_style
+      )
+
+    ~H"""
+
+
+    <%!-- Outer bar: hatch background with inner segments on top --%>
+
+    <%!-- Inner phase segments --%>
+
+
+
+    {seg.badge}
+
+
+    {format_segment_label(seg)}
+
+
+
+    <%!-- TTFB marker line --%>
+
+
+
+
+
+
+    <.icon name="hero-arrow-up-mini" class="h-3 w-3 text-secondary-500" />
+
+    TTFB: {Helpers.format_us(@ttfb_us)} ms
+
+
+
+
+
+
+    0 ms
+
+    {Helpers.format_us(@total_us)} ms
+
+
+
+    """
+  end
+
+  # Label rendered inside a timing segment. Badge segments (e.g. the reused
+  # connection marker) and zero-width segments get no inline label;
+  # durations of a second or more show rounded seconds, everything else a
+  # formatted microsecond value.
+  defp format_segment_label(%{badge: _badge}), do: ""
+  defp format_segment_label(%{us: 0}), do: ""
+
+  defp format_segment_label(%{us: us}) do
+    ms = us / 1000
+
+    if ms >= 1000 do
+      "#{Float.round(ms / 1000, 1)}s"
+    else
+      "#{Helpers.format_us(us)}ms"
+    end
+  end
+
+  # --- Timing legend ---
+
+  # Renders the color-key legend under the bar: one swatch per segment,
+  # plus the hatched "Proxy overhead" swatch (only for the :full tier) and
+  # a textual TTFB readout.
+  #
+  # NOTE(review): the HEEx body below appears to have lost markup during
+  # extraction — verify against the original template before editing it.
+  defp timing_legend(assigns) do
+    timing_data = assigns.timing_data
+    segments = timing_data.segments
+    ttfb_us = timing_data.ttfb_us
+
+    # Overhead is only attributable when full Finch phase data exists.
+    show_overhead = timing_data.tier == :full
+
+    assigns =
+      assign(assigns,
+        segments: segments,
+        ttfb_us: ttfb_us,
+        show_overhead: show_overhead,
+        swatch_style: @hatch_gradient_style
+      )
+
+    ~H"""
+
+
+
+
+    {seg.label}
+
+
+
+
+    Proxy overhead
+
+
+    TTFB: {Helpers.format_us(@ttfb_us)} ms
+
+
+    """
+  end
+
+  # --- Timing computation ---
+
+  # Chooses the richest timing tier the event supports:
+  #   :full    — all Finch per-phase durations were captured
+  #   :partial — only TTFB and total latency are known
+  #   :minimal — total latency alone
+  # Returns nil when the event carries no latency at all.
+  defp compute_timing_segments(%{latency_us: nil}), do: nil
+
+  defp compute_timing_segments(event) do
+    cond do
+      has_finch_phases?(event) -> compute_full_timing(event)
+      event.ttfb_us != nil -> compute_ttfb_timing(event)
+      true -> compute_minimal_timing(event)
+    end
+  end
+
+  # True when every Finch phase duration needed for the :full timing tier
+  # (send, TTFB, receive) was captured on the event.
+  defp has_finch_phases?(event) do
+    [event.request_send_us, event.ttfb_us, event.response_duration_us]
+    |> Enum.all?(&(not is_nil(&1)))
+  end
+
+  # :full tier — builds the five-phase breakdown (Queue, Connect, Send,
+  # Processing, Recv) from the Finch timing fields, plus the hatched
+  # overhead gutters for time the proxy cannot attribute to any phase.
+  # Only called when has_finch_phases?/1 is true, so send/recv/ttfb are
+  # non-nil here; queue/connect may still be missing and default to 0.
+  defp compute_full_timing(event) do
+    queue_us = event.queue_us || 0
+    connect_us = event.connect_us || 0
+    send_us = event.request_send_us
+    recv_us = event.response_duration_us
+    ttfb_us = event.ttfb_us
+    latency_us = event.latency_us
+
+    # Server "Processing" time: TTFB minus everything we spent before the
+    # request was fully sent. Clamped at 0 in case of clock skew.
+    wait_us = max(ttfb_us - queue_us - connect_us - send_us, 0)
+
+    inner_sum = queue_us + connect_us + send_us + wait_us + recv_us
+
+    # Unattributed gap between the phase sum and total latency, split into
+    # equal left/right hatch percentages.
+    {overhead_left_pct, overhead_right_pct} =
+      compute_overhead(inner_sum, latency_us)
+
+    # A connection counts as reused only when the pool said so AND no
+    # connect time was actually measured.
+    reused =
+      event.reused_connection == true and
+        (connect_us == 0 or is_nil(event.connect_us))
+
+    # Queue and Connect are optional; Send/Processing/Recv always render.
+    segments =
+      []
+      |> maybe_add_segment(queue_us > 0, %{
+        label: "Queue",
+        us: queue_us,
+        color: "bg-amber-300",
+        text_color: "text-amber-900"
+      })
+      |> maybe_add_connect_segment(connect_us, reused)
+      |> Kernel.++([
+        %{
+          label: "Send",
+          us: send_us,
+          color: "bg-blue-400",
+          text_color: "text-blue-900"
+        },
+        %{
+          label: "Processing",
+          us: wait_us,
+          color: "bg-gray-300",
+          text_color: "text-gray-700"
+        },
+        %{
+          label: "Recv",
+          us: recv_us,
+          color: "bg-green-400",
+          text_color: "text-green-900"
+        }
+      ])
+
+    %{
+      segments: segments,
+      total_us: latency_us,
+      ttfb_us: ttfb_us,
+      overhead_left_pct: overhead_left_pct,
+      overhead_right_pct: overhead_right_pct,
+      tier: :full
+    }
+  end
+
+  # :partial tier — only total latency and TTFB are known, so the bar is
+  # just two segments: time to first byte, then the remaining download.
+  defp compute_ttfb_timing(event) do
+    %{latency_us: total, ttfb_us: ttfb} = event
+    download_us = max(total - ttfb, 0)
+
+    first_byte_segment = %{
+      label: "TTFB",
+      us: ttfb,
+      color: "bg-blue-400",
+      text_color: "text-blue-900"
+    }
+
+    download_segment = %{
+      label: "Download",
+      us: download_us,
+      color: "bg-green-400",
+      text_color: "text-green-900"
+    }
+
+    %{
+      segments: [first_byte_segment, download_segment],
+      total_us: total,
+      ttfb_us: ttfb,
+      overhead_left_pct: 0,
+      overhead_right_pct: 0,
+      tier: :partial
+    }
+  end
+
+  # :minimal tier — only total latency is known; render one blue segment
+  # spanning the whole bar, with no TTFB marker or overhead gutters.
+  defp compute_minimal_timing(event) do
+    total = event.latency_us
+
+    only_segment = %{
+      label: "Total",
+      us: total,
+      color: "bg-blue-400",
+      text_color: "text-blue-900"
+    }
+
+    %{
+      segments: [only_segment],
+      total_us: total,
+      ttfb_us: nil,
+      overhead_left_pct: 0,
+      overhead_right_pct: 0,
+      tier: :minimal
+    }
+  end
+
+  # Percentage of the outer bar left unclaimed by the inner phase segments,
+  # rendered as hatched "proxy overhead" split evenly between the two
+  # sides. {0, 0} when there is no gap or latency is zero.
+  defp compute_overhead(inner_sum, latency_us) do
+    if latency_us == 0 or inner_sum >= latency_us do
+      {0, 0}
+    else
+      gap_pct = (latency_us - inner_sum) / latency_us * 100
+      half = Float.round(gap_pct / 2, 1)
+      {half, half}
+    end
+  end
+
+  # Appends `segment` to the list only when `include?` is true; otherwise
+  # returns the list unchanged.
+  defp maybe_add_segment(segments, include?, segment) do
+    if include?, do: segments ++ [segment], else: segments
+  end
+
+  # Adds the "Connect" segment: a zero-width "(reused)" badge when the
+  # connection came from the pool, a real orange segment when connect time
+  # was measured, and nothing when there is neither.
+  defp maybe_add_connect_segment(segments, _connect_us, true) do
+    reused_badge = %{
+      label: "Connect",
+      us: 0,
+      color: "bg-orange-400",
+      text_color: "text-orange-900",
+      badge: "(reused)"
+    }
+
+    segments ++ [reused_badge]
+  end
+
+  defp maybe_add_connect_segment(segments, connect_us, false)
+       when connect_us > 0 do
+    connect_segment = %{
+      label: "Connect",
+      us: connect_us,
+      color: "bg-orange-400",
+      text_color: "text-orange-900"
+    }
+
+    segments ++ [connect_segment]
+  end
+
+  defp maybe_add_connect_segment(segments, _connect_us, false), do: segments
+end
diff --git a/lib/lightning_web/live/run_live/channel_logs_component.ex b/lib/lightning_web/live/run_live/channel_logs_component.ex
index 343b65ff8b..a1810e52f3 100644
--- a/lib/lightning_web/live/run_live/channel_logs_component.ex
+++ b/lib/lightning_web/live/run_live/channel_logs_component.ex
@@ -151,9 +151,13 @@ defmodule LightningWeb.RunLive.ChannelLogsComponent do
<%= for entry <- @page.entries do %>
<.tr id={"request-#{entry.id}"}>
<.td>
-
+ <.link
+ navigate={~p"/projects/#{@project}/history/channels/#{entry.id}"}
+ class="link font-mono"
+ title={entry.request_id}
+ >
{display_short_uuid(entry.request_id)}
-
+
<.td class="text-sm text-gray-700">
{source_event_path(entry)}
diff --git a/lib/lightning_web/plugs/channel_proxy_plug.ex b/lib/lightning_web/plugs/channel_proxy_plug.ex
index dfa0cafc33..3b37e4eb1f 100644
--- a/lib/lightning_web/plugs/channel_proxy_plug.ex
+++ b/lib/lightning_web/plugs/channel_proxy_plug.ex
@@ -44,6 +44,7 @@ defmodule LightningWeb.ChannelProxyPlug do
:forward_path,
:client_identity,
:auth_header,
+ :destination_credential_id,
client_auth_types: []
]
end
@@ -69,15 +70,15 @@ defmodule LightningWeb.ChannelProxyPlug do
defp do_proxy(conn, channel_id, rest) do
with {:ok, channel} <- fetch_channel_with_telemetry(channel_id),
- :ok <- authenticate_client(conn, channel) do
- proxy_with_auth(conn, channel, rest)
+ {:ok, matched_auth} <- authenticate_client(conn, channel) do
+ proxy_with_auth(conn, channel, rest, matched_auth)
else
:not_found -> error_response(conn, :not_found, "Not Found")
:unauthorized -> error_response(conn, :unauthorized, "Unauthorized")
end
end
- defp proxy_with_auth(conn, channel, rest) do
+ defp proxy_with_auth(conn, channel, rest, matched_auth) do
with {:ok, auth_header} <- resolve_destination_auth(channel),
{:ok, snapshot} <- Channels.get_or_create_current_snapshot(channel) do
client_auth_types =
@@ -93,11 +94,12 @@ defmodule LightningWeb.ChannelProxyPlug do
forward_path: build_forward_path(rest),
client_identity: get_client_identity(conn),
auth_header: auth_header,
- client_auth_types: client_auth_types
+ client_auth_types: client_auth_types,
+ destination_credential_id: destination_credential_id(channel)
}
conn
- |> proxy_upstream(req)
+ |> proxy_upstream(req, matched_auth)
|> halt()
else
{:credential_error, reason} ->
@@ -108,18 +110,36 @@ defmodule LightningWeb.ChannelProxyPlug do
end
end
+  # Resolves the project_credential id configured as the channel's
+  # destination auth, or nil when the destination auth method carries no
+  # project_credential_id (or is absent).
+  defp destination_credential_id(channel) do
+    case channel.destination_auth_method do
+      %{project_credential_id: id} -> id
+      _ -> nil
+    end
+  end
+
+  # No client auth methods configured: the channel is open, so every
+  # request is admitted with no matched method to attribute ({:ok, nil}).
+  defp authenticate_client(_conn, %{client_webhook_auth_methods: []}) do
+    {:ok, nil}
+  end
+
defp authenticate_client(conn, channel) do
methods = channel.client_webhook_auth_methods
- if methods == [] or
- Auth.valid_key?(conn, methods) or
- Auth.valid_user?(conn, methods) do
- :ok
- else
- :unauthorized
+ case find_matching_auth_method(conn, methods) do
+ %{} = method -> {:ok, method}
+ nil -> :unauthorized
end
end
+  # Returns the first configured auth method that validates this request
+  # (:api -> key check, :basic -> user/password check), or nil when none
+  # match. The matched method is returned (not just a boolean) so the
+  # proxied request can be attributed to it downstream.
+  defp find_matching_auth_method(conn, methods) do
+    Enum.find(methods, fn method ->
+      case method.auth_type do
+        :api -> Auth.valid_key?(conn, [method])
+        :basic -> Auth.valid_user?(conn, [method])
+        _ -> false
+      end
+    end)
+  end
+
defp fetch_channel_with_telemetry(channel_id) do
metadata = %{channel_id: channel_id}
@@ -133,15 +153,19 @@ defmodule LightningWeb.ChannelProxyPlug do
)
end
- defp proxy_upstream(conn, %DestinationRequest{} = req) do
- handler_state = %{
- channel: req.channel,
- snapshot: req.snapshot,
- request_id: req.request_id,
- started_at: DateTime.utc_now(),
- request_path: req.forward_path,
- client_identity: req.client_identity
- }
+ defp proxy_upstream(conn, %DestinationRequest{} = req, matched_auth) do
+ handler_state =
+ %{
+ channel: req.channel,
+ snapshot: req.snapshot,
+ request_id: req.request_id,
+ started_at: DateTime.utc_now(),
+ request_path: req.forward_path,
+ client_identity: req.client_identity,
+ query_string: conn.query_string,
+ destination_credential_id: req.destination_credential_id
+ }
+ |> put_auth_method(matched_auth)
metadata = %{
channel_id: req.channel.id,
@@ -159,7 +183,8 @@ defmodule LightningWeb.ChannelProxyPlug do
path: req.forward_path,
handler: {Lightning.Channels.Handler, handler_state},
strip_headers: build_strip_headers(req.client_auth_types),
- extra_headers: build_extra_headers(conn, req)
+ extra_headers: build_extra_headers(conn, req),
+ collect_timing: true
)
{result, metadata}
@@ -167,6 +192,15 @@ defmodule LightningWeb.ChannelProxyPlug do
)
end
+  # Merges the matched client auth method into the handler state so it is
+  # persisted on the channel request; a no-op when the channel required no
+  # client auth (matched_auth is nil).
+  defp put_auth_method(state, nil), do: state
+
+  defp put_auth_method(state, %{id: id, auth_type: auth_type}) do
+    Map.merge(state, %{
+      client_webhook_auth_method_id: id,
+      # Denormalized as a string so it survives deletion of the method row.
+      client_auth_type: Atom.to_string(auth_type)
+    })
+  end
+
defp build_extra_headers(conn, %DestinationRequest{} = req) do
xff =
case Plug.Conn.get_req_header(conn, "x-forwarded-for") do
@@ -256,7 +290,8 @@ defmodule LightningWeb.ChannelProxyPlug do
|> Plug.Conn.get_resp_header("x-request-id")
|> List.first(),
forward_path: conn.request_path,
- client_identity: get_client_identity(conn)
+ client_identity: get_client_identity(conn),
+ destination_credential_id: destination_credential_id(channel)
}
record_credential_error(conn, req, error_message)
@@ -280,6 +315,7 @@ defmodule LightningWeb.ChannelProxyPlug do
channel_snapshot_id: req.snapshot.id,
request_id: req.request_id,
client_identity: req.client_identity,
+ destination_credential_id: req.destination_credential_id,
state: :error,
started_at: now,
completed_at: now
diff --git a/lib/lightning_web/router.ex b/lib/lightning_web/router.ex
index b08cac16ed..e8f64c1fe5 100644
--- a/lib/lightning_web/router.ex
+++ b/lib/lightning_web/router.ex
@@ -241,6 +241,7 @@ defmodule LightningWeb.Router do
live "/history", RunLive.Index, :index
live "/history/channels", RunLive.Index, :channel_logs
+ live "/history/channels/:id", ChannelRequestLive.Show, :show
live "/runs/:id", RunLive.Show, :show
live "/dataclips/:id/show", DataclipLive.Show, :show
diff --git a/mix.exs b/mix.exs
index e9a50a3cd1..d7a31ea32a 100644
--- a/mix.exs
+++ b/mix.exs
@@ -162,7 +162,7 @@ defmodule Lightning.MixProject do
if path = System.get_env("PHILTER_PATH") do
{:philter, path: path}
else
- {:philter, "~> 0.2.1"}
+ {:philter, "~> 0.3.0"}
end
end
diff --git a/mix.lock b/mix.lock
index b319d9c250..5d9af735bb 100644
--- a/mix.lock
+++ b/mix.lock
@@ -9,7 +9,7 @@
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
"bypass": {:hex, :bypass, "2.1.0", "909782781bf8e20ee86a9cabde36b259d44af8b9f38756173e8f5e2e1fabb9b1", [:mix], [{:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "d9b5df8fa5b7a6efa08384e9bbecfe4ce61c77d28a4282f79e02f1ef78d96b80"},
"cachex": {:hex, :cachex, "4.1.1", "574c5cd28473db313a0a76aac8c945fe44191659538ca6a1e8946ec300b1a19f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:ex_hash_ring, "~> 6.0", [hex: :ex_hash_ring, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "d6b7449ff98d6bb92dda58bd4fc3189cae9f99e7042054d669596f56dc503cd8"},
- "castore": {:hex, :castore, "1.0.17", "4f9770d2d45fbd91dcf6bd404cf64e7e58fed04fadda0923dc32acca0badffa2", [:mix], [], "hexpm", "12d24b9d80b910dd3953e165636d68f147a31db945d2dcb9365e441f8b5351e5"},
+ "castore": {:hex, :castore, "1.0.18", "5e43ef0ec7d31195dfa5a65a86e6131db999d074179d2ba5a8de11fe14570f55", [:mix], [], "hexpm", "f393e4fe6317829b158fb74d86eb681f737d2fe326aa61ccf6293c4104957e34"},
"certifi": {:hex, :certifi, "2.15.0", "0e6e882fcdaaa0a5a9f2b3db55b1394dba07e8d6d9bcad08318fb604c6839712", [:rebar3], [], "hexpm", "b147ed22ce71d72eafdad94f055165c1c182f61a2ff49df28bcc71d1d5b94a60"},
"chameleon": {:hex, :chameleon, "2.5.0", "102dd809f78701875efd0a203730dd64296a1f2d29c8efa6b00cc029d58ff39e", [:mix], [], "hexpm", "f3559827d8b4fe53a44e19e56ae94bedd36a355e0d33e18067b8abc37ec428db"},
"circular_buffer": {:hex, :circular_buffer, "1.0.0", "25c004da0cba7bd8bc1bdabded4f9a902d095e20600fd15faf1f2ffbaea18a07", [:mix], [], "hexpm", "c829ec31c13c7bafd1f546677263dff5bfb006e929f25635878ac3cfba8749e5"},
@@ -103,7 +103,7 @@
"parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
"peep": {:hex, :peep, "3.5.0", "9f6ead7b0f2c684494200c8fc02e7e62e8c459afe861b29bd859e4c96f402ed8", [:mix], [{:nimble_options, "~> 1.1", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:plug, "~> 1.16", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry_metrics, "~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "5a73a99c6e60062415efeb7e536a663387146463a3d3df1417da31fd665ac210"},
"petal_components": {:hex, :petal_components, "3.0.1", "58cd70f9c5e4896ed8e41b095f19770fa56ca0855d99790c4a26b5f04fa52283", [:mix], [{:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_ecto, "~> 4.4", [hex: :phoenix_ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_html_helpers, "~> 1.0", [hex: :phoenix_html_helpers, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.7", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "1195bc30979284f01a5fa2430e370d8378c635e083179c2b2fdbecf21cce05c1"},
- "philter": {:hex, :philter, "0.2.1", "48239f0913745c1a58bf1691993cbf19fc766e20846ccbe961a211870d1f99c3", [:mix], [{:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "3bc2ff7a61d08936621544df9b65afae46b4bd4cb9a2412eeace450a762a3ff9"},
+ "philter": {:hex, :philter, "0.3.0", "7142e315cd1265365fa9d5c40a48530135c34b79ea160e9cd1eee1fc7acc297e", [:mix], [{:finch, "~> 0.18", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.7", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ddb96add693cb6749f26b7663c1319f32d5e898b8ffad7f6a4c1e5e7ed2e225e"},
"phoenix": {:hex, :phoenix, "1.7.21", "14ca4f1071a5f65121217d6b57ac5712d1857e40a0833aff7a691b7870fc9a3b", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "336dce4f86cba56fed312a7d280bf2282c720abb6074bdb1b61ec8095bdd0bc9"},
"phoenix_ecto": {:hex, :phoenix_ecto, "4.6.5", "c4ef322acd15a574a8b1a08eff0ee0a85e73096b53ce1403b6563709f15e1cea", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "26ec3208eef407f31b748cadd044045c6fd485fbff168e35963d2f9dfff28d4b"},
"phoenix_html": {:hex, :phoenix_html, "4.3.0", "d3577a5df4b6954cd7890c84d955c470b5310bb49647f0a114a6eeecc850f7ad", [:mix], [], "hexpm", "3eaa290a78bab0f075f791a46a981bbe769d94bc776869f4f3063a14f30497ad"},
diff --git a/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs b/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs
new file mode 100644
index 0000000000..bd773332ed
--- /dev/null
+++ b/priv/repo/migrations/20260409100001_add_channel_event_detail_columns.exs
@@ -0,0 +1,13 @@
+# Adds per-request detail columns to channel_events: the raw query string,
+# request/response body sizes, and per-direction transfer durations in
+# microseconds (captured from Finch timing data).
+defmodule Lightning.Repo.Migrations.AddChannelEventDetailColumns do
+  use Ecto.Migration
+
+  def change do
+    alter table(:channel_events) do
+      # Query string as received from the client; :text since length is
+      # unbounded.
+      add :request_query_string, :text
+      # Body sizes in bytes; bigint so large payloads cannot overflow.
+      add :request_body_size, :bigint
+      add :response_body_size, :bigint
+      # Durations in microseconds; a 4-byte int caps at ~35 minutes, which
+      # matches the other *_us columns on this table.
+      add :request_send_us, :integer
+      add :response_duration_us, :integer
+    end
+  end
+end
diff --git a/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs b/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs
new file mode 100644
index 0000000000..29598390ad
--- /dev/null
+++ b/priv/repo/migrations/20260409100002_convert_channel_event_headers_to_jsonb.exs
@@ -0,0 +1,39 @@
+# Converts channel_events header columns from text to native jsonb.
+#
+# Fixes a latent migration-aborting bug in the original version: in
+# PostgreSQL, `CASE WHEN col::jsonb IS NOT NULL ... ELSE NULL` cannot trap
+# a failing cast — the cast raises before the CASE can fall through, so a
+# single row holding invalid JSON would abort the whole migration and the
+# ELSE NULL branch was unreachable. We instead route values through a small
+# plpgsql helper that traps the cast exception and degrades bad rows to
+# NULL, then drop the helper.
+defmodule Lightning.Repo.Migrations.ConvertChannelEventHeadersToJsonb do
+  use Ecto.Migration
+
+  def up do
+    # Temporary helper: returns NULL instead of raising on invalid JSON.
+    execute """
+    CREATE OR REPLACE FUNCTION lightning_tmp_try_jsonb(input text)
+    RETURNS jsonb AS $$
+    BEGIN
+      RETURN input::jsonb;
+    EXCEPTION WHEN others THEN
+      RETURN NULL;
+    END;
+    $$ LANGUAGE plpgsql
+    """
+
+    execute """
+    ALTER TABLE channel_events
+    ALTER COLUMN request_headers TYPE jsonb
+    USING lightning_tmp_try_jsonb(request_headers)
+    """
+
+    execute """
+    ALTER TABLE channel_events
+    ALTER COLUMN response_headers TYPE jsonb
+    USING lightning_tmp_try_jsonb(response_headers)
+    """
+
+    execute "DROP FUNCTION lightning_tmp_try_jsonb(text)"
+  end
+
+  def down do
+    # jsonb -> text always succeeds, so no guard is needed here.
+    execute """
+    ALTER TABLE channel_events
+    ALTER COLUMN request_headers TYPE text
+    USING request_headers::text
+    """
+
+    execute """
+    ALTER TABLE channel_events
+    ALTER COLUMN response_headers TYPE text
+    USING response_headers::text
+    """
+  end
+end
diff --git a/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs b/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs
new file mode 100644
index 0000000000..df4eb79d0d
--- /dev/null
+++ b/priv/repo/migrations/20260409100003_add_client_auth_tracking_to_channel_requests.exs
@@ -0,0 +1,14 @@
+# Records which client webhook auth method admitted each proxied request.
+defmodule Lightning.Repo.Migrations.AddClientAuthTrackingToChannelRequests do
+  use Ecto.Migration
+
+  def change do
+    alter table(:channel_requests) do
+      # FK to the matched auth method; nilified on delete so history rows
+      # survive removal of the method itself.
+      add :client_webhook_auth_method_id,
+          references(:webhook_auth_methods, type: :binary_id, on_delete: :nilify_all)
+
+      # Denormalized auth type ("api"/"basic") that outlives FK nilification.
+      add :client_auth_type, :string
+    end
+
+    create index(:channel_requests, [:client_webhook_auth_method_id])
+  end
+end
diff --git a/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs b/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs
new file mode 100644
index 0000000000..bf47ab281d
--- /dev/null
+++ b/priv/repo/migrations/20260410131136_rename_timing_fields_to_microseconds.exs
@@ -0,0 +1,26 @@
+# Renames latency_ms/ttfb_ms to *_us, rescales stored values from
+# milliseconds to microseconds, and adds the new Finch phase columns.
+#
+# Written as explicit up/down rather than change/0: the original used
+# flush/0 inside change/0, and Ecto's flush/0 raises when a change/0
+# migration is run in the down direction, which made the migration
+# irreversible in practice. With explicit up/down the commands already run
+# in definition order, so no flush is needed at all.
+defmodule Lightning.Repo.Migrations.RenameTimingFieldsToMicroseconds do
+  use Ecto.Migration
+
+  def up do
+    rename table(:channel_events), :latency_ms, to: :latency_us
+    rename table(:channel_events), :ttfb_ms, to: :ttfb_us
+
+    # NOTE(review): multiplying in place assumes the column type can hold
+    # the scaled value; a 4-byte int overflows above ~35 minutes of latency
+    # — confirm the column type / expected range.
+    execute "UPDATE channel_events SET latency_us = latency_us * 1000"
+    execute "UPDATE channel_events SET ttfb_us = ttfb_us * 1000"
+
+    alter table(:channel_events) do
+      add :queue_us, :integer
+      add :connect_us, :integer
+      add :reused_connection, :boolean
+    end
+  end
+
+  def down do
+    alter table(:channel_events) do
+      remove :queue_us
+      remove :connect_us
+      remove :reused_connection
+    end
+
+    # Reverse order of up/0: scale back down while the *_us names still
+    # exist, then restore the original column names.
+    execute "UPDATE channel_events SET latency_us = latency_us / 1000"
+    execute "UPDATE channel_events SET ttfb_us = ttfb_us / 1000"
+
+    rename table(:channel_events), :latency_us, to: :latency_ms
+    rename table(:channel_events), :ttfb_us, to: :ttfb_ms
+  end
+end
diff --git a/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs b/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs
new file mode 100644
index 0000000000..a3d82ac08c
--- /dev/null
+++ b/priv/repo/migrations/20260423133517_add_destination_credential_id_to_channel_requests.exs
@@ -0,0 +1,15 @@
+# Attributes each proxied request to the project credential used for
+# destination auth at the time of the request.
+defmodule Lightning.Repo.Migrations.AddDestinationCredentialIdToChannelRequests do
+  use Ecto.Migration
+
+  def change do
+    alter table(:channel_requests) do
+      # Nilified on credential deletion so request history is preserved.
+      add :destination_credential_id,
+          references(:project_credentials,
+            type: :binary_id,
+            on_delete: :nilify_all
+          )
+    end
+
+    create index(:channel_requests, [:destination_credential_id])
+  end
+end
diff --git a/test/lightning/channels/channel_requests_test.exs b/test/lightning/channels/channel_requests_test.exs
index 105173d1c9..cb9f896b85 100644
--- a/test/lightning/channels/channel_requests_test.exs
+++ b/test/lightning/channels/channel_requests_test.exs
@@ -284,6 +284,111 @@ defmodule Lightning.Channels.ChannelRequestsTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — client auth method tracking on requests
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D3: client_webhook_auth_method_id and client_auth_type on channel_requests
+ #
+ # They will not compile/pass until Phase 1b implements the changes.
+
+  # Exercises the D3 fields end to end: changeset acceptance, nullability,
+  # and the belongs_to association preload.
+  describe "ChannelRequest changeset — auth method fields" do
+    test "accepts client_webhook_auth_method_id and client_auth_type" do
+      channel = insert(:channel)
+      {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+      auth_method = insert(:webhook_auth_method, project: channel.project)
+
+      attrs = %{
+        channel_id: channel.id,
+        channel_snapshot_id: snapshot.id,
+        request_id: "req-auth-test",
+        state: :success,
+        started_at: DateTime.utc_now(),
+        client_webhook_auth_method_id: auth_method.id,
+        client_auth_type: "api"
+      }
+
+      changeset = ChannelRequest.changeset(%ChannelRequest{}, attrs)
+      assert changeset.valid?
+
+      # Round-trip through the DB to prove the fields are actually cast
+      # and persisted, not just accepted by the changeset.
+      {:ok, request} = Repo.insert(changeset)
+      assert request.client_webhook_auth_method_id == auth_method.id
+      assert request.client_auth_type == "api"
+    end
+
+    test "auth method fields are nullable" do
+      channel = insert(:channel)
+      {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+      # Minimal attrs with neither auth field — insert must still succeed.
+      attrs = %{
+        channel_id: channel.id,
+        channel_snapshot_id: snapshot.id,
+        request_id: "req-no-auth",
+        state: :success,
+        started_at: DateTime.utc_now()
+      }
+
+      {:ok, request} =
+        ChannelRequest.changeset(%ChannelRequest{}, attrs) |> Repo.insert()
+
+      assert request.client_webhook_auth_method_id == nil
+      assert request.client_auth_type == nil
+    end
+
+    test "belongs_to client_webhook_auth_method loads correctly" do
+      channel = insert(:channel)
+      {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+      auth_method = insert(:webhook_auth_method, project: channel.project)
+
+      request =
+        insert(:channel_request,
+          channel: channel,
+          channel_snapshot: snapshot,
+          client_webhook_auth_method_id: auth_method.id,
+          client_auth_type: "basic"
+        )
+
+      loaded =
+        ChannelRequest
+        |> Repo.get!(request.id)
+        |> Repo.preload(:client_webhook_auth_method)
+
+      assert loaded.client_webhook_auth_method.id == auth_method.id
+      assert loaded.client_auth_type == "basic"
+    end
+  end
+
+  # Proves the on_delete: :nilify_all FK behavior and that the
+  # denormalized client_auth_type remains as a historical record.
+  describe "client_webhook_auth_method_id nilification on delete" do
+    test "FK is nilified when auth method is deleted, client_auth_type survives" do
+      channel = insert(:channel)
+      {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+      auth_method = insert(:webhook_auth_method, project: channel.project)
+
+      request =
+        insert(:channel_request,
+          channel: channel,
+          channel_snapshot: snapshot,
+          client_webhook_auth_method_id: auth_method.id,
+          client_auth_type: "api"
+        )
+
+      # Verify FK is set
+      assert Repo.get!(ChannelRequest, request.id).client_webhook_auth_method_id ==
+               auth_method.id
+
+      # Delete the auth method
+      Repo.delete!(auth_method)
+
+      # FK should be nilified by on_delete: :nilify_all
+      reloaded = Repo.get!(ChannelRequest, request.id)
+      assert reloaded.client_webhook_auth_method_id == nil
+
+      # client_auth_type is a denormalized snapshot — it survives deletion
+      assert reloaded.client_auth_type == "api"
+    end
+  end
+
describe "delete_channel/2 with requests" do
test "removes requests before deleting channel" do
user = insert(:user)
diff --git a/test/lightning/channels/channel_stats_test.exs b/test/lightning/channels/channel_stats_test.exs
index 696540386c..9d72fdb7db 100644
--- a/test/lightning/channels/channel_stats_test.exs
+++ b/test/lightning/channels/channel_stats_test.exs
@@ -2,7 +2,6 @@ defmodule Lightning.Channels.ChannelStatsTest do
use Lightning.DataCase, async: true
alias Lightning.Channels
- alias Lightning.Channels.ChannelRequest
alias Lightning.Channels.SearchParams
describe "get_channel_stats_for_project/1" do
@@ -20,29 +19,23 @@ defmodule Lightning.Channels.ChannelStatsTest do
{:ok, snapshot1} = Channels.get_or_create_current_snapshot(channel1)
{:ok, snapshot2} = Channels.get_or_create_current_snapshot(channel2)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel1.id,
- channel_snapshot_id: snapshot1.id,
- request_id: "stats-r1",
- state: :success,
- started_at: DateTime.utc_now()
- })
-
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel1.id,
- channel_snapshot_id: snapshot1.id,
- request_id: "stats-r2",
- state: :success,
- started_at: DateTime.utc_now()
- })
-
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel2.id,
- channel_snapshot_id: snapshot2.id,
- request_id: "stats-r3",
- state: :success,
- started_at: DateTime.utc_now()
- })
+ insert(:channel_request,
+ channel: channel1,
+ channel_snapshot: snapshot1,
+ state: :success
+ )
+
+ insert(:channel_request,
+ channel: channel1,
+ channel_snapshot: snapshot1,
+ state: :success
+ )
+
+ insert(:channel_request,
+ channel: channel2,
+ channel_snapshot: snapshot2,
+ state: :success
+ )
assert %{total_channels: 2, total_requests: 3} =
Channels.get_channel_stats_for_project(project.id)
@@ -53,13 +46,11 @@ defmodule Lightning.Channels.ChannelStatsTest do
other_channel = insert(:channel)
{:ok, snapshot} = Channels.get_or_create_current_snapshot(other_channel)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: other_channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "stats-other-r1",
- state: :success,
- started_at: DateTime.utc_now()
- })
+ insert(:channel_request,
+ channel: other_channel,
+ channel_snapshot: snapshot,
+ state: :success
+ )
assert %{total_requests: 0} =
Channels.get_channel_stats_for_project(project.id)
diff --git a/test/lightning/channels/handler_test.exs b/test/lightning/channels/handler_test.exs
index 23aa5c5fec..897af8fe8c 100644
--- a/test/lightning/channels/handler_test.exs
+++ b/test/lightning/channels/handler_test.exs
@@ -122,7 +122,11 @@ defmodule Lightning.Channels.HandlerTest do
end
test "creates ChannelEvent with correct fields", %{state: state} do
- result = finished_result(status: 200, duration_us: 50_000)
+ result =
+ finished_result(
+ status: 200,
+ timing: %{total_us: 50_000, send_us: 2_000, recv_us: 1_000}
+ )
assert {:ok, _state} = Handler.handle_response_finished(result, state)
@@ -132,8 +136,8 @@ defmodule Lightning.Channels.HandlerTest do
assert event.request_method == state.request_method
assert event.request_path == "/test/path"
assert event.response_status == 200
- assert event.latency_ms == 50
- assert event.ttfb_ms == 10
+ assert event.latency_us == 50_000
+ assert event.ttfb_us == 10_000
assert event.error_message == nil
end
@@ -190,6 +194,242 @@ defmodule Lightning.Channels.HandlerTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — Philter 0.3.0 adaptation + new fields
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D1: New columns on channel_events (body sizes, durations, query string)
+ # - D2: Headers text → jsonb migration
+ # - D4: Handler reads from Philter 0.3.0 result structure
+ #
+ # Written ahead of Phase 1b, which implements the changes they specify.
+
+ describe "ChannelEvent changeset — new fields" do
+ test "accepts body size fields" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_body_size: 1024,
+ response_body_size: 2048
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_body_size == 1024
+ assert changeset.changes.response_body_size == 2048
+ end
+
+ test "accepts per-direction duration fields" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_send_us: 3_500,
+ response_duration_us: 8_000
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_send_us == 3_500
+ assert changeset.changes.response_duration_us == 8_000
+ end
+
+ test "accepts request_query_string" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :destination_response,
+ request_query_string: "page=1&limit=10"
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ assert changeset.changes.request_query_string == "page=1&limit=10"
+ end
+
+ test "new fields are all nullable" do
+ attrs = %{
+ channel_request_id: Ecto.UUID.generate(),
+ type: :error,
+ error_message: "nxdomain"
+ }
+
+ changeset = ChannelEvent.changeset(%ChannelEvent{}, attrs)
+ assert changeset.valid?
+ refute Map.has_key?(changeset.changes, :request_body_size)
+ refute Map.has_key?(changeset.changes, :response_body_size)
+ refute Map.has_key?(changeset.changes, :request_send_us)
+ refute Map.has_key?(changeset.changes, :response_duration_us)
+ refute Map.has_key?(changeset.changes, :request_query_string)
+ end
+ end
+
+ describe "persist_completion — Philter 0.3.0 fields" do
+ setup %{state: state} do
+ metadata = request_metadata()
+ {:ok, state} = Handler.handle_request_started(metadata, state)
+
+ state =
+ Map.merge(state, %{
+ ttfb_us: 10_000,
+ response_status: 200,
+ response_headers: [{"content-type", "text/plain"}]
+ })
+
+ %{state: state}
+ end
+
+ test "uses timing.total_us for latency_us", %{state: state} do
+ result = philter_result(timing: %{total_us: 50_000, send_us: 2_000})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.latency_us == 50_000
+ end
+
+ test "persists request_send_us from timing.send_us", %{state: state} do
+ result = philter_result(timing: %{total_us: 50_000, send_us: 3_500})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_send_us == 3_500
+ end
+
+ test "persists response_duration_us from timing.recv_us",
+ %{state: state} do
+ result =
+ philter_result(
+ timing: %{total_us: 50_000, send_us: 2_000, recv_us: 8_000}
+ )
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.response_duration_us == 8_000
+ end
+
+ test "persists body sizes from observations", %{state: state} do
+ result =
+ philter_result(
+ request_observation: %{
+ hash: "req123",
+ size: 1024,
+ body: nil,
+ preview: "request body"
+ },
+ response_observation: %{
+ hash: "resp123",
+ size: 2048,
+ body: nil,
+ preview: "response body"
+ }
+ )
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_body_size == 1024
+ assert event.response_body_size == 2048
+ end
+
+ test "persists query string from handler state", %{state: state} do
+ state = Map.put(state, :query_string, "page=1&limit=10")
+ result = philter_result()
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_query_string == "page=1&limit=10"
+ end
+
+ test "nil phase timings when collect_timing is disabled", %{state: state} do
+ result =
+ philter_result(timing: %{total_us: 50_000, send_us: nil, recv_us: nil})
+
+ assert {:ok, _state} = Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.request_send_us == nil
+ assert event.response_duration_us == nil
+ assert event.latency_us == 50_000
+ end
+ end
+
+ describe "header encoding — native jsonb" do
+ setup %{state: state} do
+ metadata =
+ request_metadata(
+ headers: [
+ {"content-type", "application/json"},
+ {"x-custom", "value"}
+ ]
+ )
+
+ {:ok, state} = Handler.handle_request_started(metadata, state)
+
+ state =
+ Map.merge(state, %{
+ ttfb_us: 10_000,
+ response_status: 200,
+ response_headers: [
+ {"content-type", "text/plain"},
+ {"x-resp", "val"}
+ ]
+ })
+
+ %{state: state}
+ end
+
+ test "request headers round-trip as list without Jason.decode!", %{
+ state: state
+ } do
+ result = philter_result()
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+
+ # After jsonb migration, headers are native lists, not JSON strings
+ assert is_list(event.request_headers)
+
+ assert event.request_headers == [
+ ["content-type", "application/json"],
+ ["x-custom", "value"]
+ ]
+ end
+
+ test "response headers round-trip as list without Jason.decode!", %{
+ state: state
+ } do
+ result = philter_result()
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+
+ assert is_list(event.response_headers)
+
+ assert event.response_headers == [
+ ["content-type", "text/plain"],
+ ["x-resp", "val"]
+ ]
+ end
+
+ test "nil headers remain nil", %{state: state} do
+ state = Map.delete(state, :response_headers)
+
+ result =
+ philter_result(
+ status: nil,
+ error: %Mint.TransportError{reason: :econnrefused}
+ )
+
+ Handler.handle_response_finished(result, state)
+
+ event = Repo.one!(ChannelEvent)
+ assert event.response_headers == nil
+ end
+ end
+
# Helpers
defp request_metadata(overrides \\ []) do
@@ -213,9 +453,37 @@ defmodule Lightning.Channels.HandlerTest do
hash: "abc123",
size: 100,
body: nil,
- preview: "test body",
- duration_us: 1000,
- time_to_first_byte_us: 500
+ preview: "test body"
+ }
+
+ %{
+ request_observation:
+ Keyword.get(overrides, :request_observation, observation),
+ response_observation:
+ Keyword.get(overrides, :response_observation, observation),
+ error: Keyword.get(overrides, :error, nil),
+ upstream_url:
+ Keyword.get(overrides, :upstream_url, "http://localhost:4999"),
+ method: Keyword.get(overrides, :method, "GET"),
+ status: Keyword.get(overrides, :status, 200),
+ timing:
+ Keyword.get(overrides, :timing, %{
+ total_us: 10_000,
+ send_us: 2_000,
+ recv_us: 1_000
+ })
+ }
+ end
+
+ # Philter 0.3.0 result format:
+ # - Observations are content-only (hash, size, preview, body)
+ # - All timing lives in the top-level timing map
+ defp philter_result(overrides \\ []) do
+ observation = %{
+ hash: "abc123",
+ size: 100,
+ body: nil,
+ preview: "test body"
}
%{
@@ -228,7 +496,12 @@ defmodule Lightning.Channels.HandlerTest do
Keyword.get(overrides, :upstream_url, "http://localhost:4999"),
method: Keyword.get(overrides, :method, "GET"),
status: Keyword.get(overrides, :status, 200),
- duration_us: Keyword.get(overrides, :duration_us, 10_000)
+ timing:
+ Keyword.get(overrides, :timing, %{
+ total_us: 10_000,
+ send_us: 2_000,
+ recv_us: 1_000
+ })
}
end
end
diff --git a/test/lightning/channels_test.exs b/test/lightning/channels_test.exs
index 7500a759c3..9f32906fe4 100644
--- a/test/lightning/channels_test.exs
+++ b/test/lightning/channels_test.exs
@@ -6,7 +6,6 @@ defmodule Lightning.ChannelsTest do
alias Lightning.Auditing.Audit
alias Lightning.Channels
alias Lightning.Channels.Channel
- alias Lightning.Channels.ChannelRequest
alias Lightning.Channels.ChannelSnapshot
describe "list_channels_for_project/1" do
@@ -43,21 +42,19 @@ defmodule Lightning.ChannelsTest do
t1 = ~U[2025-01-01 10:00:00.000000Z]
t2 = ~U[2025-01-02 12:00:00.000000Z]
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "req-stats-1",
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
state: :success,
started_at: t1
- })
+ )
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel.id,
- channel_snapshot_id: snapshot.id,
- request_id: "req-stats-2",
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
state: :success,
started_at: t2
- })
+ )
results = Channels.list_channels_for_project_with_stats(project.id)
@@ -79,13 +76,12 @@ defmodule Lightning.ChannelsTest do
{:ok, snapshot_b} = Channels.get_or_create_current_snapshot(channel_b)
- Lightning.Repo.insert!(%ChannelRequest{
- channel_id: channel_b.id,
- channel_snapshot_id: snapshot_b.id,
- request_id: "req-stats-3",
+ insert(:channel_request,
+ channel: channel_b,
+ channel_snapshot: snapshot_b,
state: :success,
started_at: ~U[2025-06-01 00:00:00.000000Z]
- })
+ )
results = Channels.list_channels_for_project_with_stats(project.id)
@@ -246,6 +242,34 @@ defmodule Lightning.ChannelsTest do
assert audit.actor_id == user.id
end
+ test "returns {:ok, channel} when submitted with no real changes", %{
+ user: user
+ } do
+ channel = insert(:channel)
+
+ # Pass back the current values — empty changes map. Previously this
+ # crashed with FunctionClauseError because Audit.event/4 returned
+ # :no_changes and that was piped into Multi.insert/3.
+ assert {:ok, unchanged} =
+ Channels.update_channel(
+ channel,
+ %{name: channel.name, destination_url: channel.destination_url},
+ actor: user
+ )
+
+ assert unchanged.id == channel.id
+ assert unchanged.lock_version == channel.lock_version
+
+ # No audit row was written for the no-op save
+ assert [] ==
+ Repo.all(
+ from a in Audit,
+ where:
+ a.item_id == ^channel.id and a.item_type == "channel" and
+ a.event == "updated"
+ )
+ end
+
test "passing nil for destination_auth_method removes the join record",
%{user: user} do
project = insert(:project)
@@ -433,4 +457,91 @@ defmodule Lightning.ChannelsTest do
assert snapshot2.name == "updated-name"
end
end
+
+ describe "get_channel_request_for_project/2" do
+ test "returns channel request with preloads when project matches" do
+ project = insert(:project)
+ user = insert(:user)
+
+ webhook_auth_method =
+ insert(:webhook_auth_method, project: project, auth_type: :api)
+
+ credential =
+ insert(:credential, user: user, name: "dest-cred", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel = insert(:channel, project: project)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now(),
+ client_webhook_auth_method_id: webhook_auth_method.id,
+ client_auth_type: "api",
+ destination_credential_id: project_credential.id
+ )
+
+ event =
+ insert(:channel_event,
+ channel_request: request,
+ request_path: "/test",
+ latency_us: 100_000
+ )
+
+ result = Channels.get_channel_request_for_project(project.id, request.id)
+
+ assert result.id == request.id
+ assert result.channel.id == channel.id
+ assert result.channel_snapshot.id == snapshot.id
+
+ assert length(result.channel_events) == 1
+ assert hd(result.channel_events).id == event.id
+
+ # Client and destination auth tracking are preloaded (no N+1).
+ assert result.client_webhook_auth_method.id == webhook_auth_method.id
+ assert result.destination_credential.id == project_credential.id
+ assert result.destination_credential.credential.name == "dest-cred"
+ end
+
+ test "returns nil when channel request belongs to a different project" do
+ project_a = insert(:project)
+ project_b = insert(:project)
+ channel = insert(:channel, project: project_a)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ assert Channels.get_channel_request_for_project(project_b.id, request.id) ==
+ nil
+ end
+
+ test "returns nil for non-existent request ID" do
+ project = insert(:project)
+
+ assert Channels.get_channel_request_for_project(
+ project.id,
+ Ecto.UUID.generate()
+ ) == nil
+ end
+
+ test "returns nil for invalid UUID" do
+ project = insert(:project)
+
+ assert Channels.get_channel_request_for_project(
+ project.id,
+ "not-a-valid-uuid"
+ ) == nil
+ end
+ end
end
diff --git a/test/lightning_web/live/channel_live/form_test.exs b/test/lightning_web/live/channel_live/form_test.exs
index 7ab177ceba..c862d053c8 100644
--- a/test/lightning_web/live/channel_live/form_test.exs
+++ b/test/lightning_web/live/channel_live/form_test.exs
@@ -156,23 +156,22 @@ defmodule LightningWeb.ChannelLive.FormTest do
{:ok, view, html} =
live(conn, ~p"/projects/#{project.id}/channels/new")
- # Fields appear in this order: Name, Destination URL,
- # Destination Credential, Enabled, Client Credentials
+ # Fields appear in this order: Name, Enabled, Destination URL,
+ # Destination Credential, Client Credentials
name_pos = :binary.match(html, "Name") |> elem(0)
+ enabled_pos = :binary.match(html, "Enabled") |> elem(0)
dest_url_pos = :binary.match(html, "Destination URL") |> elem(0)
dest_cred_pos =
:binary.match(html, "Destination Credential") |> elem(0)
- enabled_pos = :binary.match(html, "Enabled") |> elem(0)
-
client_cred_pos =
:binary.match(html, "Client Credentials") |> elem(0)
- assert name_pos < dest_url_pos
+ assert name_pos < enabled_pos
+ assert enabled_pos < dest_url_pos
assert dest_url_pos < dest_cred_pos
- assert dest_cred_pos < enabled_pos
- assert enabled_pos < client_cred_pos
+ assert dest_cred_pos < client_cred_pos
# Sublabels
assert html =~ "The service OpenFn will forward requests to"
diff --git a/test/lightning_web/live/channel_request_live/helpers_test.exs b/test/lightning_web/live/channel_request_live/helpers_test.exs
new file mode 100644
index 0000000000..c8f37ab573
--- /dev/null
+++ b/test/lightning_web/live/channel_request_live/helpers_test.exs
@@ -0,0 +1,97 @@
+defmodule LightningWeb.ChannelRequestLive.HelpersTest do
+ use ExUnit.Case, async: true
+
+ alias LightningWeb.ChannelRequestLive.Helpers
+
+ describe "humanize_error/1" do
+ test "maps transport error codes to human messages" do
+ assert Helpers.humanize_error("nxdomain") =~
+ "DNS lookup failed"
+
+ assert Helpers.humanize_error("econnrefused") =~
+ "Connection refused"
+
+ assert Helpers.humanize_error("ehostunreach") =~
+ "Host unreachable"
+
+ assert Helpers.humanize_error("enetunreach") =~
+ "Network unreachable"
+
+ assert Helpers.humanize_error("closed") =~
+ "Connection closed unexpectedly"
+
+ assert Helpers.humanize_error("econnreset") =~
+ "Connection reset"
+
+ assert Helpers.humanize_error("econnaborted") =~
+ "Connection aborted"
+
+ assert Helpers.humanize_error("epipe") =~
+ "Broken pipe"
+
+ assert Helpers.humanize_error("connect_timeout") =~
+ "Connection timed out"
+
+ assert Helpers.humanize_error("response_timeout") =~
+ "Response timed out"
+
+ assert Helpers.humanize_error("timeout") =~
+ "Request timed out"
+ end
+
+ test "maps credential error codes to human messages" do
+ assert Helpers.humanize_error("credential_missing_auth_fields") =~
+ "missing required authentication fields"
+
+ assert Helpers.humanize_error("credential_environment_not_found") =~
+ "credential environment could not be found"
+
+ assert Helpers.humanize_error("oauth_refresh_failed") =~
+ "OAuth token refresh failed"
+
+ assert Helpers.humanize_error("oauth_reauthorization_required") =~
+ "OAuth credential needs to be re-authorized"
+ end
+
+ test "handles unsupported_credential_schema with dynamic name" do
+ result =
+ Helpers.humanize_error("unsupported_credential_schema:my_schema")
+
+ assert result =~ "Unsupported credential type"
+ assert result =~ "my_schema"
+ end
+
+ test "passes through unknown error codes unchanged" do
+ assert Helpers.humanize_error("some_unknown_error") ==
+ "some_unknown_error"
+ end
+ end
+
+ describe "error_category/1" do
+ test "classifies transport errors" do
+ for code <- ~w(nxdomain econnrefused ehostunreach enetunreach closed
+ econnreset econnaborted epipe connect_timeout
+ response_timeout timeout) do
+ assert Helpers.error_category(code) == :transport,
+ "expected #{code} to be :transport"
+ end
+ end
+
+ test "classifies credential errors" do
+ for code <- ~w(credential_missing_auth_fields
+ credential_environment_not_found
+ oauth_refresh_failed
+ oauth_reauthorization_required) do
+ assert Helpers.error_category(code) == :credential,
+ "expected #{code} to be :credential"
+ end
+
+ assert Helpers.error_category("unsupported_credential_schema:foo") ==
+ :credential
+ end
+
+ test "returns nil for unknown error codes" do
+ assert Helpers.error_category("something_else") == nil
+ end
+ end
+end
diff --git a/test/lightning_web/live/channel_request_live/show_test.exs b/test/lightning_web/live/channel_request_live/show_test.exs
new file mode 100644
index 0000000000..f16fc493aa
--- /dev/null
+++ b/test/lightning_web/live/channel_request_live/show_test.exs
@@ -0,0 +1,664 @@
+defmodule LightningWeb.ChannelRequestLive.ShowTest do
+ use LightningWeb.ConnCase, async: true
+
+ import Phoenix.LiveViewTest
+ import Lightning.Factories
+
+ alias Lightning.Channels
+
+ setup :stub_rate_limiter_ok
+
+ defp enable_experimental_features(%{user: user}) do
+ Lightning.Accounts.update_user_preferences(user, %{
+ "experimental_features" => true
+ })
+
+ :ok
+ end
+
+ defp create_channel_request(project, attrs \\ %{}) do
+ attrs = Map.new(attrs)
+
+ channel =
+ Map.get_lazy(attrs, :channel, fn ->
+ insert(:channel, project: project)
+ end)
+
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: Map.get(attrs, :state, :success),
+ client_identity: Map.get(attrs, :client_identity, "192.168.1.1"),
+ client_auth_type: Map.get(attrs, :client_auth_type, "api"),
+ started_at: Map.get(attrs, :started_at, ~U[2026-04-10 10:00:00.000000Z]),
+ completed_at:
+ Map.get(attrs, :completed_at, ~U[2026-04-10 10:00:00.350000Z])
+ )
+
+ {request, channel, snapshot}
+ end
+
+ defp detail_path(project, request) do
+ ~p"/projects/#{project.id}/history/channels/#{request.id}"
+ end
+
+ describe "feature gate" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+
+ test "redirects when experimental features are disabled", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ assert {:error, {:redirect, _}} =
+ live(conn, detail_path(project, request))
+ end
+ end
+
+ describe "detail page — success state" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders summary card, metadata, headers, and body previews", %{
+ conn: conn,
+ project: project
+ } do
+ {request, channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ request_query_string: "format=json"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Summary card
+ assert html =~ "POST"
+ assert html =~ "/api/v1/data"
+ assert html =~ "format=json"
+ assert html =~ "200"
+ assert html =~ "Success"
+ assert html =~ channel.name
+
+ # Metadata
+ assert html =~ "192.168.1.1"
+ assert html =~ "api"
+ assert html =~ String.slice(request.id, 0..7)
+ assert html =~ "350"
+ # Destination URL from channel
+ assert html =~ channel.destination_url
+ # Timestamps
+ assert html =~ "2026"
+ assert html =~ "10:00"
+
+ # Request headers
+ assert html =~ "content-type"
+ assert html =~ "authorization"
+ assert html =~ "[REDACTED]"
+
+ # Body previews (quotes are HTML-entity-encoded by LiveView's test DOM serializer)
+ assert html =~ "key"
+ assert html =~ "value"
+ assert html =~ "status"
+ assert html =~ "ok"
+ end
+ end
+
+ describe "detail page — error state" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders humanized error and raw string for transport error", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "econnrefused",
+ latency_us: 100_000
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "Connection refused"
+ assert html =~ "econnrefused"
+ end
+
+ test "renders credential error with appropriate messaging", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "credential_missing_auth_fields"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "missing required authentication fields"
+ assert html =~ "credential_missing_auth_fields"
+ end
+ end
+
+ describe "detail page — timing section" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders full nested timeline with all Finch phases, overhead, and reused connection",
+ %{conn: conn, project: project} do
+ # --- Full phases with overhead ---
+ {req1, _ch1, _snap1} = create_channel_request(project)
+
+ # inner_sum = 2+15+5+158+65 = 245ms, latency = 260ms => 15ms overhead
+ insert(:channel_event,
+ channel_request: req1,
+ queue_us: 2_000,
+ connect_us: 15_000,
+ request_send_us: 5_000,
+ ttfb_us: 180_000,
+ response_duration_us: 65_000,
+ latency_us: 260_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req1))
+ html1 = render(view1)
+
+ # Timing section present with bookend labels
+ assert html1 =~ ~s(id="timing-section")
+ assert html1 =~ "0 ms"
+ assert html1 =~ "260 ms"
+
+ # Phase segment title attributes (tooltip text)
+ assert html1 =~ ~s(title="Queue: 2 ms")
+ assert html1 =~ ~s(title="Connect: 15 ms")
+ assert html1 =~ ~s(title="Send: 5 ms")
+ assert html1 =~ ~s(title="Processing: 158 ms")
+ assert html1 =~ ~s(title="Recv: 65 ms")
+
+ # TTFB marker and legend with overhead swatch
+ assert html1 =~ "TTFB: 180 ms"
+ assert html1 =~ "Proxy overhead"
+
+ # --- Reused connection ---
+ {req2, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req2,
+ reused_connection: true,
+ queue_us: 1_000,
+ connect_us: 0,
+ request_send_us: 4_000,
+ ttfb_us: 120_000,
+ response_duration_us: 30_000,
+ latency_us: 155_000
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req2))
+ html2 = render(view2)
+
+ assert html2 =~ ~s(id="timing-section")
+ assert html2 =~ "(reused)"
+
+ # --- Processing segment from nil queue/connect ---
+ {req3, _ch3, _snap3} = create_channel_request(project)
+
+ # wait = ttfb - 0 - 0 - send = 200k - 10k = 190k
+ insert(:channel_event,
+ channel_request: req3,
+ queue_us: nil,
+ connect_us: nil,
+ request_send_us: 10_000,
+ ttfb_us: 200_000,
+ response_duration_us: 50_000,
+ latency_us: 260_000
+ )
+
+ {:ok, view3, _html} = live(conn, detail_path(project, req3))
+ html3 = render(view3)
+
+ assert html3 =~ ~s(title="Processing: 190 ms")
+ end
+
+ test "degrades gracefully through partial and minimal tiers",
+ %{conn: conn, project: project} do
+ # Partial tier: TTFB + latency only => TTFB/Download segments
+ {req1, _ch1, _snap1} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req1,
+ request_send_us: nil,
+ response_duration_us: nil,
+ ttfb_us: 280_000,
+ latency_us: 350_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req1))
+ html1 = render(view1)
+
+ assert html1 =~ ~s(title="TTFB: 280 ms")
+ assert html1 =~ ~s(title="Download: 70 ms")
+ assert html1 =~ "350 ms"
+ refute html1 =~ "Proxy overhead"
+
+ # Minimal tier: only latency_us => single Total bar
+ {req2, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req2,
+ request_send_us: nil,
+ response_duration_us: nil,
+ ttfb_us: nil,
+ latency_us: 420_000
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req2))
+ html2 = render(view2)
+
+ assert html2 =~ ~s(title="Total: 420 ms")
+ assert html2 =~ "420 ms"
+ end
+
+ test "positions TTFB marker on the inner-phase scale, not latency",
+ %{conn: conn, project: project} do
+ # inner_total = queue+connect+send+wait+recv
+ # = 10 + 20 + 5 + (ttfb - 10 - 20 - 5) + recv
+ # = ttfb + recv
+ # With ttfb=100ms and recv=100ms => inner_total=200ms => 50%
+ # latency_us is deliberately different (250ms) to prove the marker
+ # is scaled against inner_total, not total_us.
+ {req_half, _ch, _snap} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req_half,
+ queue_us: 10_000,
+ connect_us: 20_000,
+ request_send_us: 5_000,
+ ttfb_us: 100_000,
+ response_duration_us: 100_000,
+ latency_us: 250_000
+ )
+
+ {:ok, view_half, _html} = live(conn, detail_path(project, req_half))
+ html_half = render(view_half)
+
+ assert html_half =~ ~s(style="left: 50.0%")
+
+ # Edge case: ttfb == inner_total => marker at 100%.
+ # inner_total = ttfb + recv, so set recv = 0 (response_duration_us = 0).
+ {req_full, _ch2, _snap2} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: req_full,
+ queue_us: 10_000,
+ connect_us: 20_000,
+ request_send_us: 5_000,
+ ttfb_us: 100_000,
+ response_duration_us: 0,
+ latency_us: 150_000
+ )
+
+ {:ok, view_full, _html} = live(conn, detail_path(project, req_full))
+ html_full = render(view_full)
+
+ assert html_full =~ ~s(style="left: 100.0%")
+ end
+
+ test "shows single bar for transport errors, hidden for credential errors",
+ %{conn: conn, project: project} do
+ {req_transport, _ch1, _snap1} =
+ create_channel_request(project, state: :timeout)
+
+ insert(:channel_error_event,
+ channel_request: req_transport,
+ error_message: "response_timeout",
+ latency_us: 30_000_000
+ )
+
+ {:ok, view1, _html} = live(conn, detail_path(project, req_transport))
+ html1 = render(view1)
+ assert html1 =~ ~s(id="timing-section")
+ assert html1 =~ ~s(title="Total: 30000 ms")
+
+ # Credential error: timing section hidden entirely
+ {req_cred, _ch2, _snap2} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: req_cred,
+ error_message: "credential_missing_auth_fields"
+ )
+
+ {:ok, view2, _html} = live(conn, detail_path(project, req_cred))
+ html2 = render(view2)
+ refute html2 =~ "timing-section"
+ end
+ end
+
+ describe "detail page — context section" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders snapshot data and config changed indicator when versions differ",
+ %{conn: conn, project: project, user: user} do
+ channel = insert(:channel, project: project)
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Snapshot data renders
+ assert html =~ snapshot.destination_url
+ assert html =~ to_string(snapshot.lock_version)
+
+ # Bump channel version to create mismatch
+ {:ok, _updated} =
+ Channels.update_channel(channel, %{name: "updated-name"}, actor: user)
+
+ {:ok, view2, _html} = live(conn, detail_path(project, request))
+ html2 = render(view2)
+
+ assert html2 =~ "changed" or html2 =~ "Config"
+ end
+ end
+
+ describe "detail page — nil body" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "shows 'Body not captured' when body_preview is nil", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ request_body_preview: nil,
+ request_body_hash: nil,
+ request_body_size: 2048
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "Body not captured"
+ end
+
+ test "hides body sub-section entirely when both preview and size are nil",
+ %{conn: conn, project: project} do
+ {request, _channel, _snapshot} =
+ create_channel_request(project, state: :error)
+
+ insert(:channel_error_event,
+ channel_request: request,
+ error_message: "credential_missing_auth_fields",
+ request_body_preview: nil,
+ request_body_hash: nil,
+ request_body_size: nil
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ refute html =~ "Body not captured"
+ refute html =~ "request-body"
+ end
+
+ test "shows metadata only for binary (non-text) content-type", %{
+ conn: conn,
+ project: project
+ } do
+ {request, _channel, _snapshot} = create_channel_request(project)
+
+ insert(:channel_event,
+ channel_request: request,
+ response_headers: [["content-type", "application/octet-stream"]],
+ response_body_preview: nil,
+ response_body_size: 4096,
+ response_body_hash: "binaryhash123"
+ )
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Should show size and hash metadata
+ assert html =~ "4096" or html =~ "4.0 KB" or html =~ "4 KB"
+ assert html =~ "binaryhash123"
+ # Should NOT render a body preview block
+ refute html =~ ~s({"status":"ok"})
+ end
+ end
+
+ describe "security" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "cross-project isolation and invalid UUID both return 404", %{
+ conn: conn,
+ project: project
+ } do
+ other_project = insert(:project)
+ {request, _channel, _snapshot} = create_channel_request(other_project)
+ insert(:channel_event, channel_request: request)
+
+ assert {:error, {:redirect, _}} =
+ live(conn, detail_path(project, request))
+
+ assert {:error, {:redirect, _}} =
+ live(
+ conn,
+ ~p"/projects/#{project.id}/history/channels/not-a-uuid"
+ )
+ end
+ end
+
+ describe "detail page — auth attribution" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "renders client auth method name and destination credential name when both present",
+ %{conn: conn, project: project, user: user} do
+ webhook_auth_method =
+ insert(:webhook_auth_method,
+ project: project,
+ auth_type: :api,
+ name: "Prod API key"
+ )
+
+ credential =
+ insert(:credential, user: user, name: "Destination API", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel = insert(:channel, project: project)
+
+ {request, _channel, _snap} =
+ create_channel_request(project,
+ channel: channel,
+ client_auth_type: "api"
+ )
+
+ request
+ |> Ecto.Changeset.change(%{
+ client_webhook_auth_method_id: webhook_auth_method.id,
+ destination_credential_id: project_credential.id
+ })
+ |> Lightning.Repo.update!()
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Client auth: method name and auth type label
+ assert html =~ "Prod API key"
+ assert html =~ "API key"
+
+ # Destination auth: credential name
+ assert html =~ "Destination API"
+
+ # Section labels
+ assert html =~ "Client auth"
+ assert html =~ "Destination auth"
+ end
+
+ test "renders 'None' when no client auth configured and credential name when destination set",
+ %{conn: conn, project: project, user: user} do
+ credential =
+ insert(:credential, user: user, name: "Only Destination", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ {request, _channel, _snap} =
+ create_channel_request(project, client_auth_type: nil)
+
+ request
+ |> Ecto.Changeset.change(%{
+ client_webhook_auth_method_id: nil,
+ destination_credential_id: project_credential.id
+ })
+ |> Lightning.Repo.update!()
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # Client auth column shows "None"
+ assert html =~ "None"
+ # Destination credential name still renders
+ assert html =~ "Only Destination"
+ end
+
+ test "renders '(deleted)' without crashing when destination credential id is set but association missing",
+ %{conn: conn, project: project, user: user} do
+ credential =
+ insert(:credential, user: user, name: "Will Be Gone", schema: "http")
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ {request, _channel, _snap} = create_channel_request(project)
+
+ request
+ |> Ecto.Changeset.change(%{
+ destination_credential_id: project_credential.id
+ })
+ |> Lightning.Repo.update!()
+
+ insert(:channel_event, channel_request: request)
+
+      # Hard-delete the project credential. The DB-level
+      # `on_delete: :nilify_all` on channel_requests then clears
+      # destination_credential_id, so the request row ends up with a
+      # nil credential id rather than a dangling one. We assert the
+      # detail page still renders ("None" after nilify, or "(deleted)"
+      # on a stale read) and that a reload sees the nil id. The true
+      # "id set, association nil" in-memory path is covered directly
+      # by the helper unit test below.
+ Lightning.Repo.delete!(project_credential)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ # After nilify, helper returns "None". Request still renders.
+ assert html =~ "None" or html =~ "(deleted)"
+
+ reloaded =
+ Lightning.Channels.get_channel_request_for_project(
+ project.id,
+ request.id
+ )
+
+ assert reloaded.destination_credential_id == nil
+ end
+
+ test "helper renders '(deleted)' for stale in-memory record with id but nil association" do
+ # Directly test the helper to cover the belt-and-suspenders path
+ # (id present, association nil) that the schema's on_delete: :nilify_all
+ # prevents us from producing through the DB.
+ stale = %Lightning.Channels.ChannelRequest{
+ client_webhook_auth_method_id: Ecto.UUID.generate(),
+ client_auth_type: "api",
+ client_webhook_auth_method: nil,
+ destination_credential_id: Ecto.UUID.generate(),
+ destination_credential: nil
+ }
+
+ assert LightningWeb.ChannelRequestLive.Helpers.format_client_auth(stale) =~
+ "(deleted)"
+
+ assert LightningWeb.ChannelRequestLive.Helpers.format_destination_auth(
+ stale
+ ) == "(deleted)"
+ end
+ end
+
+ describe "navigation" do
+ setup [:register_and_log_in_user, :create_project_for_current_user]
+ setup :enable_experimental_features
+
+ test "breadcrumbs render correctly", %{conn: conn, project: project} do
+ {request, _channel, _snapshot} = create_channel_request(project)
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} = live(conn, detail_path(project, request))
+ html = render(view)
+
+ assert html =~ "History"
+ assert html =~ "Channel"
+ assert html =~ String.slice(request.id, 0..7)
+ end
+
+ test "channel logs table rows link to the detail page", %{
+ conn: conn,
+ project: project
+ } do
+ channel = insert(:channel, project: project, name: "link-test")
+ {:ok, snapshot} = Channels.get_or_create_current_snapshot(channel)
+
+ request =
+ insert(:channel_request,
+ channel: channel,
+ channel_snapshot: snapshot,
+ state: :success,
+ started_at: DateTime.utc_now()
+ )
+
+ insert(:channel_event, channel_request: request)
+
+ {:ok, view, _html} =
+ live(conn, ~p"/projects/#{project.id}/history/channels")
+
+ html = render(view)
+
+ assert html =~
+ ~r/href="[^"]*\/projects\/#{project.id}\/history\/channels\/#{request.id}"/
+ end
+ end
+end
diff --git a/test/lightning_web/plugs/channel_proxy_plug_test.exs b/test/lightning_web/plugs/channel_proxy_plug_test.exs
index 0e48d90e46..9784748890 100644
--- a/test/lightning_web/plugs/channel_proxy_plug_test.exs
+++ b/test/lightning_web/plugs/channel_proxy_plug_test.exs
@@ -396,7 +396,7 @@ defmodule LightningWeb.ChannelProxyPlugTest do
assert event.type == :destination_response
assert event.response_status == 200
- assert event.latency_ms != nil
+ assert event.latency_us != nil
assert event.request_method == "GET"
assert event.request_path == "/persisted"
end
@@ -866,10 +866,9 @@ defmodule LightningWeb.ChannelProxyPlugTest do
)
# The handler redacts authorization headers before persisting
- headers = Jason.decode!(event.request_headers)
-
+ # Headers are native jsonb arrays, no JSON decoding needed
auth_header =
- Enum.find(headers, fn [k, _v] -> k == "authorization" end)
+ Enum.find(event.request_headers, fn [k, _v] -> k == "authorization" end)
assert auth_header == ["authorization", "[REDACTED]"]
end
@@ -985,6 +984,269 @@ defmodule LightningWeb.ChannelProxyPlugTest do
end
end
+ # ---------------------------------------------------------------
+ # Phase 1a contract tests — query string + client auth tracking
+ # ---------------------------------------------------------------
+ #
+ # These tests define the target interface after:
+ # - D1: request_query_string on channel_events
+ # - D3: client_webhook_auth_method_id and client_auth_type on channel_requests
+ # - D4: Proxy plug passes query string and auth info into handler state
+ #
+ # They will not compile/pass until Phase 1b implements the changes.
+
+ describe "query string persistence" do
+ test "persists query string on channel event", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/search", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "results")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/search?q=foo&page=2")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ assert event.request_query_string == "q=foo&page=2"
+ end
+
+ test "empty query string when no params", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/plain", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/plain")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ assert event.request_query_string == ""
+ end
+ end
+
+ describe "client auth tracking" do
+ test "persists auth method ID and type for API key auth", %{bypass: bypass} do
+ channel =
+ create_client_auth_channel(bypass, [
+ %{auth_type: :api, api_key: "track-me"}
+ ])
+
+ auth_method =
+ channel
+ |> Lightning.Repo.preload(client_webhook_auth_methods: [])
+ |> Map.get(:client_webhook_auth_methods)
+ |> hd()
+
+ Bypass.expect_once(bypass, "GET", "/tracked", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/tracked")
+ |> put_req_header("x-api-key", "track-me")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == auth_method.id
+ assert request.client_auth_type == "api"
+ end
+
+ test "persists auth method ID and type for Basic auth", %{bypass: bypass} do
+ channel =
+ create_client_auth_channel(bypass, [
+ %{auth_type: :basic, username: "user", password: "pass"}
+ ])
+
+ auth_method =
+ channel
+ |> Lightning.Repo.preload(client_webhook_auth_methods: [])
+ |> Map.get(:client_webhook_auth_methods)
+ |> hd()
+
+ Bypass.expect_once(bypass, "GET", "/tracked", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ encoded = Base.encode64("user:pass")
+
+ conn(:get, "/channels/#{channel.id}/tracked")
+ |> put_req_header("authorization", "Basic #{encoded}")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == auth_method.id
+ assert request.client_auth_type == "basic"
+ end
+
+ test "nil auth method when no client auth configured", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/open", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/open")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.client_webhook_auth_method_id == nil
+ assert request.client_auth_type == nil
+ end
+ end
+
+ describe "destination auth tracking" do
+ test "persists destination_credential_id on successful proxy with destination auth",
+ %{bypass: bypass} do
+ channel =
+ create_destination_auth_channel(bypass, "http", %{
+ "access_token" => "tok-123"
+ })
+
+ project_credential_id =
+ channel
+ |> Lightning.Repo.preload(destination_auth_method: :project_credential)
+ |> get_in([
+ Access.key(:destination_auth_method),
+ Access.key(:project_credential_id)
+ ])
+
+ Bypass.expect_once(bypass, "GET", "/dest-track", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/dest-track")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == project_credential_id
+ refute is_nil(project_credential_id)
+ end
+
+ test "persists destination_credential_id even when credential resolution fails",
+ %{bypass: _bypass} do
+ # Channel with a destination auth method but credential missing auth
+ # fields — destination auth resolution fails, but we still know which
+ # credential was configured.
+ project = insert(:project)
+ user = insert(:user)
+
+ credential =
+ insert(:credential, schema: "http", name: "bad-cred", user: user)
+ |> with_body(%{body: %{"baseUrl" => "https://example.com"}})
+
+ project_credential =
+ insert(:project_credential, project: project, credential: credential)
+
+ channel =
+ insert(:channel,
+ project: project,
+ destination_url: "http://localhost:9999",
+ enabled: true,
+ channel_auth_methods: [
+ build(:channel_auth_method,
+ role: :destination,
+ webhook_auth_method: nil,
+ project_credential: project_credential
+ )
+ ]
+ )
+
+ resp =
+ conn(:get, "/channels/#{channel.id}/test")
+ |> send_to_endpoint()
+
+ assert resp.status == 502
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == project_credential.id
+ assert request.state == :error
+ end
+
+ test "destination_credential_id is nil when no destination auth configured",
+ %{bypass: bypass, channel: channel} do
+ Bypass.expect_once(bypass, "GET", "/no-dest-auth", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/no-dest-auth")
+ |> send_to_endpoint()
+
+ request =
+ Lightning.Repo.one!(
+ from(r in ChannelRequest, where: r.channel_id == ^channel.id)
+ )
+
+ assert request.destination_credential_id == nil
+ end
+ end
+
+ describe "collect_timing integration" do
+ test "persists per-direction timing after successful proxy", %{
+ bypass: bypass,
+ channel: channel
+ } do
+ Bypass.expect_once(bypass, "GET", "/timed", fn conn ->
+ Plug.Conn.send_resp(conn, 200, "ok")
+ end)
+
+ conn(:get, "/channels/#{channel.id}/timed")
+ |> send_to_endpoint()
+
+ event =
+ Lightning.Repo.one!(
+ from(e in ChannelEvent,
+ join: r in ChannelRequest,
+ on: r.id == e.channel_request_id,
+ where: r.channel_id == ^channel.id
+ )
+ )
+
+ # With collect_timing: true, Philter populates timing.send_us
+ # which the handler persists as request_send_us
+ assert is_integer(event.request_send_us)
+ assert event.request_send_us >= 0
+ end
+ end
+
defp send_to_endpoint(conn) do
LightningWeb.Endpoint.call(conn, LightningWeb.Endpoint.init([]))
end
diff --git a/test/support/factories.ex b/test/support/factories.ex
index ffa99da33d..94c2a6de22 100644
--- a/test/support/factories.ex
+++ b/test/support/factories.ex
@@ -1,5 +1,7 @@
defmodule Lightning.Factories do
use ExMachina.Ecto, repo: Lightning.Repo
+ use Lightning.Factories.ChannelFactories
+
alias Lightning.Workflows.Snapshot
def webhook_auth_method_factory do
@@ -858,47 +860,4 @@ defmodule Lightning.Factories do
def sandbox_for(parent, attrs \\ %{}) do
build(:project, Map.merge(%{parent: parent}, attrs))
end
-
- def channel_factory do
- %Lightning.Channels.Channel{
- project: build(:project),
- name: sequence(:channel_name, &"channel-#{&1}"),
- destination_url:
- sequence(
- :channel_destination_url,
- &"https://example.com/destination/#{&1}"
- ),
- enabled: true
- }
- end
-
- def channel_auth_method_factory do
- %Lightning.Channels.ChannelAuthMethod{
- role: :client,
- webhook_auth_method: build(:webhook_auth_method)
- }
- end
-
- def channel_snapshot_factory do
- %Lightning.Channels.ChannelSnapshot{
- lock_version: 1,
- name: sequence(:channel_snapshot_name, &"channel-#{&1}"),
- destination_url: "https://example.com/destination",
- enabled: true
- }
- end
-
- def channel_request_factory do
- %Lightning.Channels.ChannelRequest{
- request_id: sequence(:channel_request_id, &"req-#{&1}"),
- state: :pending,
- started_at: DateTime.utc_now()
- }
- end
-
- def channel_event_factory do
- %Lightning.Channels.ChannelEvent{
- type: :destination_response
- }
- end
end
diff --git a/test/support/factories/channel_factories.ex b/test/support/factories/channel_factories.ex
new file mode 100644
index 0000000000..a4a88540a7
--- /dev/null
+++ b/test/support/factories/channel_factories.ex
@@ -0,0 +1,78 @@
+defmodule Lightning.Factories.ChannelFactories do
+ @moduledoc false
+
+ defmacro __using__(_opts) do
+ quote do
+ def channel_factory do
+ %Lightning.Channels.Channel{
+ project: build(:project),
+ name: sequence(:channel_name, &"channel-#{&1}"),
+ destination_url:
+ sequence(
+ :channel_destination_url,
+ &"https://example.com/destination/#{&1}"
+ ),
+ enabled: true
+ }
+ end
+
+ def channel_auth_method_factory do
+ %Lightning.Channels.ChannelAuthMethod{
+ role: :client,
+ webhook_auth_method: build(:webhook_auth_method)
+ }
+ end
+
+ def channel_snapshot_factory do
+ %Lightning.Channels.ChannelSnapshot{
+ lock_version: 1,
+ name: sequence(:channel_snapshot_name, &"channel-#{&1}"),
+ destination_url: "https://example.com/destination",
+ enabled: true
+ }
+ end
+
+ def channel_request_factory do
+ %Lightning.Channels.ChannelRequest{
+ request_id: sequence(:channel_request_id, &"req-#{&1}"),
+ client_identity: "127.0.0.1",
+ state: :pending,
+ started_at: DateTime.utc_now()
+ }
+ end
+
+ def channel_event_factory do
+ %Lightning.Channels.ChannelEvent{
+ type: :destination_response,
+ request_method: "POST",
+ request_path: "/api/v1/data",
+ request_headers: [
+ ["content-type", "application/json"],
+ ["authorization", "[REDACTED]"]
+ ],
+ request_body_preview: ~s({"key":"value"}),
+ request_body_hash: "abc123def456",
+ request_body_size: 15,
+ response_status: 200,
+ response_headers: [["content-type", "application/json"]],
+ response_body_preview: ~s({"status":"ok"}),
+ response_body_hash: "def456abc123",
+ response_body_size: 15,
+ latency_us: 350_000,
+ ttfb_us: 280_000,
+ request_send_us: 5000,
+ response_duration_us: 65000
+ }
+ end
+
+ def channel_error_event_factory do
+ %Lightning.Channels.ChannelEvent{
+ type: :error,
+ request_method: "POST",
+ request_path: "/api/v1/data",
+ request_headers: [["content-type", "application/json"]]
+ }
+ end
+ end
+ end
+end