defmodule DaProductApp.Transactions.ReqChkTxnService do |
  @moduledoc """
  Service for ReqChkTxn lifecycle and event logging.

  This service maintains req_chk_txn_events (per-aggregate chain) and
  also appends corresponding events to the canonical transaction_events
  timeline when the req_chk_txn is linked to a transaction.
  """

  import Ecto.Query, warn: false

  alias DaProductApp.Repo
  alias DaProductApp.Transactions.{ReqChkTxn, ReqChkTxnEvent}

  require Logger

  @doc """
  Append an event for a ReqChkTxn and ensure dual-logging.
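
  ## Examples

  A minimal usage sketch; the event type and payload shown here are
  illustrative only:

      {:ok, event} =
        append_event(req_chk_txn, "status_checked", %{checked_by: "ops"})

  When the req_chk_txn is linked to a transaction, a mirrored
  `:req_chk_txn_status_checked` event is also appended to the canonical
  transaction_events timeline.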
  """
  def append_event(%ReqChkTxn{} = req_chk_txn, event_type, payload \\ %{})
      when is_binary(event_type) do
    # determine seq and prev_hash from last req_chk_txn_event
    last =
      from(e in ReqChkTxnEvent,
        where: e.req_chk_txn_id == ^req_chk_txn.id,
        order_by: [desc: e.seq],
        limit: 1
      )
      |> Repo.one()

    seq = if last, do: last.seq + 1, else: 1
    prev_hash = last && last.hash
    now = DateTime.utc_now()
    hash = :crypto.hash(:sha256, :erlang.term_to_binary({prev_hash, seq, event_type, payload, now}))

    # create the req_chk_txn event (per-aggregate chain)
    attrs = %{
      req_chk_txn_id: req_chk_txn.id,
      transaction_id: req_chk_txn.transaction_id,
      seq: seq,
      event_type: event_type,
      payload: payload,
      prev_hash: prev_hash,
      hash: hash,
      inserted_at: now
    }

    changeset = ReqChkTxnEvent.changeset(%ReqChkTxnEvent{}, attrs)

    case Repo.insert(changeset) do
      {:ok, req_event} ->
        # dual log: chain + canonical transaction_events
        add_to_transaction_events(req_chk_txn, req_event)
        {:ok, req_event}

      {:error, changeset} ->
        {:error, changeset}
    end
  end
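
  # Illustrative sketch, not part of the original service: one way to check the
  # per-aggregate hash chain that append_event/3 builds. It reads the events in
  # seq order and verifies that each event points at its predecessor's hash and
  # that seq values are contiguous. The function name is hypothetical; it only
  # assumes the ReqChkTxnEvent fields already used above.
  def chain_intact?(req_chk_txn_id) do
    from(e in ReqChkTxnEvent,
      where: e.req_chk_txn_id == ^req_chk_txn_id,
      order_by: [asc: e.seq]
    )
    |> Repo.all()
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.all?(fn [prev, curr] ->
      curr.prev_hash == prev.hash and curr.seq == prev.seq + 1
    end)
  end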

  # Next sequence number for a req_chk_txn's event chain (1 when the chain is
  # empty). Currently unused: append_event/3 computes seq inline because it also
  # needs the previous event's hash.
  defp next_seq(req_chk_txn_id) do
    from(e in ReqChkTxnEvent,
      where: e.req_chk_txn_id == ^req_chk_txn_id,
      order_by: [desc: e.seq],
      limit: 1
    )
    |> Repo.one()
    |> case do
      nil -> 1
      last -> last.seq + 1
    end
  end

  # Add event to transaction_events for the unified timeline.
  #
  # NOTE: The project does not have the `transaction_event_chain` table in some
  # environments. Per request, avoid using that table and store req_chk_txn
  # events only in the per-aggregate `req_chk_txn_events` table and in the
  # canonical `transaction_events` via DaProductApp.Transactions.Service.append_event/3.
  # This preserves the unified transaction timeline without relying on
  # `transaction_event_chain`.
  defp add_to_transaction_events(req_chk_txn, req_chk_txn_event) do
    if req_chk_txn.transaction_id do
      mapped_event_type = String.to_atom("req_chk_txn_" <> req_chk_txn_event.event_type)

      transaction_payload = %{
        req_chk_txn_id: req_chk_txn.id,
        req_chk_txn_event_seq: req_chk_txn_event.seq,
        source: "req_chk_txn_service"
      }

      case DaProductApp.Transactions.Service.append_event(
             req_chk_txn.transaction_id,
             mapped_event_type,
             transaction_payload
           ) do
        {:ok, _txn_event} ->
          :ok

        {:error, reason} ->
          Logger.error(
            "[ReqChkTxnService] Failed to append transaction_event for req_chk_txn #{req_chk_txn.id}: #{inspect(reason)}"
          )

          :error
      end
    else
      :ok
    end
  end
end