Rename project from SimpleshopTheme to Berrypod
All modules, configs, paths, and references updated. 836 tests pass, zero warnings. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
306
lib/berrypod/accounts.ex
Normal file
306
lib/berrypod/accounts.ex
Normal file
@@ -0,0 +1,306 @@
|
||||
defmodule Berrypod.Accounts do
  @moduledoc """
  The Accounts context.

  Magic-link-first authentication with optional passwords: users register
  with an email only, log in via emailed tokens, and may set a password
  later. Session tokens are persisted so individual sessions can be expired.
  """

  import Ecto.Query, warn: false
  alias Berrypod.Repo

  alias Berrypod.Accounts.{User, UserToken, UserNotifier}

  ## Database getters

  @doc """
  Gets a user by email.

  ## Examples

      iex> get_user_by_email("foo@example.com")
      %User{}

      iex> get_user_by_email("unknown@example.com")
      nil

  """
  def get_user_by_email(email) when is_binary(email) do
    Repo.get_by(User, email: email)
  end

  @doc """
  Gets a user by email and password.

  Returns `nil` for an unknown email or a wrong password.

  ## Examples

      iex> get_user_by_email_and_password("foo@example.com", "correct_password")
      %User{}

      iex> get_user_by_email_and_password("foo@example.com", "invalid_password")
      nil

  """
  def get_user_by_email_and_password(email, password)
      when is_binary(email) and is_binary(password) do
    user = Repo.get_by(User, email: email)
    # valid_password?/2 burns a dummy hash when `user` is nil, so unknown
    # emails are not distinguishable from wrong passwords by timing.
    if User.valid_password?(user, password), do: user
  end

  @doc """
  Gets a single user.

  Raises `Ecto.NoResultsError` if the User does not exist.

  ## Examples

      iex> get_user!(123)
      %User{}

      iex> get_user!(456)
      ** (Ecto.NoResultsError)

  """
  def get_user!(id), do: Repo.get!(User, id)

  @doc """
  Returns whether an admin user exists.

  Berrypod is single-tenant — any user in the database is the admin.
  """
  def has_admin? do
    Repo.exists?(User)
  end

  ## User registration

  @doc """
  Registers a user.

  Only the email is cast (magic-link-first registration, no password field);
  a password can be added later via `update_user_password/2`.

  ## Examples

      iex> register_user(%{field: value})
      {:ok, %User{}}

      iex> register_user(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def register_user(attrs) do
    %User{}
    |> User.email_changeset(attrs)
    |> Repo.insert()
  end

  ## Settings

  @doc """
  Checks whether the user is in sudo mode.

  The user is in sudo mode when the last authentication was done no further
  than 20 minutes ago. The limit can be given as second argument in minutes.
  """
  # Bodiless head carries the default; -20 means "20 minutes in the past"
  # when added to utc_now below.
  def sudo_mode?(user, minutes \\ -20)

  def sudo_mode?(%User{authenticated_at: ts}, minutes) when is_struct(ts, DateTime) do
    DateTime.after?(ts, DateTime.utc_now() |> DateTime.add(minutes, :minute))
  end

  # No DateTime in authenticated_at (or no user at all): never in sudo mode.
  def sudo_mode?(_user, _minutes), do: false

  @doc """
  Returns an `%Ecto.Changeset{}` for changing the user email.

  See `Berrypod.Accounts.User.email_changeset/3` for a list of supported options.

  ## Examples

      iex> change_user_email(user)
      %Ecto.Changeset{data: %User{}}

  """
  def change_user_email(user, attrs \\ %{}, opts \\ []) do
    User.email_changeset(user, attrs, opts)
  end

  @doc """
  Updates the user email using the given token.

  If the token matches, the user email is updated and the token is deleted.
  """
  def update_user_email(user, token) do
    # The token context embeds the *current* email, so a token can only
    # confirm a change away from the address it was issued for.
    context = "change:#{user.email}"

    Repo.transact(fn ->
      with {:ok, query} <- UserToken.verify_change_email_token_query(token, context),
           %UserToken{sent_to: email} <- Repo.one(query),
           {:ok, user} <- Repo.update(User.email_changeset(user, %{email: email})),
           {_count, _result} <-
             Repo.delete_all(from(UserToken, where: [user_id: ^user.id, context: ^context])) do
        {:ok, user}
      else
        # Any failure (bad/expired token, changeset error) aborts and rolls back.
        _ -> {:error, :transaction_aborted}
      end
    end)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for changing the user password.

  See `Berrypod.Accounts.User.password_changeset/3` for a list of supported options.

  ## Examples

      iex> change_user_password(user)
      %Ecto.Changeset{data: %User{}}

  """
  def change_user_password(user, attrs \\ %{}, opts \\ []) do
    User.password_changeset(user, attrs, opts)
  end

  @doc """
  Updates the user password.

  Returns a tuple with the updated user, as well as a list of expired tokens.

  ## Examples

      iex> update_user_password(user, %{password: ...})
      {:ok, {%User{}, [...]}}

      iex> update_user_password(user, %{password: "too short"})
      {:error, %Ecto.Changeset{}}

  """
  def update_user_password(user, attrs) do
    user
    |> User.password_changeset(attrs)
    # Changing the password invalidates every outstanding token, sessions included.
    |> update_user_and_delete_all_tokens()
  end

  ## Session

  @doc """
  Generates a session token.

  The raw token is returned for the cookie; only the DB row is persisted.
  """
  def generate_user_session_token(user) do
    {token, user_token} = UserToken.build_session_token(user)
    Repo.insert!(user_token)
    token
  end

  @doc """
  Gets the user with the given signed token.

  If the token is valid `{user, token_inserted_at}` is returned, otherwise `nil` is returned.
  """
  def get_user_by_session_token(token) do
    # verify_session_token_query/1 always returns {:ok, query}; validity and
    # expiry are enforced inside the query itself.
    {:ok, query} = UserToken.verify_session_token_query(token)
    Repo.one(query)
  end

  @doc """
  Gets the user with the given magic link token.

  Returns `nil` for malformed, unknown, or expired tokens.
  """
  def get_user_by_magic_link_token(token) do
    with {:ok, query} <- UserToken.verify_magic_link_token_query(token),
         {user, _token} <- Repo.one(query) do
      user
    else
      _ -> nil
    end
  end

  @doc """
  Logs the user in by magic link.

  There are three cases to consider:

  1. The user has already confirmed their email. They are logged in
     and the magic link is expired.

  2. The user has not confirmed their email and no password is set.
     In this case, the user gets confirmed, logged in, and all tokens -
     including session ones - are expired. In theory, no other tokens
     exist but we delete all of them for best security practices.

  3. The user has not confirmed their email but a password is set.
     This cannot happen in the default implementation but may be the
     source of security pitfalls. See the "Mixing magic link and password registration" section of
     `mix help phx.gen.auth`.
  """
  def login_user_by_magic_link(token) do
    # NOTE(review): raises MatchError when the token is not valid url-base64
    # (verify_magic_link_token_query/1 returns :error then). Presumably
    # crash-on-tamper is intentional — confirm callers expect it.
    {:ok, query} = UserToken.verify_magic_link_token_query(token)

    case Repo.one(query) do
      # Prevent session fixation attacks by disallowing magic links for unconfirmed users with password
      {%User{confirmed_at: nil, hashed_password: hash}, _token} when not is_nil(hash) ->
        raise """
        magic link log in is not allowed for unconfirmed users with a password set!

        This cannot happen with the default implementation, which indicates that you
        might have adapted the code to a different use case. Please make sure to read the
        "Mixing magic link and password registration" section of `mix help phx.gen.auth`.
        """

      {%User{confirmed_at: nil} = user, _token} ->
        # First login: confirm the account and expire every token (case 2 above).
        user
        |> User.confirm_changeset()
        |> update_user_and_delete_all_tokens()

      {user, token} ->
        # Already confirmed: burn only this single-use magic link.
        Repo.delete!(token)
        {:ok, {user, []}}

      nil ->
        {:error, :not_found}
    end
  end

  @doc ~S"""
  Delivers the update email instructions to the given user.

  ## Examples

      iex> deliver_user_update_email_instructions(user, current_email, &url(~p"/users/settings/confirm-email/#{&1}"))
      {:ok, %{to: ..., body: ...}}

  """
  def deliver_user_update_email_instructions(%User{} = user, current_email, update_email_url_fun)
      when is_function(update_email_url_fun, 1) do
    {encoded_token, user_token} = UserToken.build_email_token(user, "change:#{current_email}")

    Repo.insert!(user_token)
    UserNotifier.deliver_update_email_instructions(user, update_email_url_fun.(encoded_token))
  end

  @doc """
  Delivers the magic link login instructions to the given user.
  """
  def deliver_login_instructions(%User{} = user, magic_link_url_fun)
      when is_function(magic_link_url_fun, 1) do
    {encoded_token, user_token} = UserToken.build_email_token(user, "login")
    Repo.insert!(user_token)
    UserNotifier.deliver_login_instructions(user, magic_link_url_fun.(encoded_token))
  end

  @doc """
  Deletes the signed token with the given context.
  """
  def delete_user_session_token(token) do
    Repo.delete_all(from(UserToken, where: [token: ^token, context: "session"]))
    :ok
  end

  ## Token helper

  # Applies `changeset` and expires every token belonging to that user, in a
  # single transaction. Returns {:ok, {user, expired_tokens}} on success or
  # the changeset error tuple unchanged.
  defp update_user_and_delete_all_tokens(changeset) do
    Repo.transact(fn ->
      with {:ok, user} <- Repo.update(changeset) do
        tokens_to_expire = Repo.all_by(UserToken, user_id: user.id)

        Repo.delete_all(from(t in UserToken, where: t.id in ^Enum.map(tokens_to_expire, & &1.id)))

        {:ok, {user, tokens_to_expire}}
      end
    end)
  end
end
|
||||
33
lib/berrypod/accounts/scope.ex
Normal file
33
lib/berrypod/accounts/scope.ex
Normal file
@@ -0,0 +1,33 @@
|
||||
defmodule Berrypod.Accounts.Scope do
  @moduledoc """
  Identifies the caller on whose behalf an operation runs.

  A `Berrypod.Accounts.Scope` lets public interfaces receive information
  about the caller — whether the call comes from an end-user and, if so,
  which user. The struct can also carry privilege fields (e.g. "super
  user") for authorization decisions, and is useful for logging as well as
  for scoping PubSub subscriptions and broadcasts.

  Extend the fields on this struct as application requirements grow.
  """

  alias Berrypod.Accounts.User

  defstruct user: nil

  @doc """
  Builds a scope wrapping the given `%User{}`.

  Returns `nil` when no user is given, so the result of a session lookup
  can be passed straight through.
  """
  def for_user(%User{} = user), do: %__MODULE__{user: user}
  def for_user(nil), do: nil
end
|
||||
134
lib/berrypod/accounts/user.ex
Normal file
134
lib/berrypod/accounts/user.ex
Normal file
@@ -0,0 +1,134 @@
|
||||
defmodule Berrypod.Accounts.User do
  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id
  schema "users" do
    field :email, :string
    # Virtual + redacted: the plaintext password is never persisted or logged.
    field :password, :string, virtual: true, redact: true
    field :hashed_password, :string, redact: true
    field :confirmed_at, :utc_datetime
    # Virtual: filled in from the session token at login time, not stored here.
    field :authenticated_at, :utc_datetime, virtual: true

    timestamps(type: :utc_datetime)
  end

  @doc """
  A user changeset for registering or changing the email.

  It requires the email to change otherwise an error is added.

  ## Options

    * `:validate_unique` - Set to false if you don't want to validate the
      uniqueness of the email, useful when displaying live validations.
      Defaults to `true`.
  """
  def email_changeset(user, attrs, opts \\ []) do
    user
    |> cast(attrs, [:email])
    |> validate_email(opts)
  end

  # Format/length checks always run; uniqueness and "did it change" checks
  # only when :validate_unique (default true).
  defp validate_email(changeset, opts) do
    changeset =
      changeset
      |> validate_required([:email])
      |> validate_format(:email, ~r/^[^@,;\s]+@[^@,;\s]+$/,
        message: "must have the @ sign and no spaces"
      )
      |> validate_length(:email, max: 160)

    if Keyword.get(opts, :validate_unique, true) do
      changeset
      # "unsafe" = racy app-level check for nicer errors; the DB-level
      # unique_constraint below is the authoritative guarantee.
      |> unsafe_validate_unique(:email, Berrypod.Repo)
      |> unique_constraint(:email)
      |> validate_email_changed()
    else
      changeset
    end
  end

  # Adds an error when the record already has an email and attrs did not change it.
  defp validate_email_changed(changeset) do
    if get_field(changeset, :email) && get_change(changeset, :email) == nil do
      add_error(changeset, :email, "did not change")
    else
      changeset
    end
  end

  @doc """
  A user changeset for changing the password.

  It is important to validate the length of the password, as long passwords may
  be very expensive to hash for certain algorithms.

  ## Options

    * `:hash_password` - Hashes the password so it can be stored securely
      in the database and ensures the password field is cleared to prevent
      leaks in the logs. If password hashing is not needed and clearing the
      password field is not desired (like when using this changeset for
      validations on a LiveView form), this option can be set to `false`.
      Defaults to `true`.
  """
  def password_changeset(user, attrs, opts \\ []) do
    user
    |> cast(attrs, [:password])
    |> validate_confirmation(:password, message: "does not match password")
    |> validate_password(opts)
  end

  defp validate_password(changeset, opts) do
    changeset
    |> validate_required([:password])
    |> validate_length(:password, min: 12, max: 72)
    # Examples of additional password validation:
    # |> validate_format(:password, ~r/[a-z]/, message: "at least one lower case character")
    # |> validate_format(:password, ~r/[A-Z]/, message: "at least one upper case character")
    # |> validate_format(:password, ~r/[!?@#$%^&*_0-9]/, message: "at least one digit or punctuation character")
    |> maybe_hash_password(opts)
  end

  # Replaces the virtual :password change with :hashed_password — but only
  # when hashing is requested, a password was given, and the changeset is
  # otherwise valid (hashing an invalid password would waste bcrypt work).
  defp maybe_hash_password(changeset, opts) do
    hash_password? = Keyword.get(opts, :hash_password, true)
    password = get_change(changeset, :password)

    if hash_password? && password && changeset.valid? do
      changeset
      # If using Bcrypt, then further validate it is at most 72 bytes long
      |> validate_length(:password, max: 72, count: :bytes)
      # Hashing could be done with `Ecto.Changeset.prepare_changes/2`, but that
      # would keep the database transaction open longer and hurt performance.
      |> put_change(:hashed_password, Bcrypt.hash_pwd_salt(password))
      |> delete_change(:password)
    else
      changeset
    end
  end

  @doc """
  Confirms the account by setting `confirmed_at`.
  """
  def confirm_changeset(user) do
    now = DateTime.utc_now(:second)
    change(user, confirmed_at: now)
  end

  @doc """
  Verifies the password.

  If there is no user or the user doesn't have a password, we call
  `Bcrypt.no_user_verify/0` to avoid timing attacks.
  """
  def valid_password?(%Berrypod.Accounts.User{hashed_password: hashed_password}, password)
      when is_binary(hashed_password) and byte_size(password) > 0 do
    Bcrypt.verify_pass(password, hashed_password)
  end

  def valid_password?(_, _) do
    # Burn comparable CPU time so "unknown user" and "wrong password" are
    # indistinguishable to a timing observer.
    Bcrypt.no_user_verify()
    false
  end
end
|
||||
84
lib/berrypod/accounts/user_notifier.ex
Normal file
84
lib/berrypod/accounts/user_notifier.ex
Normal file
@@ -0,0 +1,84 @@
|
||||
defmodule Berrypod.Accounts.UserNotifier do
  import Swoosh.Email

  alias Berrypod.Mailer
  alias Berrypod.Accounts.User

  # Composes a plain-text email and hands it to the application mailer.
  # Returns {:ok, email} on success; any mailer error passes through untouched.
  defp deliver(recipient, subject, body) do
    message =
      new()
      |> to(recipient)
      |> from({"Berrypod", "contact@example.com"})
      |> subject(subject)
      |> text_body(body)

    case Mailer.deliver(message) do
      {:ok, _metadata} -> {:ok, message}
      error -> error
    end
  end

  @doc """
  Emails the user a link to confirm an email-address change.
  """
  def deliver_update_email_instructions(user, url) do
    deliver(user.email, "Update email instructions", """

    ==============================

    Hi #{user.email},

    You can change your email by visiting the URL below:

    #{url}

    If you didn't request this change, please ignore this.

    ==============================
    """)
  end

  @doc """
  Emails the user a magic-link login.

  Users who have not yet confirmed their email receive confirmation
  wording; everyone else gets the plain log-in email. Both messages carry
  the same magic-link URL.
  """
  def deliver_login_instructions(%User{confirmed_at: nil} = user, url) do
    deliver_confirmation_instructions(user, url)
  end

  def deliver_login_instructions(user, url) do
    deliver_magic_link_instructions(user, url)
  end

  # Log-in email for already-confirmed accounts.
  defp deliver_magic_link_instructions(user, url) do
    deliver(user.email, "Log in instructions", """

    ==============================

    Hi #{user.email},

    You can log into your account by visiting the URL below:

    #{url}

    If you didn't request this email, please ignore this.

    ==============================
    """)
  end

  # First-login email doubling as account confirmation.
  defp deliver_confirmation_instructions(user, url) do
    deliver(user.email, "Confirmation instructions", """

    ==============================

    Hi #{user.email},

    You can confirm your account by visiting the URL below:

    #{url}

    If you didn't create an account with us, please ignore this.

    ==============================
    """)
  end
end
|
||||
158
lib/berrypod/accounts/user_token.ex
Normal file
158
lib/berrypod/accounts/user_token.ex
Normal file
@@ -0,0 +1,158 @@
|
||||
defmodule Berrypod.Accounts.UserToken do
  use Ecto.Schema
  import Ecto.Query
  alias Berrypod.Accounts.UserToken

  # Email tokens are stored hashed; sha256 of a 32-byte random value.
  @hash_algorithm :sha256
  @rand_size 32

  # It is very important to keep the magic link token expiry short,
  # since someone with access to the email may take over the account.
  @magic_link_validity_in_minutes 15
  @change_email_validity_in_days 7
  @session_validity_in_days 14

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id
  schema "users_tokens" do
    field :token, :binary
    # What the token is for: "session", "login", or "change:<old email>".
    field :context, :string
    # For email-delivered tokens: the address the token was sent to.
    field :sent_to, :string
    field :authenticated_at, :utc_datetime
    belongs_to :user, Berrypod.Accounts.User

    timestamps(type: :utc_datetime, updated_at: false)
  end

  @doc """
  Generates a token that will be stored in a signed place,
  such as session or cookie. As they are signed, those
  tokens do not need to be hashed.

  The reason why we store session tokens in the database, even
  though Phoenix already provides a session cookie, is because
  Phoenix's default session cookies are not persisted, they are
  simply signed and potentially encrypted. This means they are
  valid indefinitely, unless you change the signing/encryption
  salt.

  Therefore, storing them allows individual user
  sessions to be expired. The token system can also be extended
  to store additional data, such as the device used for logging in.
  You could then use this information to display all valid sessions
  and devices in the UI and allow users to explicitly expire any
  session they deem invalid.
  """
  def build_session_token(user) do
    token = :crypto.strong_rand_bytes(@rand_size)
    # Carry the original authentication time forward when a session is re-issued.
    dt = user.authenticated_at || DateTime.utc_now(:second)
    {token, %UserToken{token: token, context: "session", user_id: user.id, authenticated_at: dt}}
  end

  @doc """
  Checks if the token is valid and returns its underlying lookup query.

  The query returns the user found by the token, if any, along with the token's creation time.

  The token is valid if it matches the value in the database and it has
  not expired (after @session_validity_in_days).
  """
  def verify_session_token_query(token) do
    query =
      from token in by_token_and_context_query(token, "session"),
        join: user in assoc(token, :user),
        where: token.inserted_at > ago(@session_validity_in_days, "day"),
        # The user struct is returned with the token's authenticated_at
        # spliced into its virtual field (used by sudo-mode checks).
        select: {%{user | authenticated_at: token.authenticated_at}, token.inserted_at}

    {:ok, query}
  end

  @doc """
  Builds a token and its hash to be delivered to the user's email.

  The non-hashed token is sent to the user email while the
  hashed part is stored in the database. The original token cannot be reconstructed,
  which means anyone with read-only access to the database cannot directly use
  the token in the application to gain access. Furthermore, if the user changes
  their email in the system, the tokens sent to the previous email are no longer
  valid.

  Users can easily adapt the existing code to provide other types of delivery methods,
  for example, by phone numbers.
  """
  def build_email_token(user, context) do
    build_hashed_token(user, context, user.email)
  end

  # Returns {url_safe_plaintext, %UserToken{}} — only the hash is persisted.
  defp build_hashed_token(user, context, sent_to) do
    token = :crypto.strong_rand_bytes(@rand_size)
    hashed_token = :crypto.hash(@hash_algorithm, token)

    {Base.url_encode64(token, padding: false),
     %UserToken{
       token: hashed_token,
       context: context,
       sent_to: sent_to,
       user_id: user.id
     }}
  end

  @doc """
  Checks if the token is valid and returns its underlying lookup query.

  If found, the query returns a tuple of the form `{user, token}`.

  The given token is valid if it matches its hashed counterpart in the
  database. This function also checks if the token is being used within
  15 minutes. The context of a magic link token is always "login".
  """
  def verify_magic_link_token_query(token) do
    case Base.url_decode64(token, padding: false) do
      {:ok, decoded_token} ->
        hashed_token = :crypto.hash(@hash_algorithm, decoded_token)

        query =
          from token in by_token_and_context_query(hashed_token, "login"),
            join: user in assoc(token, :user),
            where: token.inserted_at > ago(^@magic_link_validity_in_minutes, "minute"),
            # Tokens sent to a previous email address are rejected here.
            where: token.sent_to == user.email,
            select: {user, token}

        {:ok, query}

      :error ->
        # Not url-base64 at all — cannot be one of our tokens.
        :error
    end
  end

  @doc """
  Checks if the token is valid and returns its underlying lookup query.

  The query returns the user_token found by the token, if any.

  This is used to validate requests to change the user
  email.
  The given token is valid if it matches its hashed counterpart in the
  database and if it has not expired (after @change_email_validity_in_days).
  The context must always start with "change:".
  """
  def verify_change_email_token_query(token, "change:" <> _ = context) do
    case Base.url_decode64(token, padding: false) do
      {:ok, decoded_token} ->
        hashed_token = :crypto.hash(@hash_algorithm, decoded_token)

        query =
          from token in by_token_and_context_query(hashed_token, context),
            where: token.inserted_at > ago(@change_email_validity_in_days, "day")

        {:ok, query}

      :error ->
        :error
    end
  end

  # Base query: exact token bytes plus matching context.
  defp by_token_and_context_query(token, context) do
    from UserToken, where: [token: ^token, context: ^context]
  end
end
|
||||
49
lib/berrypod/application.ex
Normal file
49
lib/berrypod/application.ex
Normal file
@@ -0,0 +1,49 @@
|
||||
defmodule Berrypod.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # NOTE: start order is load-bearing — each child may rely on the ones
    # before it (Repo before migrations/seeds, Endpoint before CSSCache).
    children = [
      BerrypodWeb.Telemetry,
      Berrypod.Repo,
      {Ecto.Migrator,
       repos: Application.fetch_env!(:berrypod, :ecto_repos), skip: skip_migrations?()},
      # Seed default theme settings if none exist (first boot)
      Supervisor.child_spec({Task, &Berrypod.Release.seed_defaults/0}, id: :seed_defaults),
      # Load encrypted secrets from DB into Application env
      {Task, &Berrypod.Secrets.load_all/0},
      {DNSCluster, query: Application.get_env(:berrypod, :dns_cluster_query) || :ignore},
      {Phoenix.PubSub, name: Berrypod.PubSub},
      # Background job processing
      {Oban, Application.fetch_env!(:berrypod, Oban)},
      # Image variant cache - ensures all variants exist on startup
      Berrypod.Images.VariantCache,
      # Start to serve requests
      BerrypodWeb.Endpoint,
      # Theme CSS cache - must start after Endpoint for static_path/1 to work
      Berrypod.Theme.CSSCache
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Berrypod.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    BerrypodWeb.Endpoint.config_change(changed, removed)
    :ok
  end

  # Skip the Ecto.Migrator child outside a release (dev/test run `mix ecto.migrate`).
  defp skip_migrations?() do
    # By default, sqlite migrations are run when using a release
    System.get_env("RELEASE_NAME") == nil
  end
end
|
||||
244
lib/berrypod/cart.ex
Normal file
244
lib/berrypod/cart.ex
Normal file
@@ -0,0 +1,244 @@
|
||||
defmodule Berrypod.Cart do
|
||||
@moduledoc """
|
||||
The Cart context.
|
||||
|
||||
Manages shopping cart operations stored in session. Cart items are stored
|
||||
as a list of {variant_id, quantity} tuples for minimal session storage.
|
||||
Items are hydrated with full product/variant data when needed for display.
|
||||
"""
|
||||
|
||||
alias Berrypod.Products
|
||||
alias Berrypod.Products.ProductImage
|
||||
|
||||
@session_key "cart"
|
||||
|
||||
# =============================================================================
|
||||
# Session Operations
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Reads the cart out of the session map.

Returns a list of `{variant_id, quantity}` tuples. A missing or malformed
session value yields an empty cart.
"""
def get_from_session(session) do
  with items when is_list(items) <- Map.get(session, @session_key) do
    items
  else
    _ -> []
  end
end
|
||||
|
||||
@doc """
Puts cart items in session via Plug.Conn.

Used by the CartController to persist cart to session cookie. Stores the
raw `{variant_id, quantity}` tuples under the "cart" session key.
"""
def put_in_session(conn, cart_items) do
  Plug.Conn.put_session(conn, @session_key, cart_items)
end
|
||||
|
||||
# =============================================================================
|
||||
# Cart Operations
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Adds `quantity` of the given variant to the cart.

When the variant is already present its quantity is incremented in place;
otherwise a new entry is appended to the end of the cart. Returns the
updated list of `{variant_id, quantity}` tuples.
"""
def add_item(cart_items, variant_id, quantity \\ 1)
    when is_integer(quantity) and quantity > 0 do
  existing = List.keyfind(cart_items, variant_id, 0)

  if existing do
    {_id, current} = existing
    List.keyreplace(cart_items, variant_id, 0, {variant_id, current + quantity})
  else
    cart_items ++ [{variant_id, quantity}]
  end
end
|
||||
|
||||
@doc """
Sets the quantity of a variant already in the cart.

A quantity of zero or less removes the entry entirely; a variant that is
not in the cart leaves it untouched. Returns the updated cart items list.
"""
def update_quantity(cart_items, variant_id, quantity) when is_integer(quantity) do
  cond do
    quantity <= 0 ->
      List.keydelete(cart_items, variant_id, 0)

    List.keymember?(cart_items, variant_id, 0) ->
      List.keyreplace(cart_items, variant_id, 0, {variant_id, quantity})

    true ->
      cart_items
  end
end
|
||||
|
||||
@doc """
Drops a variant from the cart.

Removes the first matching entry only; returns the cart unchanged when the
variant is not present.
"""
def remove_item(cart_items, variant_id) do
  case List.keytake(cart_items, variant_id, 0) do
    {_removed, rest} -> rest
    nil -> cart_items
  end
end
|
||||
|
||||
@doc """
Looks up how many units of a variant the cart holds.

Returns 0 when the variant is absent.
"""
def get_quantity(cart_items, variant_id) do
  {_id, quantity} = List.keyfind(cart_items, variant_id, 0, {variant_id, 0})
  quantity
end
|
||||
|
||||
# =============================================================================
|
||||
# Hydration
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Expands `{variant_id, quantity}` tuples into display-ready maps.

Loads variant and product data in one batch, then builds one map per cart
line with the product name, formatted variant options, price, quantity and
product image URL. Lines whose variant no longer exists are dropped.
"""
def hydrate(cart_items) when is_list(cart_items) do
  case Enum.map(cart_items, &elem(&1, 0)) do
    [] ->
      []

    variant_ids ->
      # One batched lookup instead of a query per cart line.
      lookup = Products.get_variants_with_products(variant_ids)

      # The bare `variant = ...` clause doubles as a filter: a nil lookup
      # result is falsy, so stale cart lines drop out of the comprehension.
      for {variant_id, quantity} <- cart_items,
          variant = Map.get(lookup, variant_id) do
        %{
          variant_id: variant.id,
          product_id: variant.product.id,
          name: variant.product.title,
          variant: format_variant_options(variant.options),
          price: variant.price,
          quantity: quantity,
          image: variant_image_url(variant.product)
        }
      end
  end
end
|
||||
|
||||
# Joins a variant's option values into a "Value1 / Value2" label.
# Returns nil when there is nothing to show (empty map or non-map input).
defp format_variant_options(options) when is_map(options) and map_size(options) > 0 do
  Enum.map_join(options, " / ", fn {_name, value} -> value end)
end

defp format_variant_options(_), do: nil
|
||||
|
||||
# URL of the product's first image at the 400px variant, or nil when the
# product has no image list to draw from.
defp variant_image_url(product) do
  with [first_image | _rest] <- product.images do
    ProductImage.url(first_image, 400)
  else
    _ -> nil
  end
end
|
||||
|
||||
# =============================================================================
|
||||
# Helpers
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Computes the full display state for a cart.

Given raw `{variant_id, quantity}` tuples, returns a map with hydrated
items, total item count, and a formatted subtotal. This is the single
source of truth for cart state computation — used by CartHook.
"""
def build_state(raw_cart) do
  hydrated_items = hydrate(raw_cart)

  %{
    items: hydrated_items,
    count: item_count(raw_cart),
    subtotal: format_subtotal(hydrated_items)
  }
end
|
||||
|
||||
@doc """
Returns the total item count in the cart (sum of all quantities).
"""
def item_count(cart_items) do
  cart_items
  |> Enum.map(fn {_variant_id, quantity} -> quantity end)
  |> Enum.sum()
end
|
||||
|
||||
@doc """
Calculates the subtotal from hydrated cart items.

Returns the total in pence.
"""
def calculate_subtotal(hydrated_items) do
  hydrated_items
  |> Enum.map(&(&1.price * &1.quantity))
  |> Enum.sum()
end
|
||||
|
||||
@doc """
Formats a price in pence as a GBP currency string using ex_money.

Any non-integer input formats as the zero price.
"""
def format_price(price_pence) when is_integer(price_pence) do
  # Money works in decimal pounds, so divide pence by 100 first.
  pounds = Decimal.div(Decimal.new(price_pence), 100)
  Money.to_string!(Money.new(:GBP, pounds))
end

def format_price(_), do: format_price(0)
|
||||
|
||||
@doc """
Formats the subtotal from hydrated items as a GBP string.
"""
def format_subtotal(hydrated_items) do
  format_price(calculate_subtotal(hydrated_items))
end
|
||||
|
||||
@doc """
Serializes cart items for JSON transport.

Converts `{variant_id, quantity}` tuples to `[variant_id, quantity]` lists
for JSON compatibility (tuples have no JSON representation).
"""
def serialize(cart_items) do
  for {variant_id, quantity} <- cart_items, do: [variant_id, quantity]
end
|
||||
|
||||
@doc """
Deserializes cart items from JSON transport.

Converts `[variant_id, quantity]` lists back to `{variant_id, quantity}`
tuples. Malformed entries are dropped; non-list input yields `[]`.
"""
def deserialize(items) when is_list(items) do
  # The generator pattern skips non-pairs; the filter drops pairs whose
  # element types are wrong. Both mirror the old map-then-reject-nil pass.
  for [variant_id, quantity] <- items,
      is_binary(variant_id) and is_integer(quantity),
      do: {variant_id, quantity}
end

def deserialize(_), do: []
|
||||
end
|
||||
12
lib/berrypod/cldr.ex
Normal file
12
lib/berrypod/cldr.ex
Normal file
@@ -0,0 +1,12 @@
|
||||
defmodule Berrypod.Cldr do
  @moduledoc """
  CLDR backend for internationalization and currency formatting.

  Used by ex_money for currency handling.
  """

  # `use Cldr` compiles locale data into this module at build time; only the
  # "en" locale is bundled. The Money provider hooks this backend into
  # ex_money's number/currency formatting.
  use Cldr,
    locales: ["en"],
    default_locale: "en",
    providers: [Cldr.Number, Money]
end
|
||||
365
lib/berrypod/clients/printful.ex
Normal file
365
lib/berrypod/clients/printful.ex
Normal file
@@ -0,0 +1,365 @@
|
||||
defmodule Berrypod.Clients.Printful do
  @moduledoc """
  HTTP client for the Printful API.

  Uses v2 endpoints where available, v1 for sync products (store products).
  Handles authentication via Bearer tokens stored in the process dictionary.
  """

  @base_url "https://api.printful.com"

  # =============================================================================
  # Auth
  # =============================================================================

  @doc """
  Get the API token.

  Checks process dictionary first (for provider connections with stored credentials),
  then falls back to environment variable (for development/testing).
  Raises when neither source is available.
  """
  def api_token do
    Process.get(:printful_api_key) ||
      System.get_env("PRINTFUL_API_TOKEN") ||
      raise "PRINTFUL_API_TOKEN environment variable is not set"
  end

  @doc """
  Get the store ID from the process dictionary, or nil when none is set.
  """
  def store_id do
    Process.get(:printful_store_id)
  end

  # =============================================================================
  # Core HTTP
  # =============================================================================

  @doc """
  Make a GET request to the Printful API.

  Returns `{:ok, data}` on 2xx (response envelope unwrapped),
  `{:error, {status, body}}` on other HTTP statuses, or `{:error, reason}`
  on transport failure.
  """
  def get(path, _opts \\ []) do
    (@base_url <> path)
    |> Req.get(headers: auth_headers(), receive_timeout: 30_000)
    |> handle_response(path)
  end

  @doc """
  Make a POST request to the Printful API.

  Same return contract as `get/2`. Uses a longer timeout because order and
  mockup creation can be slow.
  """
  def post(path, body, _opts \\ []) do
    (@base_url <> path)
    |> Req.post(json: body, headers: auth_headers(), receive_timeout: 60_000)
    |> handle_response(path)
  end

  @doc """
  Make a DELETE request to the Printful API.

  Returns `{:ok, nil}` for a 204 No Content response; otherwise the same
  contract as `get/2`.
  """
  def delete(path, _opts \\ []) do
    case Req.delete(@base_url <> path, headers: auth_headers(), receive_timeout: 30_000) do
      # BUGFIX: 204 must be matched before the generic 2xx branch. Previously
      # this clause came *after* a `status in 200..299` match (which covers
      # 204), so it was unreachable and 204 responses returned the raw empty
      # body instead of {:ok, nil}.
      {:ok, %Req.Response{status: 204}} -> {:ok, nil}
      other -> handle_response(other, path)
    end
  end

  # Normalizes a Req result into {:ok, data} | {:error, reason}.
  # Shared by all HTTP verbs above to keep their error handling consistent.
  defp handle_response({:ok, %Req.Response{status: status, body: body}}, path)
       when status in 200..299 do
    {:ok, unwrap_response(path, body)}
  end

  defp handle_response({:ok, %Req.Response{status: status, body: body}}, _path) do
    {:error, {status, body}}
  end

  defp handle_response({:error, reason}, _path), do: {:error, reason}

  # v1 responses wrap data in "result", v2 wraps in "data"
  defp unwrap_response("/v2/" <> _, %{"data" => data}), do: data
  defp unwrap_response(_path, %{"code" => _, "result" => result}), do: result
  defp unwrap_response(_path, body), do: body

  # =============================================================================
  # Stores (v2)
  # =============================================================================

  @doc """
  List all stores for the authenticated account.
  """
  def get_stores do
    get("/v2/stores")
  end

  @doc """
  Get the first store's ID.

  Returns `{:ok, id}`, `{:error, :no_stores}` when the account has no
  stores, or the underlying API error.
  """
  def get_store_id do
    case get_stores() do
      {:ok, [first | _]} -> {:ok, first["id"]}
      {:ok, []} -> {:error, :no_stores}
      error -> error
    end
  end

  # =============================================================================
  # Catalogue (v2)
  # =============================================================================

  @doc """
  Get a catalogue product by ID.
  """
  def get_catalog_product(product_id) do
    get("/v2/catalog-products/#{product_id}")
  end

  @doc """
  Get variants for a catalogue product.
  """
  def get_catalog_variants(product_id) do
    get("/v2/catalog-products/#{product_id}/catalog-variants")
  end

  @doc """
  Get stock availability for a catalogue product.
  """
  def get_product_availability(product_id) do
    get("/v2/catalog-products/#{product_id}/availability")
  end

  # =============================================================================
  # Sync Products (v1 — no v2 equivalent)
  # =============================================================================

  @doc """
  List the seller's configured products (sync products).

  These are products the seller has set up in Printful's dashboard with designs.
  Supports pagination via `offset` and `limit` options.
  """
  def list_sync_products(opts \\ []) do
    offset = Keyword.get(opts, :offset, 0)
    limit = Keyword.get(opts, :limit, 20)
    get("/store/products?offset=#{offset}&limit=#{limit}")
  end

  @doc """
  Get a single sync product with all its variants and files.
  """
  def get_sync_product(product_id) do
    get("/store/products/#{product_id}")
  end

  @doc """
  Create a sync product with variants and design files.

  ## Example

      create_sync_product(%{
        sync_product: %{name: "My T-Shirt"},
        sync_variants: [%{
          variant_id: 4011,
          retail_price: "29.99",
          files: [%{url: "https://example.com/design.png", type: "default"}]
        }]
      })
  """
  def create_sync_product(product_data) do
    post("/store/products", product_data)
  end

  @doc """
  Delete a sync product and all its variants.
  """
  def delete_sync_product(product_id) do
    delete("/store/products/#{product_id}")
  end

  # =============================================================================
  # Shipping (v2)
  # =============================================================================

  @doc """
  Calculate shipping rates for a set of items to a recipient.

  ## Example

      calculate_shipping(
        %{country_code: "GB"},
        [%{source: "catalog", catalog_variant_id: 474, quantity: 1}]
      )
  """
  def calculate_shipping(recipient, items) do
    post("/v2/shipping-rates", %{recipient: recipient, order_items: items})
  end

  # =============================================================================
  # Orders (v2)
  # =============================================================================

  @doc """
  Create a new order (draft status).
  """
  def create_order(order_data) do
    post("/v2/orders", order_data)
  end

  @doc """
  Confirm an order for fulfilment. This triggers charges and production.
  """
  def confirm_order(order_id) do
    post("/v2/orders/#{order_id}/confirmation", %{})
  end

  @doc """
  Get an order by ID.
  """
  def get_order(order_id) do
    get("/v2/orders/#{order_id}")
  end

  @doc """
  Get shipments for an order.
  """
  def get_order_shipments(order_id) do
    get("/v2/orders/#{order_id}/shipments")
  end

  # =============================================================================
  # Mockups (v2)
  # =============================================================================

  @doc """
  Create a mockup generation task.
  """
  def create_mockup_task(body) do
    post("/v2/mockup-tasks", body)
  end

  @doc """
  Get mockup task results. Pass `task_id` to poll a specific task.
  """
  def get_mockup_tasks(params \\ %{}) do
    case URI.encode_query(params) do
      "" -> get("/v2/mockup-tasks")
      query -> get("/v2/mockup-tasks?#{query}")
    end
  end

  # =============================================================================
  # Mockup generator (legacy, multi-angle)
  # =============================================================================

  @doc """
  Create a mockup generator task for a catalog product.

  Returns `{:ok, %{"task_key" => "gt-...", "status" => "pending"}}`.
  """
  def create_mockup_generator_task(catalog_product_id, body) do
    post("/mockup-generator/create-task/#{catalog_product_id}", body)
  end

  @doc """
  Poll a mockup generator task by task key.

  Returns `{:ok, %{"status" => "completed", "mockups" => [...]}}` when done.
  """
  def get_mockup_generator_task(task_key) do
    get("/mockup-generator/task?task_key=#{task_key}")
  end

  # =============================================================================
  # Files (v2)
  # =============================================================================

  @doc """
  Upload a file to the Printful file library.
  """
  def upload_file(url) do
    post("/v2/files", %{url: url})
  end

  # =============================================================================
  # Webhooks (v2)
  # =============================================================================

  @doc """
  Set up webhook configuration.
  """
  def setup_webhooks(url, events) do
    post("/v2/webhooks", %{url: url, events: events})
  end

  @doc """
  Get current webhook configuration.
  """
  def get_webhooks do
    get("/v2/webhooks")
  end

  @doc """
  Disable webhook support.
  """
  def delete_webhooks do
    delete("/v2/webhooks")
  end

  # =============================================================================
  # File Downloads
  # =============================================================================

  @doc """
  Download a file from a URL to a local path.

  Streams the response body straight to disk. On any failure the partial
  file is removed so callers never see a truncated download.
  """
  def download_file(url, output_path) do
    case Req.get(url, into: File.stream!(output_path), receive_timeout: 60_000) do
      {:ok, %Req.Response{status: status}} when status in 200..299 ->
        {:ok, output_path}

      {:ok, %Req.Response{status: status}} ->
        File.rm(output_path)
        {:error, {:http_error, status}}

      {:error, reason} ->
        File.rm(output_path)
        {:error, reason}
    end
  end

  # =============================================================================
  # Private
  # =============================================================================

  # Bearer auth plus the optional X-PF-Store-Id header when a store is bound
  # to the current process (required for store-scoped endpoints).
  defp auth_headers do
    base = [
      {"Authorization", "Bearer #{api_token()}"},
      {"Content-Type", "application/json"}
    ]

    case store_id() do
      nil -> base
      id -> [{"X-PF-Store-Id", to_string(id)} | base]
    end
  end
end
|
||||
270
lib/berrypod/clients/printify.ex
Normal file
270
lib/berrypod/clients/printify.ex
Normal file
@@ -0,0 +1,270 @@
|
||||
defmodule Berrypod.Clients.Printify do
  @moduledoc """
  HTTP client for the Printify API.

  Handles authentication and provides low-level API access.
  Requires PRINTIFY_API_TOKEN environment variable to be set.
  """

  @base_url "https://api.printify.com/v1"

  @doc """
  Get the API token.

  Checks process dictionary first (for provider connections with stored credentials),
  then falls back to environment variable (for development/mockup generation).
  Raises when neither source is available.
  """
  def api_token do
    Process.get(:printify_api_key) ||
      System.get_env("PRINTIFY_API_TOKEN") ||
      raise "PRINTIFY_API_TOKEN environment variable is not set"
  end

  @doc """
  Make a GET request to the Printify API.

  Returns `{:ok, body}` on 2xx, `{:error, {status, body}}` on other HTTP
  statuses, or `{:error, reason}` on transport failure.
  """
  def get(path, _opts \\ []) do
    (@base_url <> path)
    |> Req.get(headers: auth_headers(), receive_timeout: 30_000)
    |> handle_response()
  end

  @doc """
  Make a POST request to the Printify API. Same contract as `get/2`;
  longer timeout because product/order creation can be slow.
  """
  def post(path, body, _opts \\ []) do
    (@base_url <> path)
    |> Req.post(json: body, headers: auth_headers(), receive_timeout: 60_000)
    |> handle_response()
  end

  @doc """
  Make a PUT request to the Printify API. Same contract as `get/2`.
  """
  def put(path, body, _opts \\ []) do
    (@base_url <> path)
    |> Req.put(json: body, headers: auth_headers(), receive_timeout: 60_000)
    |> handle_response()
  end

  @doc """
  Make a DELETE request to the Printify API.

  Returns `{:ok, nil}` for a 204 No Content response; otherwise the same
  contract as `get/2`.
  """
  def delete(path, _opts \\ []) do
    case Req.delete(@base_url <> path, headers: auth_headers(), receive_timeout: 30_000) do
      # BUGFIX: 204 must be matched before the generic 2xx branch. Previously
      # this clause came *after* a `status in 200..299` match (which covers
      # 204), so it was unreachable and 204 responses returned the raw body.
      {:ok, %Req.Response{status: 204}} -> {:ok, nil}
      other -> handle_response(other)
    end
  end

  # Normalizes a Req result into {:ok, body} | {:error, reason}.
  # Shared by all HTTP verbs above.
  defp handle_response({:ok, %Req.Response{status: status, body: body}})
       when status in 200..299 do
    {:ok, body}
  end

  defp handle_response({:ok, %Req.Response{status: status, body: body}}) do
    {:error, {status, body}}
  end

  defp handle_response({:error, reason}), do: {:error, reason}

  @doc """
  Get all shops for the authenticated account.
  """
  def get_shops do
    get("/shops.json")
  end

  @doc """
  Get the first shop ID for the account.

  Returns `{:ok, id}`, `{:error, :no_shops}` when the account has no shops,
  or the underlying API error.
  """
  def get_shop_id do
    case get_shops() do
      {:ok, [first | _]} -> {:ok, first["id"]}
      {:ok, []} -> {:error, :no_shops}
      error -> error
    end
  end

  @doc """
  Get all blueprints (product types) from the catalog.
  """
  def get_blueprints do
    get("/catalog/blueprints.json")
  end

  @doc """
  Get print providers for a specific blueprint.
  """
  def get_print_providers(blueprint_id) do
    get("/catalog/blueprints/#{blueprint_id}/print_providers.json")
  end

  @doc """
  Get variants for a specific blueprint and print provider.
  """
  def get_variants(blueprint_id, print_provider_id) do
    get("/catalog/blueprints/#{blueprint_id}/print_providers/#{print_provider_id}/variants.json")
  end

  @doc """
  Get shipping information for a blueprint/provider combination.
  """
  def get_shipping(blueprint_id, print_provider_id) do
    get("/catalog/blueprints/#{blueprint_id}/print_providers/#{print_provider_id}/shipping.json")
  end

  @doc """
  Upload an image to Printify via URL.
  """
  def upload_image(file_name, url) do
    post("/uploads/images.json", %{
      file_name: file_name,
      url: url
    })
  end

  @doc """
  Create a product in a shop.
  """
  def create_product(shop_id, product_data) do
    post("/shops/#{shop_id}/products.json", product_data)
  end

  @doc """
  Get a product by ID.
  """
  def get_product(shop_id, product_id) do
    get("/shops/#{shop_id}/products/#{product_id}.json")
  end

  @doc """
  List all products in a shop.
  Printify allows a maximum of 50 products per page.
  """
  def list_products(shop_id, opts \\ []) do
    limit = Keyword.get(opts, :limit, 50)
    page = Keyword.get(opts, :page, 1)
    get("/shops/#{shop_id}/products.json?limit=#{limit}&page=#{page}")
  end

  @doc """
  Update a product in a shop.
  """
  def update_product(shop_id, product_id, product_data) do
    put("/shops/#{shop_id}/products/#{product_id}.json", product_data)
  end

  @doc """
  Delete a product from a shop.
  """
  def delete_product(shop_id, product_id) do
    delete("/shops/#{shop_id}/products/#{product_id}.json")
  end

  @doc """
  Create an order in a shop.
  """
  def create_order(shop_id, order_data) do
    post("/shops/#{shop_id}/orders.json", order_data)
  end

  @doc """
  Get an order by ID.
  """
  def get_order(shop_id, order_id) do
    get("/shops/#{shop_id}/orders/#{order_id}.json")
  end

  # =============================================================================
  # Webhooks
  # =============================================================================

  @doc """
  Register a webhook with Printify.

  ## Event types
  - "product:publish:started"
  - "product:updated"
  - "product:deleted"
  - "shop:disconnected"
  """
  def create_webhook(shop_id, url, topic, secret) do
    post("/shops/#{shop_id}/webhooks.json", %{
      topic: topic,
      url: url,
      secret: secret
    })
  end

  @doc """
  List registered webhooks for a shop.
  """
  def list_webhooks(shop_id) do
    get("/shops/#{shop_id}/webhooks.json")
  end

  @doc """
  Delete a webhook.
  """
  def delete_webhook(shop_id, webhook_id) do
    delete("/shops/#{shop_id}/webhooks/#{webhook_id}.json")
  end

  @doc """
  Download a file from a URL to a local path.

  Streams the response body straight to disk. On any failure the partial
  file is removed so callers never see a truncated download.
  """
  def download_file(url, output_path) do
    case Req.get(url, into: File.stream!(output_path), receive_timeout: 60_000) do
      {:ok, %Req.Response{status: status}} when status in 200..299 ->
        {:ok, output_path}

      {:ok, %Req.Response{status: status}} ->
        File.rm(output_path)
        {:error, {:http_error, status}}

      {:error, reason} ->
        File.rm(output_path)
        {:error, reason}
    end
  end

  defp auth_headers do
    [
      {"Authorization", "Bearer #{api_token()}"},
      {"Content-Type", "application/json"}
    ]
  end
end
|
||||
102
lib/berrypod/exchange_rate.ex
Normal file
102
lib/berrypod/exchange_rate.ex
Normal file
@@ -0,0 +1,102 @@
|
||||
defmodule Berrypod.ExchangeRate do
  @moduledoc """
  Fetches and caches exchange rates for shipping cost conversion.

  Uses the frankfurter.app API (ECB data, free, no API key).
  Rates are fetched during product sync and cached in Settings so
  they survive restarts without an API call.
  """

  alias Berrypod.Settings

  require Logger

  @api_base "https://api.frankfurter.app"
  @settings_prefix "exchange_rate_"
  # Conservative fallbacks used when neither the API nor the cache has a rate.
  @default_rates %{"USD" => 0.80, "EUR" => 0.86}

  @doc """
  Fetches the latest exchange rates to GBP from the API and caches them.

  Always returns `{:ok, rates_map}`: on API failure it logs a warning and
  falls back to the cached (or default) rates. The rates map has currency
  codes as keys and GBP multipliers as values,
  e.g. `%{"USD" => 0.7892, "EUR" => 0.8534}`.
  """
  def fetch_and_cache do
    case fetch_from_api() do
      {:ok, rates} ->
        cache_rates(rates)
        {:ok, rates}

      {:error, reason} ->
        Logger.warning("Exchange rate fetch failed: #{inspect(reason)}, using cached rates")
        {:ok, get_cached_rates()}
    end
  end

  @doc """
  Returns cached exchange rates from Settings, falling back to defaults.
  """
  def get_cached_rates do
    Map.new(@default_rates, fn {currency, default} ->
      {currency, read_cached_rate(currency, default)}
    end)
  end

  @doc """
  Returns the GBP rate for a given currency, using cached values.

  Currency codes are matched case-insensitively (fixes mixed-case input
  such as "Gbp", which previously fell through to the USD fallback).
  GBP is always 1.0; unknown currencies fall back to the USD default.
  """
  def rate_for(currency) when is_binary(currency) do
    case String.upcase(currency) do
      "GBP" ->
        1.0

      code ->
        Map.get(get_cached_rates(), code, Map.get(@default_rates, code, 0.80))
    end
  end

  # Settings key for a currency's cached rate, e.g. "exchange_rate_usd_to_gbp".
  # Single source of truth for the key format used by read and write paths.
  defp settings_key(currency) do
    @settings_prefix <> String.downcase(currency) <> "_to_gbp"
  end

  # Reads one cached rate from Settings, tolerating both string storage
  # (normal path — rates are written with Float.to_string/1) and numeric
  # values; `val / 1` coerces integers to floats.
  defp read_cached_rate(currency, default) do
    case Settings.get_setting(settings_key(currency)) do
      nil -> default
      val when is_binary(val) -> String.to_float(val)
      val when is_number(val) -> val / 1
    end
  end

  # Fetch from frankfurter.app API
  defp fetch_from_api do
    url = "#{@api_base}/latest?from=USD&to=GBP,EUR"

    case Req.get(url, receive_timeout: 10_000) do
      {:ok, %{status: 200, body: %{"rates" => rates}}} ->
        # API returns rates FROM USD, so "GBP" value is the USD→GBP multiplier
        gbp_per_usd = rates["GBP"]
        # Derive EUR→GBP: if 1 USD = X GBP and 1 USD = Y EUR, then 1 EUR = X/Y GBP
        eur_rate = rates["EUR"]

        eur_to_gbp =
          if eur_rate && eur_rate > 0,
            do: gbp_per_usd / eur_rate,
            else: @default_rates["EUR"]

        {:ok, %{"USD" => gbp_per_usd, "EUR" => eur_to_gbp}}

      {:ok, %{status: status}} ->
        {:error, {:http_status, status}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Persists each fetched rate to Settings as a string.
  defp cache_rates(rates) do
    Enum.each(rates, fn {currency, rate} ->
      Settings.put_setting(settings_key(currency), Float.to_string(rate))
    end)
  end
end
|
||||
53
lib/berrypod/images/optimize_worker.ex
Normal file
53
lib/berrypod/images/optimize_worker.ex
Normal file
@@ -0,0 +1,53 @@
|
||||
defmodule Berrypod.Images.OptimizeWorker do
  @moduledoc """
  Oban worker for processing image variants in the background.
  Handles both database images and filesystem mockups.
  """
  use Oban.Worker, queue: :images, max_attempts: 3

  alias Berrypod.Images.Optimizer

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"type" => "mockup", "source_path" => source_path}}) do
    target_dir = Path.dirname(source_path)
    base = Path.basename(source_path, Path.extname(source_path))

    # Any {:error, reason} from reading or processing falls straight through
    # to Oban, which retries the job (up to max_attempts).
    with {:ok, data} <- File.read(source_path),
         {:ok, _width} <- Optimizer.process_file(data, base, target_dir) do
      :ok
    end
  end

  def perform(%Oban.Job{args: %{"image_id" => image_id}}) do
    case Optimizer.process_for_image(image_id) do
      {:ok, _result} -> :ok
      # :cancel halts retries — a missing row or empty blob won't improve.
      {:error, :not_found} -> {:cancel, :image_not_found}
      {:error, :no_data} -> {:cancel, :no_image_data}
      {:error, other} -> {:error, other}
    end
  end

  @doc """
  Enqueue a database image for optimization.
  """
  def enqueue(image_id) do
    Oban.insert(new(%{image_id: image_id}))
  end

  @doc """
  Enqueue a mockup file for variant generation.
  """
  def enqueue_mockup(source_path) when is_binary(source_path) do
    Oban.insert(new(%{type: "mockup", source_path: source_path}))
  end
end
|
||||
214
lib/berrypod/images/optimizer.ex
Normal file
214
lib/berrypod/images/optimizer.ex
Normal file
@@ -0,0 +1,214 @@
|
||||
defmodule Berrypod.Images.Optimizer do
  @moduledoc """
  Generates optimized image variants. Only creates sizes ≤ source dimensions.
  """

  require Logger

  alias Berrypod.Repo
  alias Berrypod.Media.Image, as: ImageSchema

  # Responsive widths (px) pre-generated for each image.
  @all_widths [400, 800, 1200]
  # Formats written for every applicable width.
  @pregenerated_formats [:avif, :webp, :jpg]
  # Thumbnail target size (px) for the -thumb.jpg variant.
  @thumb_size 200
  # Uploads wider than this are downscaled before storage.
  @max_stored_width 2000
  # Lossy WebP quality used for the stored master copy.
  @storage_quality 90
|
||||
|
||||
# Directory where generated variants live; overridable through the
# :image_cache_dir application env, defaulting to priv/static/image_cache.
def cache_dir do
  configured = Application.get_env(:berrypod, :image_cache_dir)
  configured || Application.app_dir(:berrypod, "priv/static/image_cache")
end
|
||||
|
||||
# Exposes the full width set for callers that enumerate variants
# (e.g. URL building or cache checks outside this module).
def all_widths, do: @all_widths
|
||||
|
||||
@doc """
Returns the expected disk path for a variant file.
Used to check the cache without loading the BLOB from the database.
"""
def variant_path(image_id, width, format) do
  filename = "#{image_id}-#{width}.#{format_ext(format)}"
  Path.join(cache_dir(), filename)
end
|
||||
|
||||
@doc """
Convert uploaded image to optimized WebP for storage.
Images larger than #{@max_stored_width}px are resized down.
Uses lossy WebP (quality #{@storage_quality}) for efficient storage.
Returns {:ok, webp_data, width, height} or {:error, reason}.
"""
def to_optimized_webp(image_data) when is_binary(image_data) do
  with {:ok, decoded} <- Image.from_binary(image_data),
       {source_width, _height, _bands} <- Image.shape(decoded),
       {:ok, fitted} <- maybe_resize(decoded, source_width),
       {out_width, out_height, _} <- Image.shape(fitted),
       {:ok, webp} <-
         Image.write(fitted, :memory, suffix: ".webp", quality: @storage_quality) do
    {:ok, webp, out_width, out_height}
  end
rescue
  # Vips may raise on corrupt input; surface it as an error tuple instead.
  e -> {:error, Exception.message(e)}
end
|
||||
|
||||
# Downscale to @max_stored_width only when the source is wider; never upscale.
defp maybe_resize(image, width) do
  if width <= @max_stored_width do
    {:ok, image}
  else
    Image.thumbnail(image, @max_stored_width)
  end
end
|
||||
|
||||
@doc """
Compute applicable widths from source dimensions.
Only returns widths that are <= source_width (no upscaling).
Falls back to the source width itself when it is smaller than every preset.
"""
def applicable_widths(source_width) when is_integer(source_width) do
  case Enum.filter(@all_widths, fn w -> w <= source_width end) do
    [] -> [source_width]
    widths -> widths
  end
end
|
||||
|
||||
@doc """
Process image and generate all applicable variants.
Called by Oban worker.

Returns `{:ok, widths}` on success, `{:ok, :svg_skipped}` for SVGs,
`{:error, :not_found}` when the row is missing, `{:error, :no_data}` when
it has no blob, or the error tuple from image decoding.
"""
def process_for_image(image_id) do
  case Repo.get(ImageSchema, image_id) do
    nil ->
      {:error, :not_found}

    %{data: nil} ->
      {:error, :no_data}

    # SVGs scale natively in the browser — no raster variants needed, but we
    # still mark the record complete so the UI stops waiting.
    %{is_svg: true} = image ->
      Repo.update!(ImageSchema.changeset(image, %{variants_status: "complete"}))
      {:ok, :svg_skipped}

    %{data: data, source_width: width} = image ->
      File.mkdir_p!(cache_dir())

      # Write source WebP to disk so it can be served by Plug.Static
      source_path = Path.join(cache_dir(), "#{image_id}.webp")
      unless File.exists?(source_path), do: File.write!(source_path, data)

      # No `else` clause: if decoding fails the error tuple is returned as-is
      # and variants_status is intentionally left untouched for a retry.
      with {:ok, vips_image} <- Image.from_binary(data) do
        widths = applicable_widths(width)

        # One thumbnail task plus one task per width × format combination,
        # all run concurrently.
        tasks = [
          Task.async(fn -> generate_thumbnail(vips_image, image_id) end)
          | for w <- widths, fmt <- @pregenerated_formats do
              Task.async(fn -> generate_variant(vips_image, image_id, w, fmt) end)
            end
        ]

        Task.await_many(tasks, :timer.seconds(120))

        Repo.update!(ImageSchema.changeset(image, %{variants_status: "complete"}))
        {:ok, widths}
      end
  end
end
|
||||
|
||||
# Writes the JPEG thumbnail (@thumb_size px), skipping work when it is
# already on disk from a previous run.
defp generate_thumbnail(image, id) do
  target = Path.join(cache_dir(), "#{id}-thumb.jpg")

  return_if_exists(target, fn ->
    with {:ok, small} <- Image.thumbnail(image, @thumb_size),
         {:ok, _} <- Image.write(small, target, quality: 80) do
      :ok
    end
  end)
end
|
||||
|
||||
# Writes a single width/format variant, skipping work when it is already
# on disk from a previous run.
defp generate_variant(image, id, width, format) do
  target = Path.join(cache_dir(), "#{id}-#{width}.#{format_ext(format)}")

  return_if_exists(target, fn ->
    with {:ok, scaled} <- Image.thumbnail(image, width),
         {:ok, _} <- write_format(scaled, target, format) do
      :ok
    end
  end)
end
|
||||
|
||||
# Only invokes the generator when the target file is missing; existing
# files short-circuit to {:ok, :cached}.
defp return_if_exists(path, generate_fn) do
  case File.exists?(path) do
    true -> {:ok, :cached}
    false -> generate_fn.()
  end
end
|
||||
|
||||
# File extension for each supported output format. Any other value raises
# FunctionClauseError, same as the previous per-atom clauses.
defp format_ext(format) when format in [:jpg, :webp, :avif], do: Atom.to_string(format)
|
||||
|
||||
# Encodes the image to disk with per-format options: AVIF/WebP trade encode
# effort for size; JPEG uses a fixed quality.
defp write_format(image, path, format) when format in [:avif, :webp, :jpg] do
  opts =
    case format do
      :avif -> [effort: 5, minimize_file_size: true]
      :webp -> [effort: 6, minimize_file_size: true]
      :jpg -> [quality: 80, minimize_file_size: true]
    end

  Image.write(image, path, opts)
end
|
||||
|
||||
@doc """
Check if disk variants exist for an image.

Returns `true` only when the cached WebP source, the JPEG thumbnail, and
every pregenerated width/format combination are all present in `cache_dir/0`.
"""
def disk_variants_exist?(image_id, source_width) do
  variant_names =
    for w <- applicable_widths(source_width), fmt <- @pregenerated_formats do
      "#{image_id}-#{w}.#{format_ext(fmt)}"
    end

  ["#{image_id}.webp", "#{image_id}-thumb.jpg" | variant_names]
  |> Enum.all?(fn name -> cache_dir() |> Path.join(name) |> File.exists?() end)
end
|
||||
|
||||
@doc """
Process an image file and generate all variants to the specified directory.
Used for both database images (to cache_dir) and mockups (to mockup_dir).
Returns {:ok, source_width} or {:error, reason}.
"""
def process_file(image_data, output_basename, output_dir) when is_binary(image_data) do
  File.mkdir_p!(output_dir)

  # Normalize the input to WebP first, then decode once and fan the resize
  # work out across tasks (one thumbnail + one task per width/format pair).
  with {:ok, webp_data, source_width, _height} <- to_optimized_webp(image_data),
       {:ok, vips_image} <- Image.from_binary(webp_data) do
    widths = applicable_widths(source_width)

    tasks = [
      # Thumbnail is always JPEG at the fixed @thumb_size.
      Task.async(fn ->
        generate_variant_to_dir(
          vips_image,
          output_basename,
          output_dir,
          "thumb",
          :jpg,
          @thumb_size
        )
      end)
      | for w <- widths, fmt <- @pregenerated_formats do
          Task.async(fn ->
            generate_variant_to_dir(vips_image, output_basename, output_dir, w, fmt, w)
          end)
        end
    ]

    # 120s budget for the whole batch of resize/encode tasks.
    Task.await_many(tasks, :timer.seconds(120))
    {:ok, source_width}
  end
rescue
  # Boundary catch: image decoding/encoding can raise; report as a tagged error.
  # NOTE(review): Task.async tasks are linked — a crashing task exits this
  # process rather than raising, so `rescue` does not cover task crashes.
  # Confirm that is acceptable for callers.
  e -> {:error, Exception.message(e)}
end
|
||||
|
||||
# Resizes `image` to `resize_width` and writes it into `dir` as
# "<basename>-<size_label>.<ext>" in the given format. Unlike the cache_dir
# variants, this always writes (no existence short-circuit).
defp generate_variant_to_dir(image, basename, dir, size_label, format, resize_width) do
  target = Path.join(dir, "#{basename}-#{size_label}.#{format_ext(format)}")

  case Image.thumbnail(image, resize_width) do
    {:ok, resized} ->
      case write_format(resized, target, format) do
        {:ok, _written} -> :ok
        failure -> failure
      end

    failure ->
      failure
  end
end
|
||||
end
|
||||
151
lib/berrypod/images/variant_cache.ex
Normal file
151
lib/berrypod/images/variant_cache.ex
Normal file
@@ -0,0 +1,151 @@
|
||||
defmodule Berrypod.Images.VariantCache do
  @moduledoc """
  Ensures all image variants exist on startup.

  This GenServer runs at startup and checks for:

  1. Database images with incomplete variants_status or missing disk files
  2. Mockup source files missing their generated variants
  3. Product images whose downloads are still pending

  Database-image variants are regenerated directly with bounded concurrency;
  mockups and product-image downloads are enqueued as background jobs.
  """

  use GenServer

  import Ecto.Query

  alias Berrypod.Images.{Optimizer, OptimizeWorker}
  alias Berrypod.Media.Image, as: ImageSchema
  alias Berrypod.Products
  alias Berrypod.Repo
  alias Berrypod.Sync.ImageDownloadWorker

  require Logger

  # Resolved at runtime (not a module attribute) so releases pick up the
  # correct application directory.
  defp mockup_dir, do: Application.app_dir(:berrypod, "priv/static/mockups")

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @impl true
  def init(_opts) do
    # Run the (potentially slow) disk/database scan in an unlinked task so
    # application startup is never blocked and a scan failure cannot take the
    # supervision tree down with it.
    Task.start(fn -> ensure_all_variants() end)
    {:ok, %{}}
  end

  # Top-level startup scan: make sure the cache dir exists, then run each
  # check in order.
  defp ensure_all_variants do
    Logger.info("[VariantCache] Checking image variant cache...")
    File.mkdir_p!(Optimizer.cache_dir())

    reset_stale_sync_status()
    ensure_database_image_variants()
    ensure_mockup_variants()
    ensure_product_image_downloads()
  end

  # Reset any provider connections stuck in "syncing" status from interrupted syncs
  defp reset_stale_sync_status do
    {count, _} = Products.reset_stale_sync_status()

    if count > 0 do
      Logger.info("[VariantCache] Reset #{count} stale sync status(es) to idle")
    end
  end

  # Regenerates variants for database images that are either not marked
  # complete, or marked complete but missing files on disk.
  defp ensure_database_image_variants do
    # Only load IDs and source_width for the disk check — avoids loading BLOBs
    incomplete_ids =
      ImageSchema
      |> where([i], i.variants_status != "complete" or is_nil(i.variants_status))
      |> where([i], i.is_svg == false)
      |> select([i], {i.id, i.source_width})
      |> Repo.all()

    # Images the DB claims are complete but whose files were removed/lost.
    complete_missing_ids =
      ImageSchema
      |> where([i], i.variants_status == "complete")
      |> where([i], i.is_svg == false)
      |> where([i], not is_nil(i.source_width))
      |> select([i], {i.id, i.source_width})
      |> Repo.all()
      |> Enum.reject(fn {id, source_width} ->
        Optimizer.disk_variants_exist?(id, source_width)
      end)

    to_process = incomplete_ids ++ complete_missing_ids

    if to_process == [] do
      Logger.info("[VariantCache] All database image variants up to date")
    else
      Logger.info(
        "[VariantCache] Processing #{length(to_process)} images with missing variants..."
      )

      # Process directly instead of round-tripping through Oban — more reliable at startup
      to_process
      |> Task.async_stream(
        fn {id, _source_width} ->
          case Optimizer.process_for_image(id) do
            {:ok, _} ->
              :ok

            {:error, reason} ->
              Logger.warning("[VariantCache] Failed to process #{id}: #{inspect(reason)}")
          end
        end,
        max_concurrency: 4,
        timeout: :timer.seconds(30),
        on_timeout: :kill_task
      )
      |> Enum.count(fn
        {:ok, :ok} -> true
        _ -> false
      end)
      |> then(fn count ->
        Logger.info("[VariantCache] Processed #{count}/#{length(to_process)} image variants")
      end)
    end
  end

  # Enqueues OptimizeWorker jobs for mockup source files that are missing any
  # of their generated variants. No-op when the mockup dir does not exist.
  defp ensure_mockup_variants do
    if File.dir?(mockup_dir()) do
      # Source files are the *.webp files that are not themselves variants.
      sources =
        Path.wildcard(Path.join(mockup_dir(), "*.webp"))
        |> Enum.reject(&variant?/1)

      missing = Enum.reject(sources, &mockup_variants_exist?/1)

      if missing == [] do
        Logger.info("[VariantCache] All mockup variants up to date")
      else
        Logger.info("[VariantCache] Enqueueing #{length(missing)} mockups for processing")
        Enum.each(missing, &OptimizeWorker.enqueue_mockup/1)
      end
    end
  end

  # True when `path` names a generated variant file ("-400"/"-800"/"-1200"/
  # "-thumb" suffix) rather than a mockup source.
  defp variant?(path) do
    basename = Path.basename(path) |> Path.rootname()
    String.match?(basename, ~r/-(400|800|1200|thumb)$/)
  end

  # True when every expected variant file for a mockup source already exists
  # next to it: one JPEG thumb plus avif/webp/jpg at each fixed width.
  defp mockup_variants_exist?(source_path) do
    basename = Path.basename(source_path) |> Path.rootname()
    dir = Path.dirname(source_path)

    expected =
      ["#{basename}-thumb.jpg"] ++
        for w <- [400, 800, 1200], ext <- ["avif", "webp", "jpg"], do: "#{basename}-#{w}.#{ext}"

    Enum.all?(expected, &File.exists?(Path.join(dir, &1)))
  end

  # Enqueues download jobs for product images that have no local data yet
  # (bounded to 500 per startup pass).
  defp ensure_product_image_downloads do
    pending = Products.list_pending_downloads(limit: 500)

    if pending == [] do
      Logger.info("[VariantCache] All product images downloaded")
    else
      Logger.info("[VariantCache] Enqueueing #{length(pending)} product images for download")
      Enum.each(pending, fn image -> ImageDownloadWorker.enqueue(image.id) end)
    end
  end
end
|
||||
3
lib/berrypod/mailer.ex
Normal file
3
lib/berrypod/mailer.ex
Normal file
@@ -0,0 +1,3 @@
|
||||
defmodule Berrypod.Mailer do
  @moduledoc """
  Swoosh mailer for the application.

  Delivery adapter and credentials are read from the `:berrypod` OTP app
  configuration at runtime.
  """
  use Swoosh.Mailer, otp_app: :berrypod
end
|
||||
173
lib/berrypod/media.ex
Normal file
173
lib/berrypod/media.ex
Normal file
@@ -0,0 +1,173 @@
|
||||
defmodule Berrypod.Media do
  @moduledoc """
  The Media context for managing images and file uploads.

  Images are stored as BLOBs in the database (`Berrypod.Media.Image`);
  non-SVG uploads are converted to WebP and get responsive variants
  generated in the background.
  """

  import Ecto.Query, warn: false
  alias Berrypod.Repo
  alias Berrypod.Media.Image, as: ImageSchema
  alias Berrypod.Images.Optimizer
  alias Berrypod.Images.OptimizeWorker

  @doc """
  Uploads an image and stores it in the database.

  For non-SVG images:
  - Converts to lossless WebP for storage (26-41% smaller than PNG)
  - Extracts source dimensions for responsive variant generation
  - Enqueues background job to generate optimized variants (AVIF, WebP, JPEG at multiple sizes)

  Returns `{:ok, %Image{}}` on success, or `{:error, changeset}` when the
  attributes fail validation.

  ## Examples

      iex> upload_image(%{image_type: "logo", filename: "logo.png", ...})
      {:ok, %Image{}}

  """
  def upload_image(attrs) do
    attrs = prepare_image_attrs(attrs)

    case Repo.insert(ImageSchema.changeset(%ImageSchema{}, attrs)) do
      {:ok, image} ->
        # Enqueue background job for non-SVG images
        unless image.is_svg do
          OptimizeWorker.enqueue(image.id)
        end

        {:ok, image}

      error ->
        error
    end
  end

  # Prepares image attributes, converting to lossless WebP and extracting dimensions.
  # SVGs pass through untouched; a failed WebP conversion falls back to the
  # original bytes rather than rejecting the upload.
  defp prepare_image_attrs(%{data: data, content_type: content_type} = attrs)
       when is_binary(data) do
    if is_svg?(content_type, attrs[:filename]) do
      attrs
    else
      case Optimizer.to_optimized_webp(data) do
        {:ok, webp_data, width, height} ->
          attrs
          |> Map.put(:data, webp_data)
          |> Map.put(:content_type, "image/webp")
          |> Map.put(:file_size, byte_size(webp_data))
          |> Map.put(:source_width, width)
          |> Map.put(:source_height, height)
          |> Map.put(:variants_status, "pending")

        {:error, _reason} ->
          # If conversion fails, store original image
          attrs
      end
    end
  end

  # Attrs without binary :data (or without :content_type) are passed through.
  defp prepare_image_attrs(attrs), do: attrs

  # SVG detection by declared content type or .svg filename extension.
  defp is_svg?(content_type, filename) do
    content_type == "image/svg+xml" or
      String.ends_with?(filename || "", ".svg")
  end

  @doc """
  Uploads an image from a LiveView upload entry.

  Reads the consumed upload's temporary file at `path` and stores it via
  `upload_image/1`, taking filename/content-type/size metadata from `entry`.

  ## Examples

      iex> upload_from_entry(path, entry, "logo")
      {:ok, %Image{}}

  """
  def upload_from_entry(path, entry, image_type) do
    file_binary = File.read!(path)

    upload_image(%{
      image_type: image_type,
      filename: entry.client_name,
      content_type: entry.client_type,
      file_size: entry.client_size,
      data: file_binary
    })
  end

  @doc """
  Gets a single image by ID.

  Returns `nil` when no image with that ID exists.

  ## Examples

      iex> get_image(id)
      %Image{}

      iex> get_image("nonexistent")
      nil

  """
  def get_image(id) do
    Repo.get(ImageSchema, id)
  end

  @doc """
  Gets the current logo image.

  Returns the most recently inserted image of type "logo", or `nil`.

  ## Examples

      iex> get_logo()
      %Image{}

  """
  def get_logo do
    Repo.one(
      from i in ImageSchema,
        where: i.image_type == "logo",
        order_by: [desc: i.inserted_at],
        limit: 1
    )
  end

  @doc """
  Gets the current header image.

  Returns the most recently inserted image of type "header", or `nil`.

  ## Examples

      iex> get_header()
      %Image{}

  """
  def get_header do
    Repo.one(
      from i in ImageSchema,
        where: i.image_type == "header",
        order_by: [desc: i.inserted_at],
        limit: 1
    )
  end

  @doc """
  Deletes an image.

  ## Examples

      iex> delete_image(image)
      {:ok, %Image{}}

  """
  def delete_image(%ImageSchema{} = image) do
    Repo.delete(image)
  end

  @doc """
  Lists all images of a specific type, newest first.

  ## Examples

      iex> list_images_by_type("logo")
      [%Image{}, ...]

  """
  def list_images_by_type(type) do
    Repo.all(from i in ImageSchema, where: i.image_type == ^type, order_by: [desc: i.inserted_at])
  end
end
|
||||
68
lib/berrypod/media/image.ex
Normal file
68
lib/berrypod/media/image.ex
Normal file
@@ -0,0 +1,68 @@
|
||||
defmodule Berrypod.Media.Image do
  @moduledoc """
  Ecto schema for uploaded images stored as BLOBs in the `images` table.

  Tracks the raw bytes plus metadata used by the variant pipeline
  (source dimensions, `variants_status`) and SVG-specific fields.
  """
  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  schema "images" do
    # image_type: one of "logo" | "header" | "product" (validated below)
    field :image_type, :string
    field :filename, :string
    field :content_type, :string
    # file_size: stored size in bytes, capped by @max_file_size
    field :file_size, :integer
    # data: the raw image bytes (WebP for converted uploads)
    field :data, :binary
    field :is_svg, :boolean, default: false
    # svg_content: textual copy of the SVG markup, set only for SVG uploads
    field :svg_content, :string
    # source_width/height: original pixel dimensions, used to pick variant widths
    field :source_width, :integer
    field :source_height, :integer
    # variants_status: "pending" until background variant generation completes
    field :variants_status, :string, default: "pending"

    timestamps(type: :utc_datetime)
  end

  # 5 MB upper bound on stored image size.
  @max_file_size 5_000_000

  @doc false
  def changeset(image, attrs) do
    image
    |> cast(attrs, [
      :image_type,
      :filename,
      :content_type,
      :file_size,
      :data,
      :is_svg,
      :svg_content,
      :source_width,
      :source_height,
      :variants_status
    ])
    |> validate_required([:image_type, :filename, :content_type, :file_size, :data])
    |> validate_inclusion(:image_type, ~w(logo header product))
    |> validate_number(:file_size, less_than: @max_file_size)
    |> detect_svg()
  end

  # Flags the record as SVG when the content type or filename indicates one.
  # NOTE(review): uses get_change/2, so on an update where :content_type and
  # :filename are unchanged this does nothing — confirm that is intended.
  defp detect_svg(changeset) do
    content_type = get_change(changeset, :content_type)

    if content_type == "image/svg+xml" or
         String.ends_with?(get_change(changeset, :filename) || "", ".svg") do
      changeset
      |> put_change(:is_svg, true)
      |> maybe_store_svg_content()
    else
      changeset
    end
  end

  # Mirrors the binary :data into :svg_content so the SVG markup is queryable
  # as text; no-op when :data was not part of this change.
  defp maybe_store_svg_content(changeset) do
    case get_change(changeset, :data) do
      nil ->
        changeset

      svg_binary when is_binary(svg_binary) ->
        put_change(changeset, :svg_content, svg_binary)
    end
  end
end
|
||||
135
lib/berrypod/media/svg_recolorer.ex
Normal file
135
lib/berrypod/media/svg_recolorer.ex
Normal file
@@ -0,0 +1,135 @@
|
||||
defmodule Berrypod.Media.SVGRecolorer do
  @moduledoc """
  Recolors SVG images by replacing fill and stroke colors with a target color.

  This module provides functionality to dynamically recolor SVG logos
  to match the site's branding colors.
  """

  @doc """
  Recolors an SVG by replacing common color attributes with the target color.

  Replaces:
  - fill="..." attributes (except fill="none")
  - stroke="..." attributes (except stroke="none")
  - style="fill:..." inline styles
  - style="stroke:..." inline styles
  - fill/stroke declarations inside CSS blocks
  - `currentColor` keywords

  ## Examples

      iex> svg = ~s(<svg><path fill="#000000" d="..."/></svg>)
      iex> SVGRecolorer.recolor(svg, "#ff6600")
      ~s(<svg><path fill="#ff6600" d="..."/></svg>)

  """
  @spec recolor(String.t(), String.t()) :: String.t()
  def recolor(svg_content, target_color)
      when is_binary(svg_content) and is_binary(target_color) do
    # Apply each transform in order; order matters (attributes first,
    # then inline styles, then CSS blocks, then currentColor).
    transforms = [
      &replace_fill_attrs/2,
      &replace_stroke_attrs/2,
      &replace_inline_fill/2,
      &replace_inline_stroke/2,
      &replace_css_fill/2,
      &replace_css_stroke/2,
      &replace_current_color/2
    ]

    Enum.reduce(transforms, svg_content, fn transform, svg ->
      transform.(svg, target_color)
    end)
  end

  # fill="..." attributes; (?!none) preserves fill="none".
  defp replace_fill_attrs(svg, color) do
    Regex.replace(
      ~r/fill\s*=\s*["'](?!none)([^"']+)["']/i,
      svg,
      ~s(fill="#{color}")
    )
  end

  # stroke="..." attributes; (?!none) preserves stroke="none".
  defp replace_stroke_attrs(svg, color) do
    Regex.replace(
      ~r/stroke\s*=\s*["'](?!none)([^"']+)["']/i,
      svg,
      ~s(stroke="#{color}")
    )
  end

  # fill:... inside style="..." attributes.
  defp replace_inline_fill(svg, color) do
    Regex.replace(
      ~r/style\s*=\s*["']([^"']*)fill\s*:\s*(?!none)[^;}"']+([^"']*)["']/i,
      svg,
      ~s(style="\\1fill:#{color}\\2")
    )
  end

  # stroke:... inside style="..." attributes.
  defp replace_inline_stroke(svg, color) do
    Regex.replace(
      ~r/style\s*=\s*["']([^"']*)stroke\s*:\s*(?!none)[^;}"']+([^"']*)["']/i,
      svg,
      ~s(style="\\1stroke:#{color}\\2")
    )
  end

  # Replace fill declarations in CSS style blocks: fill:#XXXXXX or fill: #XXXXXX
  # But preserve fill:none
  defp replace_css_fill(svg, color) do
    Regex.replace(
      ~r/fill\s*:\s*(?!none)(#[0-9A-Fa-f]{3,6}|[a-zA-Z]+)(?=[;\s\}])/,
      svg,
      "fill:#{color}"
    )
  end

  # Replace stroke declarations in CSS style blocks: stroke:#XXXXXX or stroke: #XXXXXX
  # But preserve stroke:none
  defp replace_css_stroke(svg, color) do
    Regex.replace(
      ~r/stroke\s*:\s*(?!none)(#[0-9A-Fa-f]{3,6}|[a-zA-Z]+)(?=[;\s\}])/,
      svg,
      "stroke:#{color}"
    )
  end

  # Literal replacement of the CSS currentColor keyword.
  defp replace_current_color(svg, color) do
    String.replace(svg, "currentColor", color)
  end

  @doc """
  Validates that a string is a valid hex color code.

  ## Examples

      iex> SVGRecolorer.valid_hex_color?("#ff6600")
      true

      iex> SVGRecolorer.valid_hex_color?("#f60")
      true

      iex> SVGRecolorer.valid_hex_color?("invalid")
      false

  """
  @spec valid_hex_color?(String.t()) :: boolean()
  def valid_hex_color?(color) when is_binary(color) do
    Regex.match?(~r/^#([0-9A-Fa-f]{3}|[0-9A-Fa-f]{6})$/, color)
  end

  def valid_hex_color?(_), do: false

  @doc """
  Normalizes a hex color to 6-digit format.

  ## Examples

      iex> SVGRecolorer.normalize_hex_color("#f60")
      "#ff6600"

      iex> SVGRecolorer.normalize_hex_color("#ff6600")
      "#ff6600"

  """
  @spec normalize_hex_color(String.t()) :: String.t()
  def normalize_hex_color("#" <> short) when byte_size(short) == 3 do
    # "#abc" -> "#aabbcc": double each of the three digits.
    [red, green, blue] = String.graphemes(short)
    "#" <> red <> red <> green <> green <> blue <> blue
  end

  def normalize_hex_color(color), do: color
end
|
||||
568
lib/berrypod/mockups/generator.ex
Normal file
568
lib/berrypod/mockups/generator.ex
Normal file
@@ -0,0 +1,568 @@
|
||||
defmodule Berrypod.Mockups.Generator do
|
||||
@moduledoc """
|
||||
Generates product mockups using the Printify API.
|
||||
|
||||
This module handles the end-to-end process of:
|
||||
1. Looking up product blueprints and variants
|
||||
2. Downloading artwork from Unsplash
|
||||
3. Uploading artwork to Printify
|
||||
4. Creating products with the artwork
|
||||
5. Downloading generated mockup images
|
||||
6. Optionally cleaning up created products
|
||||
"""
|
||||
|
||||
alias Berrypod.Clients.Printify, as: Client
|
||||
|
||||
@output_dir "priv/static/mockups"
|
||||
|
||||
@doc """
Product definitions with their artwork URLs and Printify product types.

Static catalog used for demo mockup generation. Each entry carries:
- `:name` / `:slug` / `:category` — display and file-naming metadata
- `:artwork_url` — Unsplash download URL for the source artwork
- `:product_type` — key into `blueprint_config/0`
- `:price` — price in minor units (e.g. 2400 = 24.00)
- `:colors` (optional) — colour names to enable one variant each for
"""
def product_definitions do
  [
    %{
      name: "Mountain Sunrise Canvas",
      slug: "mountain-sunrise-canvas",
      category: "Canvas Prints",
      artwork_url: unsplash_download_url("UweNcthlmDc"),
      product_type: :canvas,
      price: 2400
    },
    %{
      name: "Ocean Waves Canvas",
      slug: "ocean-waves-canvas",
      category: "Canvas Prints",
      artwork_url: unsplash_download_url("XRhUTUVuXAE"),
      product_type: :canvas,
      price: 2400
    },
    %{
      name: "Wildflower Meadow Canvas",
      slug: "wildflower-meadow-canvas",
      category: "Canvas Prints",
      artwork_url: unsplash_download_url("QvjL4y7SF9k"),
      product_type: :canvas,
      price: 2400
    },
    %{
      name: "Geometric Abstract Canvas",
      slug: "geometric-abstract-canvas",
      category: "Canvas Prints",
      artwork_url: unsplash_download_url("-6GvTDpkkPU"),
      product_type: :canvas,
      price: 2800
    },
    %{
      name: "Botanical Illustration Canvas",
      slug: "botanical-illustration-canvas",
      category: "Canvas Prints",
      artwork_url: unsplash_download_url("FNtNIDQWUZY"),
      product_type: :canvas,
      price: 2400
    },
    %{
      name: "Forest Silhouette T-Shirt",
      slug: "forest-silhouette-tshirt",
      category: "Apparel",
      artwork_url: unsplash_download_url("EhvMzMRO4_o"),
      product_type: :tshirt,
      price: 2999,
      colors: ["Black", "White", "Sport Grey", "Forest Green"]
    },
    %{
      name: "Forest Light Hoodie",
      slug: "forest-light-hoodie",
      category: "Apparel",
      artwork_url: unsplash_download_url("FwVkxITt8Bg"),
      product_type: :hoodie,
      price: 4499,
      colors: ["Dark Heather", "Navy", "Forest Green", "Sand"]
    },
    %{
      name: "Wildflower Meadow Tote Bag",
      slug: "wildflower-meadow-tote",
      category: "Apparel",
      artwork_url: unsplash_download_url("QvjL4y7SF9k"),
      product_type: :tote,
      price: 1999
    },
    %{
      name: "Sunset Gradient Tote Bag",
      slug: "sunset-gradient-tote",
      category: "Apparel",
      artwork_url: unsplash_download_url("XRhUTUVuXAE"),
      product_type: :tote,
      price: 1999
    },
    %{
      name: "Fern Leaf Mug",
      slug: "fern-leaf-mug",
      category: "Homewares",
      artwork_url: unsplash_download_url("bYiJojtkHnc"),
      product_type: :mug,
      price: 1499
    },
    %{
      name: "Ocean Waves Cushion",
      slug: "ocean-waves-cushion",
      category: "Homewares",
      artwork_url: unsplash_download_url("XRhUTUVuXAE"),
      product_type: :cushion,
      price: 2999
    },
    %{
      name: "Night Sky Blanket",
      slug: "night-sky-blanket",
      category: "Homewares",
      artwork_url: unsplash_download_url("oQR1B87HsNs"),
      product_type: :blanket,
      price: 5999
    },
    %{
      name: "Autumn Leaves Notebook",
      slug: "autumn-leaves-notebook",
      category: "Stationery",
      artwork_url: unsplash_download_url("Aa3ALtIxEGY"),
      product_type: :notebook,
      price: 1999
    },
    %{
      name: "Monstera Leaf Notebook",
      slug: "monstera-leaf-notebook",
      category: "Stationery",
      artwork_url: unsplash_download_url("hETU8_b2IM0"),
      product_type: :notebook,
      price: 1999
    },
    %{
      name: "Monstera Leaf Phone Case",
      slug: "monstera-leaf-phone-case",
      category: "Accessories",
      artwork_url: unsplash_download_url("hETU8_b2IM0"),
      product_type: :phone_case,
      price: 2499
    },
    %{
      name: "Blue Waves Laptop Sleeve",
      slug: "blue-waves-laptop-sleeve",
      category: "Accessories",
      artwork_url: unsplash_download_url("dYksH3vHorc"),
      product_type: :laptop_sleeve,
      price: 3499
    }
  ]
end
|
||||
|
||||
@doc """
Blueprint configurations for each product type.
These IDs need to be looked up from the Printify catalog.

Each entry's `:search_term` is partial-matched (case-insensitive) against
Printify blueprint titles by `find_blueprint/1`; `:preferred_provider_id`
(or nil) feeds `find_print_provider/2`.
"""
def blueprint_config do
  %{
    # Search terms matched to Printify's actual blueprint titles (partial match).
    # preferred_provider_id selects a UK-based provider where available:
    # 29 = Monster Digital (UK) — apparel, mugs
    # 72 = Print Clever (UK) — canvas prints
    canvas: %{search_term: "Satin Canvas, Stretched", preferred_provider_id: 72},
    tshirt: %{search_term: "Softstyle T-Shirt", preferred_provider_id: 29},
    hoodie: %{search_term: "Heavy Blend™ Hooded Sweatshirt", preferred_provider_id: 29},
    tote: %{search_term: "Cotton Tote Bag", preferred_provider_id: nil},
    mug: %{search_term: "Ceramic Mug", preferred_provider_id: 29},
    cushion: %{search_term: "Spun Polyester Square Pillow", preferred_provider_id: nil},
    blanket: %{search_term: "Sherpa Fleece Blanket", preferred_provider_id: nil},
    notebook: %{search_term: "Hardcover Journal Matte", preferred_provider_id: nil},
    phone_case: %{search_term: "Tough Phone Cases", preferred_provider_id: nil},
    laptop_sleeve: %{search_term: "Laptop Sleeve", preferred_provider_id: nil}
  }
end
|
||||
|
||||
@doc """
Generate Unsplash download URL from photo ID.
Uses the Unsplash download API which provides high-quality images.
"""
def unsplash_download_url(photo_id) do
  "https://unsplash.com/photos/" <> photo_id <> "/download?force=true"
end
|
||||
|
||||
@doc """
Search for a blueprint by name/term.

Performs a case-insensitive substring match against blueprint titles.
Returns `{:ok, blueprint}`, `{:error, {:blueprint_not_found, term}}`, or
passes through the client error.
"""
def find_blueprint(search_term) do
  with {:ok, blueprints} <- Client.get_blueprints() do
    needle = String.downcase(search_term)

    blueprints
    |> Enum.find(fn bp ->
      String.contains?(String.downcase(bp["title"] || ""), needle)
    end)
    |> case do
      nil -> {:error, {:blueprint_not_found, search_term}}
      bp -> {:ok, bp}
    end
  end
end
|
||||
|
||||
@doc """
Find a suitable print provider for a blueprint.

When `preferred_provider_id` is given, uses that provider if available
for the blueprint. Falls back to the first provider otherwise.
"""
def find_print_provider(blueprint_id, preferred_provider_id \\ nil) do
  case Client.get_print_providers(blueprint_id) do
    {:ok, [_ | _] = providers} ->
      preferred =
        preferred_provider_id &&
          Enum.find(providers, fn provider -> provider["id"] == preferred_provider_id end)

      {:ok, preferred || hd(providers)}

    {:ok, []} ->
      {:error, :no_providers}

    error ->
      error
  end
end
|
||||
|
||||
@doc """
Get variant and placeholder information for a blueprint/provider combination.

Normalizes the client response: a bare list, a `%{"variants" => [...]}` map,
or any other map (falling back to its `"variants"` key or `[]`).
"""
def get_variant_info(blueprint_id, print_provider_id) do
  case Client.get_variants(blueprint_id, print_provider_id) do
    # A bare list and a wrapped list are equivalent; lists and maps are
    # disjoint, so clause order between these two does not matter.
    {:ok, variant_list} when is_list(variant_list) ->
      {:ok, variant_list}

    {:ok, %{"variants" => variant_list}} when is_list(variant_list) ->
      {:ok, variant_list}

    {:ok, response} when is_map(response) ->
      # Handle case where variants might be nested differently
      {:ok, response["variants"] || []}

    error ->
      error
  end
end
|
||||
|
||||
@doc """
Upload artwork to Printify from a URL.

The uploaded file is named `"<name>.jpg"`.
"""
def upload_artwork(name, url) do
  Client.upload_image("#{name}.jpg", url)
end
|
||||
|
||||
@doc """
Calculate scale factor for "cover" behavior.
Image will fill entire placeholder, cropping edges if necessary.
Printify scale is relative to placeholder width (1.0 = artwork width matches placeholder width).

Falls back to 1.0 for any non-positive or non-numeric dimension.
"""
def calculate_cover_scale(art_w, art_h, place_w, place_h)
    when is_number(art_w) and is_number(art_h) and
           is_number(place_w) and is_number(place_h) and
           art_w > 0 and art_h > 0 and
           place_w > 0 and place_h > 0 do
  # Width-matching scale is 1.0 by definition; compare against the scale
  # needed to cover the placeholder height and take whichever is larger.
  height_scale = place_h * art_w / (art_h * place_w)
  max(1.0, height_scale)
end

def calculate_cover_scale(_, _, _, _), do: 1.0
|
||||
|
||||
@doc """
Create a product with the uploaded artwork.

When the product definition includes a `colors` list, enables one variant
per colour (picking a middle size for each). Printify generates mockup
images for every enabled colour automatically.

`image_id` is the Printify upload id returned by `upload_artwork/2`;
`image_width`/`image_height` are the artwork's pixel dimensions used for
the cover-scale calculation. Returns the `Client.create_product/2` result.
"""
def create_product(
      shop_id,
      product_def,
      image_id,
      image_width,
      image_height,
      blueprint_id,
      print_provider_id,
      variants
    ) do
  selected_variants = select_variants(variants, product_def)

  IO.puts(" Enabling #{length(selected_variants)} variant(s)")

  # Use the first selected variant for placeholder/scale calculations
  variant = hd(selected_variants)
  placeholders = variant["placeholders"] || []

  # Prefer the "front" placeholder; fall back to whatever comes first.
  front_placeholder =
    Enum.find(placeholders, fn p -> p["position"] == "front" end) || hd(placeholders)

  placeholder_width = front_placeholder["width"]
  placeholder_height = front_placeholder["height"]

  scale =
    calculate_cover_scale(image_width, image_height, placeholder_width, placeholder_height)

  IO.puts(
    " Scale calculation: artwork #{image_width}x#{image_height}, placeholder #{placeholder_width}x#{placeholder_height} -> scale #{Float.round(scale, 3)}"
  )

  variant_ids = Enum.map(selected_variants, & &1["id"])

  # Payload shape per Printify's product-creation API: every enabled variant
  # shares one print area, with the artwork centered (x/y 0.5) at cover scale.
  product_data = %{
    title: product_def.name,
    description: "#{product_def.name} - Nature-inspired design from Wildprint Studio",
    blueprint_id: blueprint_id,
    print_provider_id: print_provider_id,
    variants:
      Enum.map(selected_variants, fn v ->
        %{id: v["id"], price: product_def.price, is_enabled: true}
      end),
    print_areas: [
      %{
        variant_ids: variant_ids,
        placeholders: [
          %{
            position: front_placeholder["position"] || "front",
            images: [
              %{id: image_id, x: 0.5, y: 0.5, scale: scale, angle: 0}
            ]
          }
        ]
      }
    ]
  }

  Client.create_product(shop_id, product_data)
end
|
||||
|
||||
# Pick one variant per requested colour (middle size), or fall back to hd.
defp select_variants(variants, %{colors: colors}) when is_list(colors) and colors != [] do
  # Group variants by the colour portion of their title ("Dark Heather / L" → "Dark Heather")
  grouped =
    Enum.group_by(variants, fn variant ->
      variant["title"] |> to_string() |> String.split(" / ") |> hd() |> String.trim()
    end)

  picked =
    Enum.flat_map(colors, fn color ->
      case grouped do
        %{^color => matches} ->
          # Pick the middle variant (typically a medium size)
          [Enum.at(matches, div(length(matches), 2))]

        _ ->
          IO.puts(" Warning: colour #{inspect(color)} not found in blueprint variants")
          []
      end
    end)

  case picked do
    [] -> [hd(variants)]
    _ -> picked
  end
end

defp select_variants(variants, _product_def), do: [hd(variants)]
|
||||
|
||||
@doc """
Extract mockup image URLs from a created product.

Returns the `"src"` of each entry under the product's `"images"` key
(empty list when the key is missing or nil).
"""
def extract_mockup_urls(product) do
  # `|| []` (not Map.get/3 with a default) so an explicit nil also yields [].
  (product["images"] || [])
  |> Enum.map(& &1["src"])
end
|
||||
|
||||
@doc """
Download mockup images, save as WebP source, and generate variants.
Sources are saved for regeneration on startup via VariantCache.

Returns one `{:ok, basename, source_width}` or `{:error, {url, reason}}`
per URL, in order.
"""
def download_mockups(product_slug, mockup_urls) do
  alias Berrypod.Images.Optimizer

  File.mkdir_p!(@output_dir)

  mockup_urls
  |> Enum.with_index(1)
  |> Enum.map(fn {url, index} ->
    # Each mockup is named "<slug>-<index>"; its WebP source lands next to
    # the generated variants in @output_dir.
    basename = "#{product_slug}-#{index}"
    source_path = Path.join(@output_dir, "#{basename}.webp")
    IO.puts(" Processing mockup #{index}...")

    # Download to a temp file first; only converted WebP is kept.
    temp_path = Path.join(System.tmp_dir!(), "#{basename}-temp.jpg")

    with {:ok, _} <- Client.download_file(url, temp_path),
         {:ok, image_data} <- File.read(temp_path),
         {:ok, webp_data, source_width, _} <- Optimizer.to_optimized_webp(image_data),
         :ok <- File.write(source_path, webp_data),
         {:ok, _} <- Optimizer.process_file(webp_data, basename, @output_dir) do
      # Temp file removed on both success and failure paths.
      File.rm(temp_path)
      IO.puts(" Saved source + variants for #{basename} (#{source_width}px)")
      {:ok, basename, source_width}
    else
      {:error, reason} ->
        File.rm(temp_path)
        {:error, {url, reason}}
    end
  end)
end
|
||||
|
||||
@doc """
Deletes all products from the Printify shop.

Returns the number of products deleted (0 when listing fails or
returns an unexpected shape).
"""
def purge_all_products(shop_id) do
  case Client.list_products(shop_id) do
    {:ok, %{"data" => products}} when is_list(products) ->
      for product <- products do
        IO.puts("  Deleting: #{product["title"]} (#{product["id"]})")
        Client.delete_product(shop_id, product["id"])
        # Throttle deletions to stay under the provider's rate limit.
        Process.sleep(200)
      end

      length(products)

    _other ->
      0
  end
end
|
||||
|
||||
@doc """
|
||||
Generate mockups for all products.
|
||||
"""
|
||||
def generate_all(opts \\ []) do
|
||||
cleanup = Keyword.get(opts, :cleanup, false)
|
||||
|
||||
IO.puts("Starting mockup generation...")
|
||||
IO.puts("")
|
||||
|
||||
# Get shop ID
|
||||
IO.puts("Fetching shop ID...")
|
||||
{:ok, shop_id} = Client.get_shop_id()
|
||||
IO.puts("Using shop ID: #{shop_id}")
|
||||
IO.puts("")
|
||||
|
||||
results =
|
||||
product_definitions()
|
||||
|> Enum.map(fn product_def ->
|
||||
IO.puts("Processing: #{product_def.name}")
|
||||
result = generate_single(shop_id, product_def)
|
||||
|
||||
case result do
|
||||
{:ok, product_id, mockup_paths} ->
|
||||
IO.puts(" ✓ Generated #{length(mockup_paths)} mockups")
|
||||
{:ok, product_def.slug, product_id, mockup_paths}
|
||||
|
||||
{:error, reason} ->
|
||||
IO.puts(" ✗ Error: #{inspect(reason)}")
|
||||
{:error, product_def.slug, reason}
|
||||
end
|
||||
end)
|
||||
|
||||
# Cleanup if requested
|
||||
if cleanup do
|
||||
IO.puts("")
|
||||
IO.puts("Cleaning up created products...")
|
||||
|
||||
results
|
||||
|> Enum.filter(fn
|
||||
{:ok, _, _, _} -> true
|
||||
_ -> false
|
||||
end)
|
||||
|> Enum.each(fn {:ok, slug, product_id, _} ->
|
||||
IO.puts(" Deleting #{slug}...")
|
||||
Client.delete_product(shop_id, product_id)
|
||||
end)
|
||||
|
||||
IO.puts("Cleanup complete.")
|
||||
end
|
||||
|
||||
IO.puts("")
|
||||
IO.puts("Mockup generation complete!")
|
||||
IO.puts("Output directory: #{@output_dir}")
|
||||
|
||||
results
|
||||
end
|
||||
|
||||
@doc """
Generate mockups for a single product.

Pipeline: find the blueprint and print provider, upload the artwork,
create the product on Printify, then download and optimize the mockup
images it generated.

Returns `{:ok, product_id, basenames}` where `basenames` are the saved
mockup file basenames, or `{:error, reason}` from the first failed step.
"""
def generate_single(shop_id, product_def) do
  config = blueprint_config()[product_def.product_type]

  with {:ok, blueprint} <- find_blueprint(config.search_term),
       blueprint_id = blueprint["id"],
       _ = IO.puts("  Found blueprint: #{blueprint["title"]} (#{blueprint_id})"),
       {:ok, provider} <- find_print_provider(blueprint_id, config[:preferred_provider_id]),
       provider_id = provider["id"],
       _ = IO.puts("  Using provider: #{provider["title"]} (#{provider_id})"),
       {:ok, variants} <- get_variant_info(blueprint_id, provider_id),
       _ = IO.puts("  Found #{length(variants)} variants"),
       _ = IO.puts("  Uploading artwork..."),
       {:ok, upload} <- upload_artwork(product_def.slug, product_def.artwork_url),
       image_id = upload["id"],
       image_width = upload["width"],
       image_height = upload["height"],
       _ = IO.puts("  Artwork uploaded (ID: #{image_id}, #{image_width}x#{image_height})"),
       _ = IO.puts("  Creating product..."),
       {:ok, product} <-
         create_product(
           shop_id,
           product_def,
           image_id,
           image_width,
           image_height,
           blueprint_id,
           provider_id,
           variants
         ),
       product_id = product["id"],
       mockup_urls = extract_mockup_urls(product),
       _ = IO.puts("  Product created (ID: #{product_id})"),
       _ = IO.puts("  Downloading #{length(mockup_urls)} mockups...") do
    # BUG FIX: download_mockups/2 returns {:ok, basename, width} or
    # {:error, {url, reason}} per URL. The previous code filtered with
    # match?({:ok, _}, &1) — a 2-tuple pattern that never matches the
    # 3-tuple success shape — so every successful download was silently
    # dropped and this function always returned an empty list.
    successful_downloads =
      for {:ok, basename, _width} <- download_mockups(product_def.slug, mockup_urls) do
        basename
      end

    {:ok, product_id, successful_downloads}
  end
end
|
||||
|
||||
@doc """
List all available blueprints (for discovery).

Returns `[{id, title}, ...]` sorted by title, or the `{:error, _}` tuple
from the API client unchanged.
"""
def list_blueprints do
  with {:ok, blueprints} <- Client.get_blueprints() do
    blueprints
    |> Enum.map(&{&1["id"], &1["title"]})
    |> Enum.sort_by(&elem(&1, 1))
  end
end
|
||||
|
||||
@doc """
Search blueprints by keyword (case-insensitive substring match on title).

Returns `[{id, title}, ...]` for matching blueprints, or the `{:error, _}`
tuple from the API client unchanged.
"""
def search_blueprints(keyword) do
  needle = String.downcase(keyword)

  with {:ok, blueprints} <- Client.get_blueprints() do
    for bp <- blueprints,
        title = bp["title"] || "",
        String.contains?(String.downcase(title), needle) do
      {bp["id"], title}
    end
  end
end
|
||||
end
|
||||
422
lib/berrypod/mockups/printful_generator.ex
Normal file
422
lib/berrypod/mockups/printful_generator.ex
Normal file
@@ -0,0 +1,422 @@
|
||||
defmodule Berrypod.Mockups.PrintfulGenerator do
|
||||
@moduledoc """
|
||||
Generates product mockups and/or creates demo products using the Printful API.
|
||||
|
||||
Two independent capabilities, composable via options:
|
||||
|
||||
1. **Mockup images** — uses the v2 mockup tasks API to generate on-product
|
||||
mockup images for the theme preview. Images are saved to `priv/static/mockups/`.
|
||||
|
||||
2. **Sync products** — creates real products in the Printful store via the
|
||||
v1 store products API. These can later be synced into the shop.
|
||||
"""
|
||||
|
||||
alias Berrypod.Clients.Printful, as: Client
|
||||
alias Berrypod.Images.Optimizer
|
||||
alias Berrypod.Mockups.Generator
|
||||
|
||||
@output_dir "priv/static/mockups"
|
||||
@poll_interval_ms 2_000
|
||||
@max_poll_attempts 30
|
||||
@api_delay_ms 3_000
|
||||
@max_retries 3
|
||||
|
||||
# ============================================================================
|
||||
# Catalog config
|
||||
# ============================================================================
|
||||
|
||||
@doc """
|
||||
Maps product types to Printful catalog product IDs, techniques, and variant IDs.
|
||||
|
||||
Variant IDs are needed for sync product creation. Catalog product IDs and
|
||||
techniques are needed for mockup generation.
|
||||
"""
|
||||
def catalog_config do
|
||||
%{
|
||||
# Multiple variant IDs = multiple colour/size mockups per product
|
||||
canvas: %{
|
||||
catalog_product_id: 3,
|
||||
technique: "digital",
|
||||
variant_ids: [
|
||||
{19303, "16″×24″"},
|
||||
{19309, "20″×24″"},
|
||||
{19315, "24″×30″"},
|
||||
{825, "24″×36″"}
|
||||
]
|
||||
},
|
||||
tshirt: %{
|
||||
catalog_product_id: 71,
|
||||
technique: "dtg",
|
||||
variant_ids: [
|
||||
{4017, "Black / M"},
|
||||
{4027, "Ash / M"},
|
||||
{4022, "Aqua / M"},
|
||||
{4082, "Gold / M"},
|
||||
{8452, "Forest / M"}
|
||||
]
|
||||
},
|
||||
hoodie: %{
|
||||
catalog_product_id: 146,
|
||||
technique: "dtg",
|
||||
variant_ids: [
|
||||
{5531, "Black / M"},
|
||||
{20553, "Ash / M"},
|
||||
{21636, "Carolina Blue / M"},
|
||||
{5555, "Dark Chocolate / M"}
|
||||
]
|
||||
},
|
||||
tote: %{
|
||||
catalog_product_id: 274,
|
||||
technique: "cut-sew",
|
||||
variant_ids: [
|
||||
{9039, "Black"},
|
||||
{9040, "Red"},
|
||||
{9041, "Yellow"}
|
||||
]
|
||||
},
|
||||
mug: %{
|
||||
catalog_product_id: 19,
|
||||
technique: "sublimation",
|
||||
variant_ids: [{1320, "White / 11oz"}]
|
||||
},
|
||||
blanket: %{
|
||||
catalog_product_id: 395,
|
||||
technique: "sublimation",
|
||||
variant_ids: [{13222, "60″×80″"}]
|
||||
},
|
||||
laptop_sleeve: %{
|
||||
catalog_product_id: 394,
|
||||
technique: "sublimation",
|
||||
variant_ids: [{10984, "13″"}]
|
||||
},
|
||||
phone_case: %{
|
||||
catalog_product_id: 181,
|
||||
technique: "uv",
|
||||
variant_ids: [{17616, "iPhone 15 Pro Max"}]
|
||||
},
|
||||
poster: %{
|
||||
catalog_product_id: 1,
|
||||
technique: "digital",
|
||||
variant_ids: [
|
||||
{3876, "12″×18″"},
|
||||
{3877, "16″×20″"},
|
||||
{1, "18″×24″"}
|
||||
]
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
@doc """
Product definitions filtered to types available in Printful's catalog.

Reuses artwork URLs and slugs from the shared Printify generator definitions,
but skips product types not available in Printful (cushion, notebook).
"""
def product_definitions do
  supported = catalog_config() |> Map.keys() |> MapSet.new()

  Enum.filter(Generator.product_definitions(), fn definition ->
    MapSet.member?(supported, definition.product_type)
  end)
end
|
||||
|
||||
# ============================================================================
|
||||
# Main entry point
|
||||
# ============================================================================
|
||||
|
||||
@doc """
|
||||
Generate mockups and/or create products.
|
||||
|
||||
## Options
|
||||
|
||||
* `:mockups` — generate mockup images (default: true)
|
||||
* `:products` — create sync products in Printful (default: true)
|
||||
* `:cleanup` — delete created sync products after (default: false)
|
||||
"""
|
||||
def generate_all(opts \\ []) do
|
||||
do_mockups = Keyword.get(opts, :mockups, true)
|
||||
do_products = Keyword.get(opts, :products, true)
|
||||
cleanup = Keyword.get(opts, :cleanup, false)
|
||||
|
||||
definitions = product_definitions()
|
||||
|
||||
IO.puts("Starting Printful generation...")
|
||||
IO.puts(" Mockups: #{do_mockups}, Products: #{do_products}, Cleanup: #{cleanup}")
|
||||
IO.puts(" #{length(definitions)} product definitions")
|
||||
IO.puts("")
|
||||
|
||||
# Generate mockup images
|
||||
mockup_results =
|
||||
if do_mockups do
|
||||
generate_mockups(definitions)
|
||||
else
|
||||
[]
|
||||
end
|
||||
|
||||
# Create sync products
|
||||
product_results =
|
||||
if do_products do
|
||||
create_products(definitions)
|
||||
else
|
||||
[]
|
||||
end
|
||||
|
||||
# Cleanup if requested
|
||||
if cleanup and do_products do
|
||||
IO.puts("")
|
||||
IO.puts("Cleaning up created products...")
|
||||
|
||||
product_results
|
||||
|> Enum.filter(&match?({:ok, _, _}, &1))
|
||||
|> Enum.each(fn {:ok, slug, product_id} ->
|
||||
IO.puts(" Deleting #{slug} (#{product_id})...")
|
||||
Client.delete_sync_product(product_id)
|
||||
Process.sleep(200)
|
||||
end)
|
||||
|
||||
IO.puts("Cleanup complete.")
|
||||
end
|
||||
|
||||
IO.puts("")
|
||||
IO.puts("Printful generation complete!")
|
||||
|
||||
%{mockups: mockup_results, products: product_results}
|
||||
end
|
||||
|
||||
# ============================================================================
|
||||
# Mockup generation (v2 mockup tasks)
|
||||
# ============================================================================
|
||||
|
||||
defp generate_mockups(definitions) do
|
||||
IO.puts("Generating mockup images...")
|
||||
IO.puts("")
|
||||
|
||||
definitions
|
||||
|> Enum.with_index()
|
||||
|> Enum.map(fn {product_def, index} ->
|
||||
if index > 0, do: Process.sleep(@api_delay_ms)
|
||||
IO.puts(" Mockup: #{product_def.name}")
|
||||
|
||||
case generate_single_mockup(product_def) do
|
||||
{:ok, paths} ->
|
||||
IO.puts(" ✓ #{length(paths)} mockup(s)")
|
||||
{:ok, product_def.slug, paths}
|
||||
|
||||
{:error, reason} ->
|
||||
IO.puts(" ✗ #{inspect(reason)}")
|
||||
{:error, product_def.slug, reason}
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp generate_single_mockup(product_def) do
|
||||
config = catalog_config()[product_def.product_type]
|
||||
|
||||
variant_ids = Enum.map(config.variant_ids, fn {id, _label} -> id end)
|
||||
|
||||
task_body = %{
|
||||
format: "jpg",
|
||||
products: [
|
||||
%{
|
||||
source: "catalog",
|
||||
catalog_product_id: config.catalog_product_id,
|
||||
catalog_variant_ids: variant_ids,
|
||||
placements: [
|
||||
%{
|
||||
placement: "front",
|
||||
technique: config.technique,
|
||||
layers: [
|
||||
%{type: "file", url: product_def.artwork_url}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
with {:ok, task_data} <- Client.create_mockup_task(task_body),
|
||||
task_id <- extract_task_id(task_data),
|
||||
{:ok, completed} <- poll_mockup_task(task_id),
|
||||
mockup_urls <- extract_mockup_urls(completed) do
|
||||
download_mockups(product_def.slug, mockup_urls)
|
||||
end
|
||||
end
|
||||
|
||||
# The v2 mockup API sometimes returns a list of tasks and sometimes a single
# task object; normalize either shape down to the task id. (For a map,
# `task["id"]` and matching `%{"id" => id}` are equivalent, so the two map
# clauses of the original are merged into one.)
defp extract_task_id(tasks) when is_list(tasks), do: hd(tasks)["id"]
defp extract_task_id(task), do: task["id"]
|
||||
|
||||
defp poll_mockup_task(task_id) do
|
||||
poll_mockup_task(task_id, 0)
|
||||
end
|
||||
|
||||
defp poll_mockup_task(_task_id, attempt) when attempt >= @max_poll_attempts do
|
||||
{:error, :timeout}
|
||||
end
|
||||
|
||||
defp poll_mockup_task(task_id, attempt) do
|
||||
Process.sleep(@poll_interval_ms)
|
||||
|
||||
case Client.get_mockup_tasks(%{"id" => task_id}) do
|
||||
{:ok, data} ->
|
||||
task = find_task(data, task_id)
|
||||
|
||||
case task["status"] do
|
||||
"completed" ->
|
||||
{:ok, task}
|
||||
|
||||
"failed" ->
|
||||
{:error, {:mockup_failed, task["failure_reasons"]}}
|
||||
|
||||
_pending ->
|
||||
IO.puts(" Polling... (#{attempt + 1}/#{@max_poll_attempts})")
|
||||
poll_mockup_task(task_id, attempt + 1)
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
{:error, reason}
|
||||
end
|
||||
end
|
||||
|
||||
# Locate our task in a poll response. List responses may contain other
# tasks, so search by id and fall back to the first entry; a bare map
# response is the task itself.
defp find_task(tasks, task_id) when is_list(tasks) do
  Enum.find(tasks, &(&1["id"] == task_id)) || hd(tasks)
end

defp find_task(task, _task_id) when is_map(task), do: task
|
||||
|
||||
# Flatten every variant's mockup list into a unique list of image URLs,
# preserving first-occurrence order and dropping entries without a URL.
defp extract_mockup_urls(task) do
  for cvm <- task["catalog_variant_mockups"] || [],
      mockup <- cvm["mockups"] || [],
      url = mockup["mockup_url"],
      not is_nil(url),
      uniq: true do
    url
  end
end
|
||||
|
||||
# ============================================================================
|
||||
# Image download (shared logic with Printify generator)
|
||||
# ============================================================================
|
||||
|
||||
defp download_mockups(product_slug, mockup_urls) do
|
||||
File.mkdir_p!(@output_dir)
|
||||
|
||||
results =
|
||||
mockup_urls
|
||||
|> Enum.with_index(1)
|
||||
|> Enum.map(fn {url, index} ->
|
||||
basename = "#{product_slug}-#{index}"
|
||||
source_path = Path.join(@output_dir, "#{basename}.webp")
|
||||
IO.puts(" Processing mockup #{index}...")
|
||||
|
||||
temp_path = Path.join(System.tmp_dir!(), "#{basename}-pf-temp.jpg")
|
||||
|
||||
with {:ok, _} <- download_file(url, temp_path),
|
||||
{:ok, image_data} <- File.read(temp_path),
|
||||
{:ok, webp_data, source_width, _} <- Optimizer.to_optimized_webp(image_data),
|
||||
:ok <- File.write(source_path, webp_data),
|
||||
{:ok, _} <- Optimizer.process_file(webp_data, basename, @output_dir) do
|
||||
File.rm(temp_path)
|
||||
IO.puts(" Saved #{basename} (#{source_width}px)")
|
||||
{:ok, basename, source_width}
|
||||
else
|
||||
{:error, reason} ->
|
||||
File.rm(temp_path)
|
||||
{:error, {url, reason}}
|
||||
end
|
||||
end)
|
||||
|
||||
successful = Enum.filter(results, &match?({:ok, _, _}, &1))
|
||||
{:ok, Enum.map(successful, fn {:ok, basename, _} -> basename end)}
|
||||
end
|
||||
|
||||
# Download without auth headers (mockup URLs are pre-signed S3 URLs).
# Streams the response body straight to disk and removes the partial file
# on any failure, so callers never observe a truncated download.
defp download_file(url, output_path) do
  response = Req.get(url, into: File.stream!(output_path), receive_timeout: 60_000)

  case response do
    {:ok, %Req.Response{status: status}} when status >= 200 and status <= 299 ->
      {:ok, output_path}

    {:ok, %Req.Response{status: status}} ->
      File.rm(output_path)
      {:error, {:http_error, status}}

    {:error, reason} ->
      File.rm(output_path)
      {:error, reason}
  end
end
|
||||
|
||||
# ============================================================================
|
||||
# Sync product creation (v1 store products)
|
||||
# ============================================================================
|
||||
|
||||
defp create_products(definitions) do
|
||||
IO.puts("Creating sync products...")
|
||||
IO.puts("")
|
||||
|
||||
definitions
|
||||
|> Enum.with_index()
|
||||
|> Enum.map(fn {product_def, index} ->
|
||||
if index > 0, do: Process.sleep(@api_delay_ms)
|
||||
IO.puts(" Product: #{product_def.name}")
|
||||
|
||||
case create_single_product(product_def) do
|
||||
{:ok, product_id} ->
|
||||
IO.puts(" ✓ Created (ID: #{product_id})")
|
||||
{:ok, product_def.slug, product_id}
|
||||
|
||||
{:error, reason} ->
|
||||
IO.puts(" ✗ #{inspect(reason)}")
|
||||
{:error, product_def.slug, reason}
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp create_single_product(product_def) do
|
||||
config = catalog_config()[product_def.product_type]
|
||||
|
||||
price =
|
||||
product_def.price
|
||||
|> Decimal.new()
|
||||
|> Decimal.div(100)
|
||||
|> Decimal.to_string()
|
||||
|
||||
sync_variants =
|
||||
Enum.map(config.variant_ids, fn {variant_id, _label} ->
|
||||
%{
|
||||
variant_id: variant_id,
|
||||
retail_price: price,
|
||||
files: [%{url: product_def.artwork_url, type: "default"}]
|
||||
}
|
||||
end)
|
||||
|
||||
product_data = %{
|
||||
sync_product: %{name: product_def.name},
|
||||
sync_variants: sync_variants
|
||||
}
|
||||
|
||||
with_retry(fn -> Client.create_sync_product(product_data) end)
|
||||
|> case do
|
||||
{:ok, result} -> {:ok, result["id"]}
|
||||
{:error, reason} -> {:error, reason}
|
||||
end
|
||||
end
|
||||
|
||||
# ============================================================================
|
||||
# Retry logic for rate limits
|
||||
# ============================================================================
|
||||
|
||||
# Runs `fun`, retrying on HTTP 429 with linear backoff (1, 2, 3 minutes
# for attempts 1..@max_retries). Any other result — success or error —
# is returned unchanged; after @max_retries the 429 itself is returned.
defp with_retry(fun, attempt \\ 1) do
  result = fun.()

  case result do
    {:error, {429, _body}} when attempt <= @max_retries ->
      wait = attempt * 60_000

      IO.puts(
        "    Rate limited, waiting #{div(wait, 1000)}s (retry #{attempt}/#{@max_retries})..."
      )

      Process.sleep(wait)
      with_retry(fun, attempt + 1)

    _other ->
      result
  end
end
|
||||
end
|
||||
387
lib/berrypod/orders.ex
Normal file
387
lib/berrypod/orders.ex
Normal file
@@ -0,0 +1,387 @@
|
||||
defmodule Berrypod.Orders do
|
||||
@moduledoc """
|
||||
The Orders context.
|
||||
|
||||
Handles order creation, payment status tracking, fulfilment submission,
|
||||
and order retrieval. Payment-provider agnostic — all Stripe-specific
|
||||
logic lives in controllers.
|
||||
"""
|
||||
|
||||
import Ecto.Query
|
||||
alias Berrypod.Repo
|
||||
alias Berrypod.Orders.{Order, OrderItem, OrderNotifier}
|
||||
alias Berrypod.Products
|
||||
alias Berrypod.Providers.Provider
|
||||
|
||||
require Logger
|
||||
|
||||
@doc """
|
||||
Lists orders, optionally filtered by payment status.
|
||||
|
||||
## Options
|
||||
|
||||
* `:status` - filter by payment_status ("paid", "pending", "failed", "refunded")
|
||||
Pass nil or "all" to return all orders.
|
||||
|
||||
Returns orders sorted by newest first, with items preloaded.
|
||||
"""
|
||||
def list_orders(opts \\ []) do
|
||||
status = opts[:status]
|
||||
|
||||
Order
|
||||
|> maybe_filter_status(status)
|
||||
|> order_by([o], desc: o.inserted_at)
|
||||
|> preload(:items)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
defp maybe_filter_status(query, nil), do: query
|
||||
defp maybe_filter_status(query, "all"), do: query
|
||||
defp maybe_filter_status(query, status), do: where(query, [o], o.payment_status == ^status)
|
||||
|
||||
@doc """
|
||||
Returns a map of payment_status => count for all orders.
|
||||
"""
|
||||
def count_orders_by_status do
|
||||
Order
|
||||
|> group_by(:payment_status)
|
||||
|> select([o], {o.payment_status, count(o.id)})
|
||||
|> Repo.all()
|
||||
|> Map.new()
|
||||
end
|
||||
|
||||
@doc """
Returns total revenue (in minor units) from paid orders.

SQL `SUM` over zero rows is `NULL`, so an empty shop reports `0`.
"""
def total_revenue do
  query = from(o in Order, where: o.payment_status == "paid", select: sum(o.total))

  Repo.one(query) || 0
end
|
||||
|
||||
@doc """
|
||||
Creates an order with line items from hydrated cart data.
|
||||
|
||||
Expects a map with :items (list of hydrated cart item maps) and optional
|
||||
fields like :customer_email. Returns {:ok, order} with items preloaded.
|
||||
"""
|
||||
def create_order(attrs) do
|
||||
items = attrs[:items] || []
|
||||
|
||||
subtotal = Enum.reduce(items, 0, fn item, acc -> acc + item.price * item.quantity end)
|
||||
|
||||
order_attrs = %{
|
||||
order_number: generate_order_number(),
|
||||
subtotal: subtotal,
|
||||
total: subtotal,
|
||||
currency: Map.get(attrs, :currency, "gbp"),
|
||||
customer_email: attrs[:customer_email],
|
||||
payment_status: "pending"
|
||||
}
|
||||
|
||||
Repo.transaction(fn ->
|
||||
case %Order{} |> Order.changeset(order_attrs) |> Repo.insert() do
|
||||
{:ok, order} ->
|
||||
order_items =
|
||||
Enum.map(items, fn item ->
|
||||
%{
|
||||
order_id: order.id,
|
||||
variant_id: item.variant_id,
|
||||
product_name: item.name,
|
||||
variant_title: item.variant,
|
||||
quantity: item.quantity,
|
||||
unit_price: item.price,
|
||||
inserted_at: order.inserted_at,
|
||||
updated_at: order.updated_at
|
||||
}
|
||||
end)
|
||||
|
||||
Repo.insert_all(OrderItem, order_items)
|
||||
|
||||
Repo.preload(order, :items)
|
||||
|
||||
{:error, changeset} ->
|
||||
Repo.rollback(changeset)
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Sets the stripe_session_id on an order after creating the Stripe checkout session.
|
||||
"""
|
||||
def set_stripe_session(order, session_id) do
|
||||
order
|
||||
|> Order.changeset(%{stripe_session_id: session_id})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Finds an order by its Stripe checkout session ID.
|
||||
"""
|
||||
def get_order_by_stripe_session(session_id) do
|
||||
Order
|
||||
|> where([o], o.stripe_session_id == ^session_id)
|
||||
|> preload(:items)
|
||||
|> Repo.one()
|
||||
end
|
||||
|
||||
@doc """
Marks an order as paid and stores the Stripe payment intent ID.

Idempotent: an already-paid order is returned unchanged as `{:ok, order}`,
so duplicate webhook deliveries are harmless.
"""
def mark_paid(%{payment_status: "paid"} = order, _payment_intent_id), do: {:ok, order}

def mark_paid(order, payment_intent_id) do
  attrs = %{
    payment_status: "paid",
    stripe_payment_intent_id: payment_intent_id
  }

  order
  |> Order.changeset(attrs)
  |> Repo.update()
end
|
||||
|
||||
@doc """
|
||||
Marks an order as failed.
|
||||
"""
|
||||
def mark_failed(order) do
|
||||
order
|
||||
|> Order.changeset(%{payment_status: "failed"})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
|
||||
Gets an order by ID with items preloaded.
|
||||
"""
|
||||
def get_order(id) do
|
||||
Order
|
||||
|> preload(:items)
|
||||
|> Repo.get(id)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates an order with the given attributes.
|
||||
"""
|
||||
def update_order(order, attrs) do
|
||||
order
|
||||
|> Order.changeset(attrs)
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
@doc """
Generates a human-readable order number.

Format: SS-YYMMDD-XXXX where XXXX is a random alphanumeric string.
"""
# NOTE(review): the "SS-" prefix appears to be a leftover from the
# SimpleshopTheme → Berrypod rename — confirm whether it should change.
# Preserved here because altering it changes customer-visible order numbers.
def generate_order_number do
  stamp = Calendar.strftime(Date.utc_today(), "%y%m%d")
  suffix = Base.encode16(:crypto.strong_rand_bytes(2))

  "SS-" <> stamp <> "-" <> suffix
end
|
||||
|
||||
# =============================================================================
|
||||
# Fulfilment
|
||||
# =============================================================================
|
||||
|
||||
@doc """
|
||||
Submits an order to the fulfilment provider.
|
||||
|
||||
Looks up product variant data from order items, builds the provider payload,
|
||||
and calls the provider's submit_order callback. Idempotent — returns {:ok, order}
|
||||
if already submitted.
|
||||
"""
|
||||
def submit_to_provider(%Order{provider_order_id: pid} = order) when not is_nil(pid) do
|
||||
{:ok, order}
|
||||
end
|
||||
|
||||
def submit_to_provider(%Order{} = order) do
|
||||
order = Repo.preload(order, :items)
|
||||
|
||||
with {:ok, conn} <- get_provider_connection_for_order(order),
|
||||
{:ok, provider} <- Provider.for_connection(conn),
|
||||
{:ok, enriched_items} <- enrich_items(order.items),
|
||||
order_data <- build_submission_data(order, enriched_items),
|
||||
{:ok, %{provider_order_id: pid}} <- provider.submit_order(conn, order_data) do
|
||||
update_fulfilment(order, %{
|
||||
fulfilment_status: "submitted",
|
||||
provider_order_id: pid,
|
||||
fulfilment_error: nil,
|
||||
submitted_at: DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
})
|
||||
else
|
||||
{:error, reason} ->
|
||||
error_msg = format_submission_error(reason)
|
||||
|
||||
update_fulfilment(order, %{
|
||||
fulfilment_status: "failed",
|
||||
fulfilment_error: error_msg
|
||||
})
|
||||
|
||||
{:error, reason}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Polls the provider for the current fulfilment status of an order.
|
||||
Updates tracking info and timestamps on status transitions.
|
||||
"""
|
||||
def refresh_fulfilment_status(%Order{provider_order_id: nil} = order), do: {:ok, order}
|
||||
|
||||
def refresh_fulfilment_status(%Order{} = order) do
|
||||
order = Repo.preload(order, :items)
|
||||
|
||||
with {:ok, conn} <- get_provider_connection_for_order(order),
|
||||
{:ok, provider} <- Provider.for_connection(conn),
|
||||
{:ok, status_data} <- provider.get_order_status(conn, order.provider_order_id) do
|
||||
attrs =
|
||||
%{
|
||||
fulfilment_status: status_data.status,
|
||||
provider_status: status_data.provider_status,
|
||||
tracking_number: status_data.tracking_number,
|
||||
tracking_url: status_data.tracking_url
|
||||
}
|
||||
|> maybe_set_timestamp(order)
|
||||
|
||||
with {:ok, updated_order} <- update_fulfilment(order, attrs) do
|
||||
if attrs[:fulfilment_status] == "shipped" and order.fulfilment_status != "shipped" do
|
||||
OrderNotifier.deliver_shipping_notification(updated_order)
|
||||
end
|
||||
|
||||
{:ok, updated_order}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
Updates an order's fulfilment fields.
"""
def update_fulfilment(%Order{} = order, attrs) do
  Repo.update(Order.fulfilment_changeset(order, attrs))
end

@doc """
Lists orders that need fulfilment status polling (submitted or processing).

Only includes orders that have a provider order id; oldest submissions
come first so they are refreshed before newer ones.
"""
def list_submitted_orders do
  Order
  |> where([o], o.fulfilment_status in ["submitted", "processing"])
  |> where([o], not is_nil(o.provider_order_id))
  |> order_by([o], asc: o.submitted_at)
  |> preload(:items)
  |> Repo.all()
end

@doc """
Gets an order by its order number.
"""
def get_order_by_number(order_number) do
  Order
  |> where(order_number: ^order_number)
  |> preload(:items)
  |> Repo.one()
end
|
||||
|
||||
# Resolves the provider connection for an order via its first line item's
# variant. All items in an order are assumed to share one provider
# connection (single active provider) — TODO confirm against callers.
#
# Returns {:ok, conn} or a tagged error:
#   :no_items               — order has no line items (previously crashed
#                             with a nil dereference; now handled)
#   :variant_not_found      — variant was removed from the catalog
#   :no_provider_connection — product has no connection configured
#   :provider_disabled      — connection exists but is disabled
defp get_provider_connection_for_order(%Order{items: []}), do: {:error, :no_items}

defp get_provider_connection_for_order(%Order{items: [first_item | _rest]}) do
  variants_map = Products.get_variants_with_products([first_item.variant_id])

  case Map.get(variants_map, first_item.variant_id) do
    nil ->
      {:error, :variant_not_found}

    variant ->
      case Products.get_provider_connection(variant.product.provider_connection_id) do
        nil -> {:error, :no_provider_connection}
        %{enabled: false} -> {:error, :provider_disabled}
        conn -> {:ok, conn}
      end
  end
end
|
||||
|
||||
# Pairs each order item with its current catalog variant, looked up in one
# batched query. Returns {:ok, [%{item: item, variant: variant}]} in the
# original item order, or the first {:error, {:variant_not_found, id, name}}
# encountered.
defp enrich_items(items) do
  variants_map =
    items
    |> Enum.map(& &1.variant_id)
    |> Products.get_variants_with_products()

  items
  |> Enum.reduce_while([], fn item, acc ->
    case variants_map[item.variant_id] do
      nil ->
        {:halt, {:error, {:variant_not_found, item.variant_id, item.product_name}}}

      variant ->
        {:cont, [%{item: item, variant: variant} | acc]}
    end
  end)
  |> case do
    {:error, _} = error -> error
    enriched -> {:ok, Enum.reverse(enriched)}
  end
end
|
||||
|
||||
defp build_submission_data(order, enriched_items) do
|
||||
%{
|
||||
order_number: order.order_number,
|
||||
customer_email: order.customer_email,
|
||||
shipping_address: order.shipping_address,
|
||||
line_items:
|
||||
Enum.map(enriched_items, fn %{item: item, variant: variant} ->
|
||||
%{
|
||||
provider_product_id: variant.product.provider_product_id,
|
||||
provider_variant_id: variant.provider_variant_id,
|
||||
quantity: item.quantity
|
||||
}
|
||||
end)
|
||||
}
|
||||
end
|
||||
|
||||
# Translates internal failure reasons into the human-readable strings
# stored on the order's fulfilment_error field.
defp format_submission_error({:variant_not_found, _id, name}),
  do: "Variant for '#{name}' no longer exists in the product catalog"

defp format_submission_error(:variant_not_found),
  do: "Order variant no longer exists in the product catalog"

defp format_submission_error(:no_provider_connection), do: "No fulfilment provider connected"

defp format_submission_error(:provider_disabled), do: "Fulfilment provider is disabled"

defp format_submission_error(:no_api_key), do: "Provider API key is missing"

defp format_submission_error(:no_shop_id), do: "Provider shop ID is not configured"

defp format_submission_error({status, body}) when is_integer(status) do
  detail =
    if is_map(body) do
      body["message"] || body["error"]
    else
      inspect(body)
    end

  "Provider API error (#{status}): #{detail}"
end

# Catch-all for reasons introduced elsewhere (e.g. new tagged errors).
defp format_submission_error(other), do: "Submission failed: #{inspect(other)}"
|
||||
|
||||
# Stamps shipped_at / delivered_at the first time the order reaches that
# status; an already-set timestamp is never overwritten. The tiny maybe_set/3
# helper from the original is inlined here.
defp maybe_set_timestamp(attrs, order) do
  attrs
  |> stamp_if(:shipped_at, attrs[:fulfilment_status] == "shipped" and is_nil(order.shipped_at))
  |> stamp_if(
    :delivered_at,
    attrs[:fulfilment_status] == "delivered" and is_nil(order.delivered_at)
  )
end

defp stamp_if(attrs, key, true) do
  Map.put(attrs, key, DateTime.utc_now() |> DateTime.truncate(:second))
end

defp stamp_if(attrs, _key, false), do: attrs
|
||||
end
|
||||
49
lib/berrypod/orders/fulfilment_status_worker.ex
Normal file
49
lib/berrypod/orders/fulfilment_status_worker.ex
Normal file
@@ -0,0 +1,49 @@
|
||||
defmodule Berrypod.Orders.FulfilmentStatusWorker do
|
||||
@moduledoc """
|
||||
Oban Cron worker that polls the fulfilment provider for status updates.
|
||||
|
||||
Runs every 30 minutes as a fallback for missed webhook events.
|
||||
Only checks orders that are submitted or processing (i.e. awaiting
|
||||
further status transitions).
|
||||
"""
|
||||
|
||||
use Oban.Worker, queue: :sync, max_attempts: 1
|
||||
|
||||
alias Berrypod.Orders
|
||||
|
||||
require Logger
|
||||
|
||||
@impl Oban.Worker
|
||||
def perform(%Oban.Job{}) do
|
||||
orders = Orders.list_submitted_orders()
|
||||
|
||||
if orders == [] do
|
||||
:ok
|
||||
else
|
||||
Logger.info("Polling fulfilment status for #{length(orders)} order(s)")
|
||||
|
||||
Enum.each(orders, fn order ->
|
||||
refresh_order(order)
|
||||
Process.sleep(200)
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp refresh_order(order) do
|
||||
case Orders.refresh_fulfilment_status(order) do
|
||||
{:ok, updated} ->
|
||||
if updated.fulfilment_status != order.fulfilment_status do
|
||||
Logger.info(
|
||||
"Order #{order.order_number} status: #{order.fulfilment_status} → #{updated.fulfilment_status}"
|
||||
)
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning(
|
||||
"Failed to refresh status for order #{order.order_number}: #{inspect(reason)}"
|
||||
)
|
||||
end
|
||||
end
|
||||
end
|
||||
80
lib/berrypod/orders/order.ex
Normal file
80
lib/berrypod/orders/order.ex
Normal file
@@ -0,0 +1,80 @@
|
||||
defmodule Berrypod.Orders.Order do
  @moduledoc """
  Ecto schema for a customer order.

  Tracks the payment side (Stripe session/intent, totals in integer minor
  units, shipping address) and the fulfilment side (provider order id,
  tracking, and lifecycle timestamps).
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  @payment_statuses ~w(pending paid failed refunded)
  @fulfilment_statuses ~w(unfulfilled submitted processing shipped delivered failed cancelled)

  # Fields accepted by the base changeset (checkout/payment data).
  @order_fields [
    :order_number,
    :stripe_session_id,
    :stripe_payment_intent_id,
    :payment_status,
    :customer_email,
    :shipping_address,
    :subtotal,
    :shipping_cost,
    :total,
    :currency,
    :metadata
  ]

  # Fields accepted by the fulfilment changeset (provider-driven updates).
  @fulfilment_fields [
    :fulfilment_status,
    :provider_order_id,
    :provider_status,
    :fulfilment_error,
    :tracking_number,
    :tracking_url,
    :submitted_at,
    :shipped_at,
    :delivered_at
  ]

  @doc "Returns the list of valid fulfilment status strings."
  def fulfilment_statuses, do: @fulfilment_statuses

  schema "orders" do
    field :order_number, :string
    field :stripe_session_id, :string
    field :stripe_payment_intent_id, :string
    field :payment_status, :string, default: "pending"
    field :customer_email, :string
    field :shipping_address, :map, default: %{}
    field :subtotal, :integer
    field :shipping_cost, :integer
    field :total, :integer
    field :currency, :string, default: "gbp"
    field :metadata, :map, default: %{}

    # Fulfilment
    field :fulfilment_status, :string, default: "unfulfilled"
    field :provider_order_id, :string
    field :provider_status, :string
    field :fulfilment_error, :string
    field :tracking_number, :string
    field :tracking_url, :string
    field :submitted_at, :utc_datetime
    field :shipped_at, :utc_datetime
    field :delivered_at, :utc_datetime

    has_many :items, Berrypod.Orders.OrderItem

    timestamps(type: :utc_datetime)
  end

  @doc """
  Changeset for creating/updating the payment side of an order.

  Requires order_number, subtotal, total and currency; amounts must be
  non-negative; order_number and stripe_session_id are unique.
  """
  def changeset(order, attrs) do
    order
    |> cast(attrs, @order_fields)
    |> validate_required([:order_number, :subtotal, :total, :currency])
    |> validate_inclusion(:payment_status, @payment_statuses)
    |> validate_number(:subtotal, greater_than_or_equal_to: 0)
    |> validate_number(:total, greater_than_or_equal_to: 0)
    |> unique_constraint(:order_number)
    |> unique_constraint(:stripe_session_id)
  end

  @doc """
  Changeset for fulfilment-state transitions driven by the provider.
  """
  def fulfilment_changeset(order, attrs) do
    order
    |> cast(attrs, @fulfilment_fields)
    |> validate_inclusion(:fulfilment_status, @fulfilment_statuses)
  end
end
|
||||
27
lib/berrypod/orders/order_item.ex
Normal file
27
lib/berrypod/orders/order_item.ex
Normal file
@@ -0,0 +1,27 @@
|
||||
defmodule Berrypod.Orders.OrderItem do
  @moduledoc """
  Line item on an order: the variant snapshot (name, variant title,
  unit price — integer, presumably minor currency units; confirm against
  `Cart.format_price/1`) plus quantity, captured at checkout time.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  @cast_fields [:variant_id, :product_name, :variant_title, :quantity, :unit_price, :order_id]
  @required_fields [:variant_id, :product_name, :quantity, :unit_price]

  schema "order_items" do
    field :variant_id, :string
    field :product_name, :string
    field :variant_title, :string
    field :quantity, :integer
    field :unit_price, :integer

    belongs_to :order, Berrypod.Orders.Order

    timestamps(type: :utc_datetime)
  end

  @doc """
  Builds a changeset for an order item.

  Quantity must be strictly positive; unit price must be non-negative.
  """
  def changeset(item, attrs) do
    item
    |> cast(attrs, @cast_fields)
    |> validate_required(@required_fields)
    |> validate_number(:quantity, greater_than: 0)
    |> validate_number(:unit_price, greater_than_or_equal_to: 0)
  end
end
|
||||
130
lib/berrypod/orders/order_notifier.ex
Normal file
130
lib/berrypod/orders/order_notifier.ex
Normal file
@@ -0,0 +1,130 @@
|
||||
defmodule Berrypod.Orders.OrderNotifier do
  @moduledoc """
  Sends transactional emails for orders.

  Order confirmation after payment, shipping notification when dispatched.

  The sender can be overridden with `config :berrypod, :mailer_sender,
  {"Name", "email@host"}`; when unset, the historical default is used.
  """

  import Swoosh.Email

  alias Berrypod.Cart
  alias Berrypod.Mailer

  require Logger

  # Previous hard-coded sender, kept as the default so existing
  # deployments behave identically.
  @default_sender {"Berrypod", "contact@example.com"}

  @doc """
  Sends an order confirmation email after successful payment.

  Skips silently if the order has no customer email.
  """
  def deliver_order_confirmation(%{customer_email: nil}), do: {:ok, :no_email}
  def deliver_order_confirmation(%{customer_email: ""}), do: {:ok, :no_email}

  def deliver_order_confirmation(order) do
    subject = "Order confirmed - #{order.order_number}"

    body = """
    ==============================

    Thanks for your order!

    Order: #{order.order_number}

    #{format_items(order.items)}
    Total: #{Cart.format_price(order.total)}

    #{format_shipping_address(order.shipping_address)}
    We'll send you another email when your order ships.

    ==============================
    """

    deliver(order.customer_email, subject, body)
  end

  @doc """
  Sends a shipping notification with tracking info.

  Skips silently if the order has no customer email.
  """
  def deliver_shipping_notification(%{customer_email: nil}), do: {:ok, :no_email}
  def deliver_shipping_notification(%{customer_email: ""}), do: {:ok, :no_email}

  def deliver_shipping_notification(order) do
    subject = "Your order has shipped - #{order.order_number}"

    body = """
    ==============================

    Good news! Your order #{order.order_number} is on its way.

    #{format_tracking(order)}
    Thanks for shopping with us.

    ==============================
    """

    deliver(order.customer_email, subject, body)
  end

  # --- Private ---

  # Builds and sends a plain-text email. Delivery errors are logged and
  # returned to the caller rather than raised.
  defp deliver(recipient, subject, body) do
    email =
      new()
      |> to(recipient)
      |> from(sender())
      |> subject(subject)
      |> text_body(body)

    case Mailer.deliver(email) do
      {:ok, _metadata} = result ->
        result

      {:error, reason} = error ->
        Logger.warning("Failed to send email to #{recipient}: #{inspect(reason)}")
        error
    end
  end

  # Read the sender at runtime (not compile time) so deployments can
  # configure it without recompiling.
  defp sender do
    Application.get_env(:berrypod, :mailer_sender, @default_sender)
  end

  # One line per item: "  2x Mug (Large) - £12.00"-style.
  defp format_items(items) when is_list(items) do
    items
    |> Enum.map_join("\n", fn item ->
      price = Cart.format_price(item.unit_price * item.quantity)
      "  #{item.quantity}x #{item.product_name} (#{item.variant_title}) - #{price}"
    end)
  end

  # Non-list (e.g. unloaded association) renders as nothing.
  defp format_items(_), do: ""

  # Renders the shipping address block, dropping missing/blank lines.
  # Assumes Stripe-style string keys ("name", "line1", ...) — confirm
  # against the checkout webhook payload.
  defp format_shipping_address(address) when is_map(address) and map_size(address) > 0 do
    lines =
      [
        address["name"],
        address["line1"],
        address["line2"],
        [address["city"], address["postal_code"]] |> Enum.reject(&is_nil/1) |> Enum.join(" "),
        address["state"],
        address["country"]
      ]
      |> Enum.reject(&(is_nil(&1) or &1 == ""))
      |> Enum.map_join("\n", &"  #{&1}")

    "Shipping to:\n#{lines}\n\n"
  end

  defp format_shipping_address(_), do: ""

  # Tracking number with URL when both exist, number alone otherwise,
  # or a placeholder when the carrier hasn't reported yet.
  defp format_tracking(order) do
    cond do
      order.tracking_url not in [nil, ""] and order.tracking_number not in [nil, ""] ->
        "Tracking: #{order.tracking_number}\n#{order.tracking_url}\n\n"

      order.tracking_number not in [nil, ""] ->
        "Tracking: #{order.tracking_number}\n\n"

      true ->
        "Tracking details will follow once the carrier updates.\n\n"
    end
  end
end
|
||||
56
lib/berrypod/orders/order_submission_worker.ex
Normal file
56
lib/berrypod/orders/order_submission_worker.ex
Normal file
@@ -0,0 +1,56 @@
|
||||
defmodule Berrypod.Orders.OrderSubmissionWorker do
  @moduledoc """
  Oban worker for submitting paid orders to the fulfilment provider.

  Enqueued after Stripe webhook confirms payment. Guards against
  missing orders, unpaid orders, and already-submitted orders.
  Retries up to 3 times with backoff for transient failures.
  """

  use Oban.Worker, queue: :checkout, max_attempts: 3

  alias Berrypod.Orders

  require Logger

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"order_id" => order_id}}) do
    order_id
    |> Orders.get_order()
    |> handle_order(order_id)
  end

  @doc """
  Enqueues a submission job for the given order id.
  """
  def enqueue(order_id) do
    Oban.insert(new(%{order_id: order_id}))
  end

  # --- Guard clauses: decide skip, cancel, retry, or submit. ---

  # Unknown order: cancel, a retry can never succeed.
  defp handle_order(nil, order_id) do
    Logger.warning("Order submission: order #{order_id} not found")
    {:cancel, :order_not_found}
  end

  # Payment never completed: cancel rather than retry.
  defp handle_order(%{payment_status: status}, order_id) when status != "paid" do
    Logger.warning("Order submission: order #{order_id} not paid (#{status})")
    {:cancel, :not_paid}
  end

  # Idempotency: already has a provider order, nothing to do.
  defp handle_order(%{provider_order_id: pid}, order_id) when not is_nil(pid) do
    Logger.info("Order submission: order #{order_id} already submitted")
    :ok
  end

  # Address may arrive via a later webhook, so this is a retryable error.
  defp handle_order(%{shipping_address: addr}, order_id) when addr == %{} or is_nil(addr) do
    Logger.warning("Order submission: order #{order_id} has no shipping address, will retry")
    {:error, :no_shipping_address}
  end

  # All guards passed: hand the order to the provider.
  defp handle_order(order, _order_id) do
    case Orders.submit_to_provider(order) do
      {:ok, updated} ->
        Logger.info(
          "Order #{updated.order_number} submitted to provider (#{updated.provider_order_id})"
        )

        :ok

      {:error, reason} ->
        Logger.error("Order #{order.order_number} submission failed: #{inspect(reason)}")
        {:error, reason}
    end
  end
end
|
||||
808
lib/berrypod/products.ex
Normal file
808
lib/berrypod/products.ex
Normal file
@@ -0,0 +1,808 @@
|
||||
defmodule Berrypod.Products do
|
||||
@moduledoc """
|
||||
The Products context.
|
||||
|
||||
Manages products synced from POD providers, including provider connections,
|
||||
products, images, and variants.
|
||||
"""
|
||||
|
||||
import Ecto.Query
|
||||
alias Berrypod.Repo
|
||||
alias Berrypod.Products.{ProviderConnection, Product, ProductImage, ProductVariant}
|
||||
|
||||
# =============================================================================
|
||||
# Provider Connections
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Returns the list of provider connections.
"""
def list_provider_connections, do: Repo.all(ProviderConnection)

@doc """
Gets a single provider connection, or `nil` if it does not exist.
"""
def get_provider_connection(id), do: Repo.get(ProviderConnection, id)

@doc """
Gets a single provider connection, raising if not found.
"""
def get_provider_connection!(id), do: Repo.get!(ProviderConnection, id)

@doc """
Gets a provider connection by type, or `nil` if none exists.
"""
def get_provider_connection_by_type(provider_type) do
  Repo.get_by(ProviderConnection, provider_type: provider_type)
end

@doc """
Creates a provider connection.
"""
def create_provider_connection(attrs \\ %{}) do
  Repo.insert(ProviderConnection.changeset(%ProviderConnection{}, attrs))
end

@doc """
Updates a provider connection.
"""
def update_provider_connection(%ProviderConnection{} = conn, attrs) do
  Repo.update(ProviderConnection.changeset(conn, attrs))
end

@doc """
Deletes a provider connection.
"""
def delete_provider_connection(%ProviderConnection{} = conn), do: Repo.delete(conn)

@doc """
Updates the sync status of a provider connection, optionally stamping
`last_synced_at` when `synced_at` is given.
"""
def update_sync_status(%ProviderConnection{} = conn, status, synced_at \\ nil) do
  base = %{sync_status: status}
  attrs = if synced_at, do: Map.put(base, :last_synced_at, synced_at), else: base

  conn
  |> ProviderConnection.sync_changeset(attrs)
  |> Repo.update()
end

@doc """
Resets any stale "syncing" status to "idle".

Called on application startup to recover from interrupted syncs
(e.g., node shutdown while sync was running).
"""
def reset_stale_sync_status do
  ProviderConnection
  |> where([c], c.sync_status == "syncing")
  |> Repo.update_all(set: [sync_status: "idle"])
end

@doc """
Returns the count of products for a provider connection (0 for `nil`).
"""
def count_products_for_connection(nil), do: 0

def count_products_for_connection(connection_id) do
  Product
  |> where([p], p.provider_connection_id == ^connection_id)
  |> Repo.aggregate(:count)
end

@doc """
Enqueues a product sync job for the given provider connection.
Returns `{:ok, job}` or `{:error, changeset}`.
"""
def enqueue_sync(%ProviderConnection{} = conn) do
  Berrypod.Sync.ProductSyncWorker.enqueue(conn.id)
end
|
||||
|
||||
# =============================================================================
|
||||
# Storefront queries
|
||||
# =============================================================================
|
||||
|
||||
# Listing pages only need images (price/stock are denormalized on product)
@listing_preloads [images: :image]
# Detail page also needs variants for the variant selector
@detail_preloads [images: :image, variants: []]

# Base query for everything the storefront may show.
defp storefront_base do
  where(Product, [p], p.visible == true and p.status == "active")
end

@doc """
Gets a single visible, active product by slug with full preloads (for detail page).
"""
def get_visible_product(slug) do
  storefront_base()
  |> where([p], p.slug == ^slug)
  |> preload(^@detail_preloads)
  |> Repo.one()
end

@doc """
Lists visible, active products with listing preloads (no variants).

## Options

  * `:sort` - sort order: "price_asc", "price_desc", "newest", "name_asc", "name_desc"
  * `:category` - filter by category name
  * `:on_sale` - if true, only products on sale
  * `:in_stock` - if true, only products in stock
  * `:limit` - max number of results
  * `:exclude` - product ID to exclude

"""
def list_visible_products(opts \\ []) do
  storefront_base()
  |> apply_visible_filters(opts)
  |> apply_sort(opts[:sort])
  |> maybe_limit(opts[:limit])
  |> maybe_exclude(opts[:exclude])
  |> preload(^@listing_preloads)
  |> Repo.all()
end

@doc """
Lists distinct categories from visible, active products.
Returns a list of `%{name, slug, image_url}` where `image_url` is the
first product image for a representative product in that category.
"""
def list_categories do
  storefront_base()
  |> where([p], not is_nil(p.category))
  |> select([p], p.category)
  |> distinct(true)
  |> order_by([p], p.category)
  |> Repo.all()
  |> Enum.map(fn name ->
    %{name: name, slug: Slug.slugify(name), image_url: category_image_url(name)}
  end)
end

# Picks the first image (by position) of any visible product in the
# category. NOTE(review): this runs one query per category; fine for a
# small catalog, revisit if category count grows.
defp category_image_url(category_name) do
  ProductImage
  |> join(:inner, [pi], p in Product, on: pi.product_id == p.id)
  |> where([pi, p], p.visible == true and p.status == "active" and p.category == ^category_name)
  |> order_by([pi], asc: pi.position)
  |> limit(1)
  |> select([pi], {pi.image_id, pi.src})
  |> Repo.one()
  |> image_url_from_pair()
end

# Prefer the locally cached webp when downloaded; fall back to the
# provider's source URL; nil when the category has no image at all.
defp image_url_from_pair({image_id, _src}) when not is_nil(image_id),
  do: "/image_cache/#{image_id}-400.webp"

defp image_url_from_pair({_image_id, src}) when is_binary(src), do: src
defp image_url_from_pair(_), do: nil

@doc """
Recomputes denormalized fields from a product's variants.
Called after variant sync to keep cached fields up to date.
"""
def recompute_cached_fields(%Product{} = product) do
  all_variants = Repo.all(from(v in ProductVariant, where: v.product_id == ^product.id))

  purchasable = Enum.filter(all_variants, &(&1.is_enabled and &1.is_available))
  lowest = Enum.min_by(purchasable, & &1.price, fn -> nil end)

  changes = %{
    cheapest_price: if(lowest, do: lowest.price, else: 0),
    compare_at_price: lowest && lowest.compare_at_price,
    in_stock: purchasable != [],
    on_sale: Enum.any?(all_variants, &ProductVariant.on_sale?/1)
  }

  product
  |> Product.recompute_changeset(changes)
  |> Repo.update()
end

defp apply_visible_filters(query, opts) do
  query
  |> maybe_filter_category(opts[:category])
  |> maybe_filter_on_sale(opts[:on_sale])
  |> maybe_filter_in_stock(opts[:in_stock])
end

defp maybe_filter_category(query, nil), do: query
defp maybe_filter_category(query, name), do: where(query, [p], p.category == ^name)

defp maybe_filter_on_sale(query, true), do: where(query, [p], p.on_sale == true)
defp maybe_filter_on_sale(query, _), do: query

defp maybe_filter_in_stock(query, flag) do
  case flag do
    true -> where(query, [p], p.in_stock == true)
    false -> where(query, [p], p.in_stock == false)
    _ -> query
  end
end

defp apply_sort(query, sort) do
  case sort do
    "price_asc" -> order_by(query, [p], asc: p.cheapest_price)
    "price_desc" -> order_by(query, [p], desc: p.cheapest_price)
    "name_asc" -> order_by(query, [p], asc: p.title)
    "name_desc" -> order_by(query, [p], desc: p.title)
    # "newest" and any unknown value both sort newest-first.
    _ -> order_by(query, [p], desc: p.inserted_at)
  end
end

defp maybe_limit(query, nil), do: query
defp maybe_limit(query, n) when is_integer(n), do: limit(query, ^n)

defp maybe_exclude(query, nil), do: query
defp maybe_exclude(query, id), do: where(query, [p], p.id != ^id)
|
||||
|
||||
# =============================================================================
|
||||
# Products
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Returns the list of products.

## Options

  * `:visible` - filter by visibility (boolean)
  * `:status` - filter by status (string)
  * `:category` - filter by category (string)
  * `:provider_connection_id` - filter by provider connection
  * `:preload` - list of associations to preload

"""
def list_products(opts \\ []) do
  filtered =
    Product
    |> apply_product_filters(opts)
    |> order_by([p], desc: p.inserted_at)

  filtered
  |> maybe_preload(opts[:preload])
  |> Repo.all()
end

@doc """
Returns products for the admin list page with sorting, stock filtering,
and full preloads for display.

## Options

  * `:visible` - filter by visibility (boolean)
  * `:status` - filter by status (string)
  * `:category` - filter by category (string)
  * `:provider_connection_id` - filter by provider connection
  * `:in_stock` - filter by stock status (boolean)
  * `:sort` - sort order (string)

"""
def list_products_admin(opts \\ []) do
  Product
  |> apply_product_filters(opts)
  |> maybe_filter_in_stock(opts[:in_stock])
  |> apply_sort(opts[:sort])
  |> preload([:provider_connection, images: :image, variants: []])
  |> Repo.all()
end

@doc """
Returns distinct category names from all products (including hidden/draft).
"""
def list_all_categories do
  Product
  |> where([p], not is_nil(p.category))
  |> select([p], p.category)
  |> distinct(true)
  |> order_by([p], p.category)
  |> Repo.all()
end

@doc """
Gets a single product by ID, or `nil` if it does not exist.
"""
def get_product(id, opts \\ []) do
  Product
  |> maybe_preload(opts[:preload])
  |> Repo.get(id)
end

@doc """
Gets a single product by slug, or `nil` if it does not exist.
"""
def get_product_by_slug(slug, opts \\ []) do
  Product
  |> maybe_preload(opts[:preload])
  |> Repo.get_by(slug: slug)
end

@doc """
Gets a product by provider connection and provider product ID.
"""
def get_product_by_provider(provider_connection_id, provider_product_id) do
  Repo.get_by(Product,
    provider_connection_id: provider_connection_id,
    provider_product_id: provider_product_id
  )
end

@doc """
Creates a product.
"""
def create_product(attrs \\ %{}) do
  Repo.insert(Product.changeset(%Product{}, attrs))
end

@doc """
Updates a product.
"""
def update_product(%Product{} = product, attrs) do
  Repo.update(Product.changeset(product, attrs))
end

@doc """
Updates storefront-only fields (visibility and category).
"""
def update_storefront(%Product{} = product, attrs) do
  Repo.update(Product.storefront_changeset(product, attrs))
end

@doc """
Toggles a product's visibility.
"""
def toggle_visibility(%Product{} = product) do
  # `!` (not `not`) on purpose: a nil visible flag toggles to true.
  update_storefront(product, %{visible: !product.visible})
end

@doc """
Deletes a product.
"""
def delete_product(%Product{} = product), do: Repo.delete(product)
|
||||
|
||||
@doc """
Upserts a product from provider data.

Creates a new product if one doesn't exist for the given provider connection
and provider product ID. Updates the existing product if checksum differs.

Accepts `attrs` keyed by atoms or strings (provider payloads vary).

Returns `{:ok, product, :created | :updated | :unchanged}`.
"""
def upsert_product(%ProviderConnection{id: conn_id}, attrs) do
  provider_product_id = fetch_attr(attrs, :provider_product_id)
  new_checksum = Product.compute_checksum(fetch_attr(attrs, :provider_data))
  title = fetch_attr(attrs, :title)

  # NOTE(review): Map.put with atom keys on a string-keyed attrs map would
  # yield a mixed map; callers appear to pass atom keys — confirm upstream.
  attrs =
    attrs
    |> Map.put(:checksum, new_checksum)
    |> Map.put(:provider_connection_id, conn_id)

  # First check by provider_product_id
  case get_product_by_provider(conn_id, provider_product_id) do
    nil ->
      # Not found by provider ID - check by slug (same title = same product)
      slug = Slug.slugify(title)
      find_by_slug_or_insert(conn_id, slug, attrs, new_checksum, provider_product_id)

    %Product{checksum: ^new_checksum} = product ->
      {:ok, product, :unchanged}

    product ->
      case update_product(product, attrs) do
        {:ok, product} -> {:ok, product, :updated}
        error -> error
      end
  end
end

# Reads `key` from attrs whether the map is atom- or string-keyed.
# Fix: downstream helpers previously read `attrs[:provider_product_id]`
# directly, so string-keyed attrs resolved to nil and could clobber the
# stored provider_product_id with nil.
defp fetch_attr(attrs, key), do: attrs[key] || attrs[Atom.to_string(key)]

# If product exists with same slug, update it (including new provider_product_id)
# Otherwise insert new product. `provider_product_id` is the already
# normalized id extracted in upsert_product/2.
defp find_by_slug_or_insert(conn_id, slug, attrs, new_checksum, provider_product_id) do
  case get_product_by_slug(slug) do
    %Product{provider_connection_id: ^conn_id, checksum: ^new_checksum} = product ->
      # Same product, same checksum - just update the provider_product_id if changed
      if product.provider_product_id != provider_product_id do
        case update_product(product, %{provider_product_id: provider_product_id}) do
          {:ok, product} -> {:ok, product, :updated}
          error -> error
        end
      else
        {:ok, product, :unchanged}
      end

    %Product{provider_connection_id: ^conn_id} = product ->
      # Same product, different checksum - full update including new provider_product_id
      case update_product(product, attrs) do
        {:ok, product} -> {:ok, product, :updated}
        error -> error
      end

    nil ->
      # Not found at all - insert new
      do_insert_product(attrs)

    _different_connection ->
      # Slug taken by a different provider connection - make it unique
      unique_slug = make_unique_slug(slug)
      do_insert_product(Map.put(attrs, :slug, unique_slug))
  end
end

# Appends "-2", "-3", ... until the slug is free.
defp make_unique_slug(base_slug, suffix \\ 2) do
  candidate = "#{base_slug}-#{suffix}"

  case Repo.get_by(Product, slug: candidate) do
    nil -> candidate
    _ -> make_unique_slug(base_slug, suffix + 1)
  end
end

# Insert with conflict handling for race conditions
defp do_insert_product(attrs) do
  case create_product(attrs) do
    {:ok, product} ->
      {:ok, product, :created}

    {:error, %Ecto.Changeset{errors: errors} = changeset} ->
      # Check if it's a unique constraint violation (race condition)
      if has_unique_constraint_error?(errors) do
        handle_insert_conflict(attrs, changeset)
      else
        {:error, changeset}
      end
  end
end

# Another process inserted the same product concurrently: re-fetch and
# converge on :updated/:unchanged instead of surfacing the constraint error.
defp handle_insert_conflict(attrs, changeset) do
  conn_id = attrs[:provider_connection_id]
  provider_product_id = fetch_attr(attrs, :provider_product_id)
  new_checksum = attrs[:checksum]

  case get_product_by_provider(conn_id, provider_product_id) do
    nil ->
      {:error, changeset}

    %Product{checksum: ^new_checksum} = product ->
      {:ok, product, :unchanged}

    product ->
      case update_product(product, attrs) do
        {:ok, product} -> {:ok, product, :updated}
        error -> error
      end
  end
end

# True when any changeset error carries the :unique constraint tag.
defp has_unique_constraint_error?(errors) do
  Enum.any?(errors, fn
    {_field, {_msg, [constraint: :unique, constraint_name: _]}} -> true
    _ -> false
  end)
end
|
||||
|
||||
# =============================================================================
|
||||
# Product Images
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Creates a product image.
"""
def create_product_image(attrs \\ %{}) do
  Repo.insert(ProductImage.changeset(%ProductImage{}, attrs))
end

@doc """
Gets a single product image by ID, or `nil` if it does not exist.
"""
def get_product_image(id), do: Repo.get(ProductImage, id)

@doc """
Lists all images for a product, ordered by position.
"""
def list_product_images(product_id) do
  ProductImage
  |> where([i], i.product_id == ^product_id)
  |> order_by([i], i.position)
  |> Repo.all()
end

@doc """
Updates a product image with the given attributes.
"""
def update_product_image(%ProductImage{} = product_image, attrs) do
  Repo.update(ProductImage.changeset(product_image, attrs))
end

@doc """
Links a product image to a Media.Image by setting its image_id.
"""
def link_product_image(%ProductImage{} = product_image, image_id) do
  update_product_image(product_image, %{image_id: image_id})
end

@doc """
Lists product images that need downloading (have src but no image_id).

## Options

  * `:limit` - maximum number of images to return (default: 100)
"""
def list_pending_downloads(opts \\ []) do
  max_results = Keyword.get(opts, :limit, 100)

  ProductImage
  |> where([i], not is_nil(i.src) and is_nil(i.image_id))
  |> order_by([i], asc: i.inserted_at)
  |> limit(^max_results)
  |> Repo.all()
end

@doc """
Deletes all images for a product, including their backing Media.Image records.
"""
def delete_product_images(%Product{id: product_id}) do
  media_ids =
    ProductImage
    |> where([pi], pi.product_id == ^product_id and not is_nil(pi.image_id))
    |> select([pi], pi.image_id)
    |> Repo.all()

  delete_result =
    ProductImage
    |> where([pi], pi.product_id == ^product_id)
    |> Repo.delete_all()

  cleanup_orphaned_images(media_ids)

  delete_result
end

@doc """
Syncs product images from a list of image data.

Preserves existing image_id references when the URL hasn't changed.
Returns a list of {:ok, image} tuples for images that need downloading.
"""
def sync_product_images(%Product{id: product_id}, images) when is_list(images) do
  # Existing rows indexed by their position.
  current =
    ProductImage
    |> where([i], i.product_id == ^product_id)
    |> Repo.all()
    |> Map.new(&{&1.position, &1})

  indexed = Enum.with_index(images)

  # Positions that should remain after this sync.
  kept_positions = MapSet.new(indexed, fn {data, idx} -> data[:position] || idx end)

  # Rows at positions no longer present get removed up front.
  stale_rows =
    current
    |> Enum.reject(fn {position, _img} -> MapSet.member?(kept_positions, position) end)
    |> Enum.map(fn {_position, img} -> img end)

  if stale_rows != [] do
    stale_ids = Enum.map(stale_rows, & &1.id)
    from(i in ProductImage, where: i.id in ^stale_ids) |> Repo.delete_all()
  end

  stale_media_ids =
    stale_rows
    |> Enum.map(& &1.image_id)
    |> Enum.reject(&is_nil/1)

  # Upsert each incoming image, collecting media ids displaced by URL changes.
  {results, displaced_media_ids} =
    Enum.map_reduce(indexed, [], fn {data, idx}, displaced ->
      position = data[:position] || idx
      sync_one_image(Map.get(current, position), data, position, product_id, displaced)
    end)

  cleanup_orphaned_images(stale_media_ids ++ displaced_media_ids)

  results
end

# New position: create a fresh row.
defp sync_one_image(nil, data, position, product_id, displaced) do
  attrs =
    data
    |> Map.put(:product_id, product_id)
    |> Map.put(:position, position)

  {create_product_image(attrs), displaced}
end

# Existing row at this position: keep it (refreshing color if needed)
# when the URL is unchanged; otherwise swap in the new URL, clear the
# cached image_id so the downloader re-fetches, and remember the
# displaced media id for cleanup.
defp sync_one_image(existing, data, _position, _product_id, displaced) do
  src = data[:src]

  if existing.src == src do
    result =
      if existing.color == data[:color] do
        {:ok, existing}
      else
        existing
        |> ProductImage.changeset(%{color: data[:color]})
        |> Repo.update()
      end

    {result, displaced}
  else
    displaced = if existing.image_id, do: [existing.image_id | displaced], else: displaced

    result =
      existing
      |> ProductImage.changeset(%{
        src: src,
        alt: data[:alt],
        color: data[:color],
        image_id: nil
      })
      |> Repo.update()

    {result, displaced}
  end
end
|
||||
|
||||
# Deletes Media.Image records that are no longer referenced by any product_image.
defp cleanup_orphaned_images([]), do: :ok

defp cleanup_orphaned_images(image_ids) do
  alias Berrypod.Media.Image, as: ImageSchema

  # Fix: previously every id was deleted unconditionally, which could
  # remove a media row still referenced by another product_image (e.g. a
  # shared image). Only delete ids with zero remaining references.
  still_referenced =
    from(pi in ProductImage, where: pi.image_id in ^image_ids, select: pi.image_id)
    |> Repo.all()
    |> MapSet.new()

  case Enum.reject(image_ids, &MapSet.member?(still_referenced, &1)) do
    [] ->
      :ok

    orphaned_ids ->
      from(i in ImageSchema, where: i.id in ^orphaned_ids)
      |> Repo.delete_all()
  end
end
|
||||
|
||||
# =============================================================================
|
||||
# Product Variants
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Gets multiple variants by their IDs with associated products and images.

Returns a map of variant_id => variant struct for efficient lookup.
Used by Cart.hydrate/1 to fetch variant data for display.
"""
def get_variants_with_products(variant_ids) when is_list(variant_ids) do
  query =
    from(v in ProductVariant,
      where: v.id in ^variant_ids,
      preload: [product: [images: :image]]
    )

  query
  |> Repo.all()
  |> Map.new(fn variant -> {variant.id, variant} end)
end
|
||||
|
||||
@doc """
Creates a product variant from the given attributes.
"""
def create_product_variant(attrs \\ %{}) do
  changeset = ProductVariant.changeset(%ProductVariant{}, attrs)
  Repo.insert(changeset)
end
|
||||
|
||||
@doc """
Updates a product variant with the given attributes.
"""
def update_product_variant(%ProductVariant{} = variant, attrs) do
  changeset = ProductVariant.changeset(variant, attrs)
  Repo.update(changeset)
end
|
||||
|
||||
@doc """
Deletes all variants for a product.

Returns the `{count, nil}` tuple from `Repo.delete_all/1`.
"""
def delete_product_variants(%Product{id: product_id}) do
  ProductVariant
  |> where([v], v.product_id == ^product_id)
  |> Repo.delete_all()
end
|
||||
|
||||
@doc """
Gets a variant by product and provider variant ID.

Returns `nil` when no matching variant exists.
"""
def get_variant_by_provider(product_id, provider_variant_id) do
  clauses = [product_id: product_id, provider_variant_id: provider_variant_id]
  Repo.get_by(ProductVariant, clauses)
end
|
||||
|
||||
@doc """
Syncs product variants from a list of variant data.

Upserts variants based on provider_variant_id. Variants present in the
database but absent from `variants` are deleted first; every incoming
variant is then created or updated. Returns the list of per-variant
`{:ok, variant} | {:error, changeset}` results in input order.

NOTE(review): the delete and the upserts are not wrapped in a
`Repo.transaction/1`, so a mid-sync failure leaves a partial sync —
confirm this is acceptable for the sync job's retry behavior.
"""
def sync_product_variants(%Product{id: product_id}, variants) when is_list(variants) do
  # provider_variant_ids already stored for this product.
  existing_ids =
    from(v in ProductVariant,
      where: v.product_id == ^product_id,
      select: v.provider_variant_id
    )
    |> Repo.all()
    |> MapSet.new()

  # Incoming data may be atom- or string-keyed, hence the `||` fallback.
  incoming_ids =
    variants
    |> Enum.map(&(&1[:provider_variant_id] || &1["provider_variant_id"]))
    |> MapSet.new()

  # Delete variants that are no longer in the incoming list
  removed_ids = MapSet.difference(existing_ids, incoming_ids)

  if MapSet.size(removed_ids) > 0 do
    from(v in ProductVariant,
      where:
        v.product_id == ^product_id and v.provider_variant_id in ^MapSet.to_list(removed_ids)
    )
    |> Repo.delete_all()
  end

  # Upsert incoming variants
  Enum.map(variants, fn variant_data ->
    provider_variant_id =
      variant_data[:provider_variant_id] || variant_data["provider_variant_id"]

    # NOTE(review): putting the atom key :product_id into a string-keyed map
    # would give Ecto.Changeset.cast/4 a mixed-key map and raise — the string-key
    # fallback above suggests that shape is possible; confirm callers always
    # pass atom-keyed maps (Printful's normalize_variant/1 does).
    attrs = Map.put(variant_data, :product_id, product_id)

    case get_variant_by_provider(product_id, provider_variant_id) do
      nil ->
        create_product_variant(attrs)

      existing ->
        update_product_variant(existing, attrs)
    end
  end)
end
|
||||
|
||||
# =============================================================================
|
||||
# Private Helpers
|
||||
# =============================================================================
|
||||
|
||||
# Applies each supported listing filter in turn; a nil option is a no-op,
# so callers can pass only the filters they care about.
defp apply_product_filters(query, opts) do
  query
  |> filter_by_visible(opts[:visible])
  |> filter_by_status(opts[:status])
  |> filter_by_category(opts[:category])
  |> filter_by_provider_connection(opts[:provider_connection_id])
end

defp filter_by_visible(query, nil), do: query

defp filter_by_visible(query, visible) do
  where(query, [p], p.visible == ^visible)
end

defp filter_by_status(query, nil), do: query

defp filter_by_status(query, status) do
  where(query, [p], p.status == ^status)
end

defp filter_by_category(query, nil), do: query

defp filter_by_category(query, category) do
  where(query, [p], p.category == ^category)
end

defp filter_by_provider_connection(query, nil), do: query

defp filter_by_provider_connection(query, conn_id) do
  where(query, [p], p.provider_connection_id == ^conn_id)
end

# Optionally applies Ecto preloads; nil means "no preloading".
defp maybe_preload(query, nil), do: query

defp maybe_preload(query, preloads) do
  preload(query, ^preloads)
end
|
||||
end
|
||||
189
lib/berrypod/products/product.ex
Normal file
189
lib/berrypod/products/product.ex
Normal file
@@ -0,0 +1,189 @@
|
||||
defmodule Berrypod.Products.Product do
  @moduledoc """
  Schema for products synced from POD providers.

  Products are uniquely identified by the combination of
  provider_connection_id and provider_product_id.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  # Closed set enforced by validate_inclusion/3 in changeset/2.
  @statuses ~w(active draft archived)

  schema "products" do
    field :provider_product_id, :string
    field :title, :string
    field :description, :string
    # URL-safe identifier; auto-generated from :title when missing.
    field :slug, :string
    field :status, :string, default: "active"
    field :visible, :boolean, default: true
    field :category, :string
    # Raw provider payload; read by option_types/1 and compute_checksum/1.
    field :provider_data, :map, default: %{}
    # Short digest of provider_data used to detect upstream changes.
    field :checksum, :string

    # Denormalized from variants — recomputed by Products.recompute_cached_fields/1
    field :cheapest_price, :integer, default: 0
    field :compare_at_price, :integer
    field :in_stock, :boolean, default: true
    field :on_sale, :boolean, default: false

    belongs_to :provider_connection, Berrypod.Products.ProviderConnection
    has_many :images, Berrypod.Products.ProductImage
    has_many :variants, Berrypod.Products.ProductVariant

    timestamps(type: :utc_datetime)
  end

  @doc """
  Returns the list of valid product statuses.
  """
  def statuses, do: @statuses

  @doc """
  Changeset for creating or updating a product.

  Generates a slug from the title when none is provided, and enforces
  uniqueness of both the slug and the (connection, provider product id) pair.
  """
  def changeset(product, attrs) do
    product
    |> cast(attrs, [
      :provider_connection_id,
      :provider_product_id,
      :title,
      :description,
      :slug,
      :status,
      :visible,
      :category,
      :provider_data,
      :checksum
    ])
    # Run before validate_required so a generated slug satisfies it.
    |> generate_slug_if_missing()
    |> validate_required([:provider_connection_id, :provider_product_id, :title, :slug])
    |> validate_inclusion(:status, @statuses)
    |> unique_constraint(:slug)
    |> unique_constraint([:provider_connection_id, :provider_product_id])
  end

  @doc """
  Changeset for admin storefront controls (visibility and category only).
  """
  def storefront_changeset(product, attrs) do
    product
    |> cast(attrs, [:visible, :category])
  end

  @doc """
  Changeset for recomputing denormalized fields from variants.
  """
  def recompute_changeset(product, attrs) do
    product
    |> cast(attrs, [:cheapest_price, :compare_at_price, :in_stock, :on_sale])
  end

  # ---------------------------------------------------------------------------
  # Display helpers
  # ---------------------------------------------------------------------------

  @doc """
  Returns the primary (first by position) image, or nil.

  Works with preloaded images association or plain maps.
  """
  def primary_image(%{images: images}) when is_list(images) do
    # min_by with a default fun handles an empty image list without raising.
    Enum.min_by(images, & &1.position, fn -> nil end)
  end

  def primary_image(_), do: nil

  @doc """
  Returns the second image by position (hover image), or nil.
  """
  def hover_image(%{images: images}) when is_list(images) and length(images) >= 2 do
    images
    |> Enum.sort_by(& &1.position)
    |> Enum.at(1)
  end

  def hover_image(_), do: nil

  @doc """
  Extracts option types from provider_data.
  Returns a list of %{name: "Size", type: :size, values: [%{title: "S"}, ...]}.
  Color options include :hex from the provider's color data.
  """
  def option_types(%{provider_data: %{"options" => options}}) when is_list(options) do
    Enum.map(options, fn opt ->
      type = option_type_atom(opt["type"])

      values =
        Enum.map(opt["values"] || [], fn val ->
          base = %{title: val["title"]}

          # Only the first listed colour hex is surfaced for a value.
          case val["colors"] do
            [hex | _] -> Map.put(base, :hex, hex)
            _ -> base
          end
        end)

      %{name: singularize_option_name(opt["name"]), type: type, values: values}
    end)
  end

  # Fallback for plain maps that already carry pre-built option types.
  def option_types(%{option_types: option_types}) when is_list(option_types), do: option_types
  def option_types(_), do: []

  # Anything that isn't a colour option is treated as a size option here.
  defp option_type_atom("color"), do: :color
  defp option_type_atom(_), do: :size

  # Printify sends plural names ("Colors", "Sizes") but variant options
  # use singular — keep them consistent so gallery filtering works.
  defp singularize_option_name("Colors"), do: "Color"
  defp singularize_option_name("Sizes"), do: "Size"
  defp singularize_option_name(name), do: name

  @doc """
  Generates a checksum from provider data for detecting changes.

  Produces the first 16 hex characters (8 bytes) of the SHA-256 of the
  JSON-encoded map; returns nil for non-map input.
  """
  def compute_checksum(provider_data) when is_map(provider_data) do
    provider_data
    |> Jason.encode!()
    |> then(&:crypto.hash(:sha256, &1))
    |> Base.encode16(case: :lower)
    |> binary_part(0, 16)
  end

  def compute_checksum(_), do: nil

  # Fills :slug from the title (if any) only when no slug was given.
  defp generate_slug_if_missing(changeset) do
    case get_field(changeset, :slug) do
      nil ->
        title = get_change(changeset, :title) || get_field(changeset, :title)

        if title do
          put_change(changeset, :slug, Slug.slugify(title))
        else
          # No title either — leave it to validate_required to report.
          changeset
        end

      _ ->
        changeset
    end
  end
end
|
||||
|
||||
defmodule Slug do
  @moduledoc false

  # Converts a title into a lowercase, hyphen-separated, URL-safe slug.
  # Non-word characters (other than whitespace and hyphens) are dropped,
  # and any run of whitespace/hyphens collapses to a single hyphen.
  def slugify(nil), do: nil

  def slugify(string) when is_binary(string) do
    cleaned =
      string
      |> String.downcase()
      |> String.replace(~r/[^\w\s-]/, "")

    cleaned
    |> String.split(~r/[\s-]+/, trim: true)
    |> Enum.join("-")
  end
end
|
||||
71
lib/berrypod/products/product_image.ex
Normal file
71
lib/berrypod/products/product_image.ex
Normal file
@@ -0,0 +1,71 @@
|
||||
defmodule Berrypod.Products.ProductImage do
  @moduledoc """
  Schema for product images.

  Images are ordered by position and belong to a single product.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  schema "product_images" do
    # Remote (provider/CDN) URL the image was synced from.
    field :src, :string
    # Sort order within the product's gallery; lowest position is primary.
    field :position, :integer, default: 0
    field :alt, :string
    # Colour this image belongs to, if the provider associates one.
    field :color, :string
    # Link to the locally cached Media.Image; nil means not downloaded yet.
    field :image_id, :binary_id

    belongs_to :product, Berrypod.Products.Product
    # define_field: false — :image_id is declared explicitly above.
    belongs_to :image, Berrypod.Media.Image, define_field: false

    timestamps(type: :utc_datetime)
  end

  @doc """
  Changeset for creating or updating a product image.
  """
  def changeset(product_image, attrs) do
    product_image
    |> cast(attrs, [:product_id, :src, :position, :alt, :color, :image_id])
    |> validate_required([:product_id, :src])
    |> foreign_key_constraint(:product_id)
    |> foreign_key_constraint(:image_id)
  end

  # ---------------------------------------------------------------------------
  # Display helpers
  # ---------------------------------------------------------------------------

  @doc """
  Returns the URL for a product image variant at the given width.
  Prefers local image_id (static file), falls back to CDN src.
  Handles mockup URL patterns that need size suffixes.
  """
  # Clause order matters: locally cached file first, then mockup pattern,
  # then raw remote src, then nil for anything unrecognized.
  def url(image, width \\ 800)

  def url(%{image_id: id}, width) when not is_nil(id),
    do: "/image_cache/#{id}-#{width}.webp"

  def url(%{src: "/mockups/" <> _ = src}, width), do: "#{src}-#{width}.webp"
  def url(%{src: src}, _width) when is_binary(src), do: src
  def url(_, _), do: nil

  @doc """
  Returns the URL for the pre-generated 200px thumbnail.
  Used for small previews (admin lists, cart items).
  """
  def thumbnail_url(%{image_id: id}) when not is_nil(id),
    do: "/image_cache/#{id}-thumb.jpg"

  def thumbnail_url(%{src: src}) when is_binary(src), do: src
  def thumbnail_url(_), do: nil

  @doc """
  Returns the source width from the linked Media.Image, if preloaded.
  """
  def source_width(%{image: %{source_width: w}}) when not is_nil(w), do: w
  def source_width(_), do: nil
end
|
||||
99
lib/berrypod/products/product_variant.ex
Normal file
99
lib/berrypod/products/product_variant.ex
Normal file
@@ -0,0 +1,99 @@
|
||||
defmodule Berrypod.Products.ProductVariant do
  @moduledoc """
  Schema for product variants.

  Variants represent different options (size, color, etc.) for a product.
  Each variant has its own pricing and availability.

  ## Options Field

  The `options` field stores variant options as a map with human-readable labels:

      %{
        "Size" => "Large",
        "Color" => "Navy Blue"
      }

  Labels are denormalized during sync for efficient display.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  schema "product_variants" do
    field :provider_variant_id, :string
    field :title, :string
    field :sku, :string
    # NOTE(review): prices/cost are integers, presumably minor currency
    # units (cents) — confirm against the sync code's parse_price/1.
    field :price, :integer
    field :compare_at_price, :integer
    field :cost, :integer
    field :options, :map, default: %{}
    field :is_enabled, :boolean, default: true
    field :is_available, :boolean, default: true

    belongs_to :product, Berrypod.Products.Product

    timestamps(type: :utc_datetime)
  end

  @doc """
  Changeset for creating or updating a product variant.
  """
  def changeset(product_variant, attrs) do
    product_variant
    |> cast(attrs, [
      :product_id,
      :provider_variant_id,
      :title,
      :sku,
      :price,
      :compare_at_price,
      :cost,
      :options,
      :is_enabled,
      :is_available
    ])
    |> validate_required([:product_id, :provider_variant_id, :title, :price])
    |> validate_number(:price, greater_than_or_equal_to: 0)
    # validate_number/3 skips nil fields, so optional compare_at_price/cost
    # are only checked when present.
    |> validate_number(:compare_at_price, greater_than_or_equal_to: 0)
    |> validate_number(:cost, greater_than_or_equal_to: 0)
    |> unique_constraint([:product_id, :provider_variant_id])
    |> foreign_key_constraint(:product_id)
  end

  @doc """
  Returns the profit for this variant (price - cost).
  Returns nil if cost is not set.
  """
  def profit(%__MODULE__{price: price, cost: cost}) when is_integer(price) and is_integer(cost) do
    price - cost
  end

  def profit(_), do: nil

  @doc """
  Returns true if the variant is on sale (has a compare_at_price higher than price).
  """
  def on_sale?(%__MODULE__{price: price, compare_at_price: compare_at})
      when is_integer(price) and is_integer(compare_at) and compare_at > price do
    true
  end

  def on_sale?(_), do: false

  @doc """
  Formats the options as a human-readable title.
  E.g., %{"Size" => "Large", "Color" => "Blue"} -> "Large / Blue"

  Returns nil when options are empty or missing.
  """
  def options_title(%__MODULE__{options: options})
      when is_map(options) and map_size(options) > 0 do
    options
    |> Map.values()
    |> Enum.join(" / ")
  end

  def options_title(_), do: nil
end
|
||||
97
lib/berrypod/products/provider_connection.ex
Normal file
97
lib/berrypod/products/provider_connection.ex
Normal file
@@ -0,0 +1,97 @@
|
||||
defmodule Berrypod.Products.ProviderConnection do
  @moduledoc """
  Schema for POD provider connections.

  Stores encrypted API credentials and configuration for each provider.
  Only one connection per provider type is allowed.
  """

  use Ecto.Schema
  import Ecto.Changeset

  alias Berrypod.Vault

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  @provider_types ~w(printify gelato prodigi printful)
  @sync_statuses ~w(pending syncing completed failed)

  schema "provider_connections" do
    field :provider_type, :string
    field :name, :string
    field :enabled, :boolean, default: true
    # Ciphertext only; plaintext never touches the database.
    field :api_key_encrypted, :binary
    field :config, :map, default: %{}
    field :last_synced_at, :utc_datetime
    field :sync_status, :string, default: "pending"

    # Virtual field for setting API key
    field :api_key, :string, virtual: true

    has_many :products, Berrypod.Products.Product

    timestamps(type: :utc_datetime)
  end

  @doc """
  Returns the list of supported provider types.
  """
  def provider_types, do: @provider_types

  @doc """
  Returns the list of valid sync statuses.
  """
  def sync_statuses, do: @sync_statuses

  @doc """
  Changeset for creating a new provider connection.

  The virtual `:api_key` is encrypted into `:api_key_encrypted` and then
  removed from the changeset by `encrypt_api_key/1`.
  """
  def changeset(provider_connection, attrs) do
    provider_connection
    |> cast(attrs, [:provider_type, :name, :enabled, :api_key, :config])
    |> validate_required([:provider_type, :name])
    |> validate_inclusion(:provider_type, @provider_types)
    # Enforces the one-connection-per-provider rule at the DB level.
    |> unique_constraint(:provider_type)
    |> encrypt_api_key()
  end

  @doc """
  Changeset for updating sync status.
  """
  def sync_changeset(provider_connection, attrs) do
    provider_connection
    |> cast(attrs, [:last_synced_at, :sync_status])
    |> validate_inclusion(:sync_status, @sync_statuses)
  end

  @doc """
  Decrypts and returns the API key for a provider connection.

  Returns nil when no key is stored or decryption fails.
  """
  def get_api_key(%__MODULE__{api_key_encrypted: nil}), do: nil

  def get_api_key(%__MODULE__{api_key_encrypted: encrypted}) do
    case Vault.decrypt(encrypted) do
      {:ok, api_key} -> api_key
      {:error, _} -> nil
    end
  end

  # Encrypts a newly supplied :api_key change; no-op when the key was not
  # changed, so updates that omit the key keep the stored ciphertext.
  defp encrypt_api_key(changeset) do
    case get_change(changeset, :api_key) do
      nil ->
        changeset

      api_key ->
        case Vault.encrypt(api_key) do
          {:ok, encrypted} ->
            changeset
            |> put_change(:api_key_encrypted, encrypted)
            # Drop the plaintext so it is never persisted or logged.
            |> delete_change(:api_key)

          {:error, _} ->
            add_error(changeset, :api_key, "could not be encrypted")
        end
    end
  end
end
|
||||
27
lib/berrypod/providers.ex
Normal file
27
lib/berrypod/providers.ex
Normal file
@@ -0,0 +1,27 @@
|
||||
defmodule Berrypod.Providers do
  @moduledoc """
  Convenience functions for working with POD providers.
  """

  alias Berrypod.Products.ProviderConnection
  alias Berrypod.Providers.Provider

  @doc """
  Tests a provider connection.

  Returns `{:ok, info}` with provider-specific info (e.g., shop name, shop_id)
  or `{:error, reason}` if the connection fails.
  """
  def test_connection(%ProviderConnection{} = conn) do
    with {:ok, provider} <- Provider.for_connection(conn) do
      provider.test_connection(conn)
    else
      # A provider type we know about but have not implemented yet.
      {:error, :not_implemented} -> {:error, :provider_not_implemented}
      error -> error
    end
  end

  @doc """
  Returns the provider module for a given type.
  """
  defdelegate for_type(type), to: Provider
end
|
||||
680
lib/berrypod/providers/printful.ex
Normal file
680
lib/berrypod/providers/printful.ex
Normal file
@@ -0,0 +1,680 @@
|
||||
defmodule Berrypod.Providers.Printful do
|
||||
@moduledoc """
|
||||
Printful provider implementation.
|
||||
|
||||
Handles product sync, order submission, and shipping rate lookups for Printful.
|
||||
Uses v2 API endpoints where available, v1 for sync products.
|
||||
"""
|
||||
|
||||
@behaviour Berrypod.Providers.Provider
|
||||
|
||||
alias Berrypod.Clients.Printful, as: Client
|
||||
alias Berrypod.Products.ProviderConnection
|
||||
|
||||
require Logger
|
||||
|
||||
# Stable identifier used to look this provider up by type.
@impl true
def provider_type do
  "printful"
end
|
||||
|
||||
# =============================================================================
|
||||
# Connection
|
||||
# =============================================================================
|
||||
|
||||
@impl true
def test_connection(%ProviderConnection{} = conn) do
  # Verifies the API key by listing the account's stores; the first store's
  # id and name are returned so the UI can show what the key connects to.
  with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
       :ok <- set_credentials(api_key, nil),
       {:ok, stores} <- Client.get_stores() do
    case stores do
      [store | _] ->
        {:ok,
         %{
           store_id: store["id"],
           store_name: store["name"]
         }}

      [] ->
        # Previously `List.first/1` returned nil here and the function replied
        # {:ok, %{store_id: nil, store_name: nil}}; surface it as an error.
        {:error, :no_stores}
    end
  else
    nil -> {:error, :no_api_key}
    {:error, _} = error -> error
  end
end
|
||||
|
||||
# =============================================================================
|
||||
# Products
|
||||
# =============================================================================
|
||||
|
||||
@impl true
def fetch_products(%ProviderConnection{config: config} = conn) do
  # A store id is mandatory for listing sync products.
  case config["store_id"] do
    nil ->
      {:error, :no_store_id}

    store_id ->
      with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
           :ok <- set_credentials(api_key, store_id) do
        fetch_all_sync_products()
      else
        nil -> {:error, :no_api_key}
        {:error, _} = error -> error
      end
  end
end
|
||||
|
||||
# Fetches every sync product in the store by paging from offset 0.
defp fetch_all_sync_products do
  fetch_sync_products_page(0, [])
end

# Recursively pages through the sync-product listing, fetching full detail
# (variants + files) for each product and normalizing it. Individual product
# failures are logged and skipped rather than aborting the whole sync.
# Returns {:ok, normalized_products} or {:error, reason} from the listing call.
defp fetch_sync_products_page(offset, acc) do
  case Client.list_sync_products(offset: offset) do
    {:ok, products} when is_list(products) ->
      # Fetch full details for each product (includes variants + files)
      detailed =
        products
        |> Enum.with_index()
        |> Enum.map(fn {product, index} ->
          # Simple rate limiting between detail requests.
          if index > 0, do: Process.sleep(100)

          case Client.get_sync_product(product["id"]) do
            {:ok, detail} ->
              sync_variants = detail["sync_variants"] || []
              catalog_colors = fetch_catalog_colors(sync_variants)
              normalize_product(detail["sync_product"], sync_variants, catalog_colors)

            {:error, reason} ->
              Logger.warning(
                "Failed to fetch Printful product #{product["id"]}: #{inspect(reason)}"
              )

              # Dropped below via Enum.reject/2 — sync is best-effort per product.
              nil
          end
        end)
        |> Enum.reject(&is_nil/1)

      all_products = acc ++ detailed

      # Printful paginates at 20 per page by default
      if length(products) >= 20 do
        Process.sleep(100)
        fetch_sync_products_page(offset + 20, all_products)
      else
        {:ok, all_products}
      end

    # NOTE(review): non-list {:ok, _} payload ends pagination with what we
    # have so far — presumably a defensive branch; confirm the client's
    # possible return shapes.
    {:ok, _} ->
      {:ok, acc}

    {:error, _} = error ->
      error
  end
end
|
||||
|
||||
# Fetch catalog color hex codes for a product's catalog type.
# Caches per catalog_product_id in the process dictionary to avoid duplicate calls.
defp fetch_catalog_colors(sync_variants) do
  catalog_product_id = extract_catalog_product_id_from_variants(sync_variants)
  cache_key = {:catalog_colors, catalog_product_id}

  # A cached entry is always a map (possibly empty), so `nil` means "not
  # fetched yet"; `with` returns the cached value untouched on a cache hit.
  with nil <- Process.get(cache_key) do
    colors = do_fetch_catalog_colors(catalog_product_id)
    Process.put(cache_key, colors)
    colors
  end
end
|
||||
|
||||
# No usable catalog product id (missing or the 0 sentinel) — nothing to look
# up. Previously only 0 was matched, so a nil id would have hit the API.
defp do_fetch_catalog_colors(id) when id in [nil, 0], do: %{}

defp do_fetch_catalog_colors(catalog_product_id) do
  # Light rate limiting against the Printful API.
  Process.sleep(100)

  case Client.get_catalog_product(catalog_product_id) do
    {:ok, catalog} ->
      # Map of color name => hex value for this catalog product.
      Map.new(catalog["colors"] || [], fn c -> {c["name"], c["value"]} end)

    {:error, reason} ->
      Logger.warning(
        "Failed to fetch catalog colors for product #{catalog_product_id}: #{inspect(reason)}"
      )

      %{}
  end
end
|
||||
|
||||
# =============================================================================
|
||||
# Orders
|
||||
# =============================================================================
|
||||
|
||||
@impl true
def submit_order(%ProviderConnection{config: config} = conn, order) do
  # Submits an order in two steps: create a draft, then confirm it for
  # fulfillment. Returns {:ok, %{provider_order_id: id}} on success.
  store_id = config["store_id"]

  if is_nil(store_id) do
    {:error, :no_store_id}
  else
    with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
         :ok <- set_credentials(api_key, store_id),
         order_data <- build_order_payload(order),
         {:ok, response} <- Client.create_order(order_data),
         order_id <- response["id"],
         # NOTE(review): if confirm_order/1 fails here, the draft created
         # above is left unconfirmed at Printful — confirm whether a retry
         # path or cleanup handles that case.
         {:ok, _confirmed} <- Client.confirm_order(order_id) do
      {:ok, %{provider_order_id: to_string(order_id)}}
    else
      nil -> {:error, :no_api_key}
      {:error, _} = error -> error
    end
  end
end
|
||||
|
||||
@impl true
def get_order_status(%ProviderConnection{config: config} = conn, provider_order_id) do
  case config["store_id"] do
    nil ->
      {:error, :no_store_id}

    store_id ->
      with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
           :ok <- set_credentials(api_key, store_id),
           {:ok, response} <- Client.get_order(provider_order_id) do
        # Shipments are fetched best-effort and folded into the status.
        shipments = fetch_shipments(provider_order_id)
        {:ok, normalize_order_status(response, shipments)}
      else
        nil -> {:error, :no_api_key}
        {:error, _} = error -> error
      end
  end
end
|
||||
|
||||
# Shipment info is best-effort: any failure or unexpected payload simply
# yields an empty list.
defp fetch_shipments(order_id) do
  with {:ok, shipments} when is_list(shipments) <- Client.get_order_shipments(order_id) do
    shipments
  else
    _ -> []
  end
end
|
||||
|
||||
# =============================================================================
|
||||
# Shipping Rates
|
||||
# =============================================================================
|
||||
|
||||
@impl true
def fetch_shipping_rates(%ProviderConnection{config: config} = conn, products)
    when is_list(products) do
  store_id = config["store_id"]

  with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
       :ok <- set_credentials(api_key, store_id) do
    # Build per-product items: one representative variant per product
    product_items = extract_per_product_items(products)

    Logger.info(
      "Fetching Printful shipping rates for #{length(product_items)} product(s), " <>
        "#{length(target_countries())} countries"
    )

    rates =
      for {catalog_product_id, variant_id} <- product_items,
          country_code <- target_countries(),
          reduce: [] do
        acc ->
          # Rate-limit once we have made at least one successful request.
          if acc != [], do: Process.sleep(100)
          acc ++ fetch_rate_for_product(catalog_product_id, variant_id, country_code)
      end

    {:ok, rates}
  else
    nil -> {:error, :no_api_key}
    # Previously only `nil` was matched, so an {:error, _} from
    # set_credentials/2 would have raised WithClauseError; handle it like
    # the sibling functions do.
    {:error, _} = error -> error
  end
end
|
||||
|
||||
# Countries to pre-cache rates for
defp target_countries do
  ["GB", "US", "DE", "FR", "CA", "AU", "IE", "NL", "AT", "BE"]
end

# Fetches the standard shipping rate for one representative catalog variant
# shipped to `country_code`. Returns a single-element list on success, or []
# when no rate is available or the request fails (logged, non-fatal).
defp fetch_rate_for_product(catalog_product_id, variant_id, country_code) do
  items = [%{source: "catalog", catalog_variant_id: variant_id, quantity: 1}]
  recipient = build_recipient(country_code)

  case Client.calculate_shipping(recipient, items) do
    {:ok, rates} when is_list(rates) ->
      # Prefer the STANDARD service; otherwise fall back to the first rate.
      standard = Enum.find(rates, &(&1["shipping"] == "STANDARD")) || List.first(rates)

      if standard do
        [
          %{
            blueprint_id: catalog_product_id,
            # Printful has no print-provider concept; 0 mirrors the value
            # written by normalize_product/3 for the shared rate schema.
            print_provider_id: 0,
            country_code: country_code,
            first_item_cost: parse_price(standard["rate"]),
            additional_item_cost: 0,
            currency: String.upcase(standard["currency"] || "USD"),
            handling_time_days: standard["max_delivery_days"]
          }
        ]
      else
        []
      end

    {:error, reason} ->
      Logger.warning(
        "Failed to fetch Printful shipping for #{country_code}: #{inspect(reason)}"
      )

      []
  end
end
|
||||
|
||||
# Printful requires state_code for US, CA, and AU
@default_state_codes %{"US" => "NY", "CA" => "ON", "AU" => "NSW"}

# Builds the minimal recipient map for a shipping-rate quote, adding a
# representative state code only where Printful demands one.
defp build_recipient(country_code) do
  recipient = %{country_code: country_code}

  case Map.fetch(@default_state_codes, country_code) do
    {:ok, state} -> Map.put(recipient, :state_code, state)
    :error -> recipient
  end
end
|
||||
|
||||
# Returns {catalog_product_id, first_catalog_variant_id} per product
# Products without a catalog product id or without any variant ids are skipped.
defp extract_per_product_items(products) do
  Enum.flat_map(products, fn product ->
    data = product[:provider_data] || %{}

    catalog_product_id = data[:catalog_product_id] || data["catalog_product_id"]
    variant_ids = data[:catalog_variant_ids] || data["catalog_variant_ids"] || []

    if is_nil(catalog_product_id) or variant_ids == [] do
      []
    else
      [{catalog_product_id, hd(variant_ids)}]
    end
  end)
end
|
||||
|
||||
# =============================================================================
|
||||
# Option Types (for frontend display)
|
||||
# =============================================================================
|
||||
|
||||
@doc """
Extracts option types from Printful provider_data for frontend display.

Builds option types from the stored options list, which contains
distinct colour and size values with optional hex codes.
"""
def extract_option_types(%{"options" => options}) when is_list(options) do
  for opt <- options do
    %{
      name: opt["name"],
      type: option_type_atom(opt["type"]),
      values: for(val <- opt["values"] || [], do: build_option_value(val))
    }
  end
end

def extract_option_types(_), do: []

# A value carries :hex only when the provider supplied one.
defp build_option_value(val) do
  value = %{title: val["title"]}
  if val["hex"], do: Map.put(value, :hex, val["hex"]), else: value
end

defp option_type_atom(type) do
  case type do
    "color" -> :color
    "size" -> :size
    _ -> :other
  end
end
|
||||
|
||||
# =============================================================================
# Data Normalization
# =============================================================================

# Converts a Printful sync product + its sync variants into the normalized
# product map shared by all providers. `catalog_colors` is a
# %{"Black" => "#0b0b0b", ...} map used downstream by build_option_types/2.
defp normalize_product(sync_product, sync_variants, catalog_colors) do
  images = extract_preview_images(sync_variants)
  catalog_product_id = extract_catalog_product_id_from_variants(sync_variants)
  catalog_variant_ids = Enum.map(sync_variants, & &1["variant_id"]) |> Enum.reject(&is_nil/1)

  # Maps normalized colour name -> the catalog variant id of the first
  # variant seen in that colour (used for colour-specific lookups later).
  color_variant_map =
    sync_variants
    |> Enum.reject(fn sv -> is_nil(sv["color"]) end)
    |> Enum.uniq_by(fn sv -> sv["color"] end)
    |> Map.new(fn sv -> {normalize_text(sv["color"]), sv["variant_id"]} end)

  %{
    provider_product_id: to_string(sync_product["id"]),
    title: sync_product["name"],
    # Printful sync products carry no description of their own.
    description: "",
    category: extract_category(sync_variants),
    images: images,
    variants: Enum.map(sync_variants, &normalize_variant/1),
    provider_data: %{
      catalog_product_id: catalog_product_id,
      catalog_variant_ids: catalog_variant_ids,
      color_variant_map: color_variant_map,
      # Shipping calc uses these generic keys (shared with Printify)
      blueprint_id: catalog_product_id,
      print_provider_id: 0,
      thumbnail_url: sync_product["thumbnail_url"],
      artwork_url: extract_artwork_url(sync_variants),
      options: build_option_types(sync_variants, catalog_colors),
      raw: %{sync_product: sync_product}
    }
  }
end

# Converts a single Printful sync variant to the normalized variant shape.
defp normalize_variant(sv) do
  %{
    provider_variant_id: to_string(sv["id"]),
    title: build_variant_title(sv),
    sku: sv["sku"],
    # retail_price arrives as a decimal string; stored as integer pence.
    price: parse_price(sv["retail_price"]),
    # Printful does not expose per-variant cost in the sync API response
    # used here.
    cost: nil,
    options: build_variant_options(sv),
    is_enabled: sv["synced"] == true,
    is_available: sv["availability_status"] == "active"
  }
end
|
||||
|
||||
# Variant title like "Black / XL", built from whichever of colour/size exist.
defp build_variant_title(sv) do
  [sv["color"], sv["size"]]
  |> Enum.reject(&is_nil/1)
  |> Enum.map_join(" / ", &normalize_text/1)
end

# Option map like %{"Color" => "Black", "Size" => "XL"}; absent axes omitted.
defp build_variant_options(sv) do
  %{}
  |> put_option("Color", sv["color"])
  |> put_option("Size", sv["size"])
end

defp put_option(opts, _key, value) when value in [nil, false], do: opts
defp put_option(opts, key, value), do: Map.put(opts, key, normalize_text(value))
|
||||
|
||||
# Extract unique preview images from sync variants (one per unique image URL)
defp extract_preview_images(sync_variants) do
  sync_variants
  |> Enum.flat_map(fn sv ->
    # Only "preview" files are product mock-ups; other types are design
    # assets (handled separately by extract_artwork_url/1).
    (sv["files"] || [])
    |> Enum.filter(&(&1["type"] == "preview"))
    |> Enum.map(fn file ->
      %{
        src: file["preview_url"] || file["thumbnail_url"],
        color: sv["color"],
        name: sv["name"]
      }
    end)
  end)
  # Multiple variants reuse the same mock-up URL; keep the first occurrence.
  |> Enum.uniq_by(& &1.src)
  |> Enum.with_index()
  |> Enum.map(fn {img, index} ->
    # Prefer the colour for alt text; fall back to the variant name.
    alt = if img.color not in [nil, ""], do: img.color, else: img.name
    color = if img.color not in [nil, ""], do: normalize_text(img.color), else: nil
    %{src: img.src, position: index, alt: alt, color: color}
  end)
end
|
||||
|
||||
# Find the artwork (design file) URL: the first "default"-type file with a
# binary URL across all variants, or nil when none exists.
defp extract_artwork_url(sync_variants) do
  Enum.find_value(sync_variants, fn sv ->
    Enum.find_value(sv["files"] || [], fn
      %{"type" => "default", "url" => url} when is_binary(url) -> url
      _ -> nil
    end)
  end)
end

# First catalog product id embedded in the variants' "product" maps;
# defaults to 0 when none of the variants carry one.
defp extract_catalog_product_id_from_variants(sync_variants) do
  Enum.find_value(sync_variants, 0, fn sv -> get_in(sv, ["product", "product_id"]) end)
end
|
||||
|
||||
# Build option types from variants for frontend display.
# catalog_colors is a %{"Black" => "#0b0b0b", ...} map from the catalog API.
defp build_option_types(sync_variants, catalog_colors) do
  # One entry per distinct colour (first-seen order), with a hex swatch
  # when the catalog provides one. Note the hex lookup uses the RAW colour
  # name (pre-normalize_text) to match the catalog API's keys.
  colors =
    sync_variants
    |> Enum.reject(fn sv -> is_nil(sv["color"]) end)
    |> Enum.uniq_by(fn sv -> sv["color"] end)
    |> Enum.map(fn sv ->
      title = normalize_text(sv["color"])
      base = %{"title" => title}
      hex = Map.get(catalog_colors, sv["color"])
      if hex, do: Map.put(base, "colors", [hex]), else: base
    end)

  # Distinct sizes, normalized (inch marks stripped) and de-duplicated.
  sizes =
    sync_variants
    |> Enum.map(& &1["size"])
    |> Enum.reject(&is_nil/1)
    |> Enum.map(&normalize_text/1)
    |> Enum.uniq()
    |> Enum.map(fn size -> %{"title" => size} end)

  opts = []

  # Only emit an option group when the product actually varies on that axis.
  opts =
    if colors != [],
      do: opts ++ [%{"name" => "Color", "type" => "color", "values" => colors}],
      else: opts

  opts =
    if sizes != [],
      do: opts ++ [%{"name" => "Size", "type" => "size", "values" => sizes}],
      else: opts

  opts
end
|
||||
|
||||
# Derives a storefront category from the catalog product name embedded in
# the first variant; nil when there are no variants at all.
defp extract_category([sv | _]) do
  product_name = get_in(sv, ["product", "name"]) || ""
  categorize_from_name(product_name)
end

defp extract_category([]), do: nil

# Keyword rules checked in order; the first hit wins, "Apparel" is the
# fallback when nothing matches.
@category_rules [
  {~w[t-shirt tshirt shirt hoodie sweatshirt jogger], "Apparel"},
  {~w[bag tote hat cap sleeve phone case], "Accessories"},
  {~w[mug cup blanket pillow cushion throw], "Homewares"},
  {~w[canvas poster print frame], "Canvas Prints"},
  {~w[notebook journal], "Stationery"}
]

defp categorize_from_name(name) do
  lowered = String.downcase(name)

  Enum.find_value(@category_rules, "Apparel", fn {keywords, category} ->
    if has_keyword?(lowered, keywords), do: category
  end)
end

# Substring match: any of the keywords appearing anywhere in the text.
defp has_keyword?(text, keywords), do: Enum.any?(keywords, &String.contains?(text, &1))
|
||||
|
||||
# =============================================================================
# Order Normalization
# =============================================================================

# Normalises a raw Printful order + its shipments into the shared
# order-status shape used by all providers.
defp normalize_order_status(raw, shipments) do
  %{
    status: map_order_status(raw["status"]),
    provider_status: raw["status"],
    tracking_number: extract_tracking(shipments),
    tracking_url: extract_tracking_url(shipments),
    shipments: shipments
  }
end

# Collapse Printful's status vocabulary onto the internal one.
defp map_order_status(status) when status in ["draft", "pending", "failed", "onhold"],
  do: "submitted"

defp map_order_status("inprocess"), do: "processing"
defp map_order_status(status) when status in ["fulfilled", "shipped"], do: "shipped"
defp map_order_status("delivered"), do: "delivered"
defp map_order_status("canceled"), do: "cancelled"
defp map_order_status(_), do: "submitted"

# Tracking info comes from the first shipment: either top-level fields or a
# nested "tracking" map, depending on payload shape.
defp extract_tracking([first | _]),
  do: first["tracking_number"] || get_in(first, ["tracking", "number"])

defp extract_tracking(_), do: nil

defp extract_tracking_url([first | _]),
  do: first["tracking_url"] || get_in(first, ["tracking", "url"])

defp extract_tracking_url(_), do: nil
|
||||
|
||||
# =============================================================================
# Order Building
# =============================================================================

# Builds the Printful order-creation payload from our internal order data.
# Items reference sync variant ids (integers), so stored string ids are
# parsed back to integers.
defp build_order_payload(order_data) do
  %{
    external_id: order_data.order_number,
    # Cheapest shipping tier; Printful resolves the concrete carrier.
    shipping: "STANDARD",
    recipient: build_recipient(order_data.shipping_address, order_data.customer_email),
    items:
      Enum.map(order_data.line_items, fn item ->
        %{
          sync_variant_id: parse_int(item.provider_variant_id),
          quantity: item.quantity
        }
      end)
  }
end
|
||||
|
||||
# Maps a Stripe-style address map onto Printful's recipient payload.
# Each field falls back to "" so the payload is always fully populated;
# both Stripe ("line1"/"postal_code") and generic ("address1"/"zip")
# key spellings are accepted.
defp build_recipient(address, email) when is_map(address) do
  %{
    name: address["name"] || "",
    address1: address["line1"] || address["address1"] || "",
    address2: address["line2"] || address["address2"] || "",
    city: address["city"] || "",
    country_code: address["country"] || "",
    state_code: address["state"] || address["region"] || "",
    zip: address["postal_code"] || address["zip"] || "",
    email: email
  }
end

# Missing or non-map address: same shape with every field blank.
defp build_recipient(_address, email), do: build_recipient(%{}, email)
|
||||
|
||||
# =============================================================================
# Webhooks
# =============================================================================

# The Printful event types we subscribe to.
@webhook_events [
  "package_shipped",
  "package_returned",
  "order_failed",
  "order_canceled",
  "product_synced",
  "product_updated",
  "product_deleted"
]

@doc """
Registers webhooks with Printful for this store.

The webhook URL should include a token query param for verification,
e.g. `https://example.com/webhooks/printful?token=SECRET`.

Returns the `Client.setup_webhooks/2` result, or `{:error, :no_store_id}`,
`{:error, :no_webhook_secret}`, or `{:error, :no_api_key}` when the
connection is not fully configured.
"""
def register_webhooks(%ProviderConnection{config: config} = conn, webhook_url) do
  store_id = config["store_id"]
  secret = config["webhook_secret"]

  cond do
    is_nil(store_id) ->
      {:error, :no_store_id}

    is_nil(secret) or secret == "" ->
      {:error, :no_webhook_secret}

    true ->
      with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
           :ok <- set_credentials(api_key, store_id) do
        # The secret is appended as a ?token= param so inbound webhook
        # requests can be verified.
        url_with_token = append_token(webhook_url, secret)
        Client.setup_webhooks(url_with_token, @webhook_events)
      else
        nil -> {:error, :no_api_key}
      end
  end
end
|
||||
|
||||
@doc """
Lists currently registered webhooks for this store.

Returns the `Client.get_webhooks/0` result, or `{:error, :no_api_key}`
when the connection has no API key.
"""
def list_webhooks(%ProviderConnection{config: config} = conn) do
  with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
       :ok <- set_credentials(api_key, config["store_id"]) do
    Client.get_webhooks()
  else
    nil -> {:error, :no_api_key}
  end
end

# Adds (or replaces) a "token" query parameter on the webhook URL, keeping
# any other query parameters intact.
defp append_token(url, token) do
  parsed = URI.parse(url)

  merged_query =
    (parsed.query || "")
    |> URI.decode_query()
    |> Map.put("token", token)
    |> URI.encode_query()

  URI.to_string(%{parsed | query: merged_query})
end
|
||||
|
||||
# =============================================================================
# Helpers
# =============================================================================

# Parse a price string like "13.50" into integer pence (1350);
# unparseable strings and non-numeric input yield 0.
defp parse_price(text) when is_binary(text) do
  case Float.parse(text) do
    :error -> 0
    {amount, _rest} -> round(amount * 100)
  end
end

defp parse_price(amount) when is_number(amount), do: round(amount * 100)
defp parse_price(_), do: 0

# Variant ids arrive as integers or decimal strings; anything else raises.
defp parse_int(raw) when is_integer(raw), do: raw
defp parse_int(raw) when is_binary(raw), do: String.to_integer(raw)

# Printful uses Unicode double prime (″) and multiplication sign (×) in size
# labels. These break LiveView's phx-value-* attribute serialization, so we
# strip inch marks and normalise the multiplication sign.
defp normalize_text(text) when is_binary(text) do
  text
  |> String.replace("×", "x")
  |> String.replace("″", "")
end

defp normalize_text(text), do: text
|
||||
|
||||
# Stashes credentials in the process dictionary so the HTTP client can pick
# them up for subsequent calls made in this same process. Always returns :ok.
defp set_credentials(api_key, store_id) do
  Process.put(:printful_api_key, api_key)
  # store_id is optional; only written when present.
  # NOTE(review): a store_id set by an earlier call in this process would
  # survive a later call with store_id = nil — confirm that is intended.
  if store_id, do: Process.put(:printful_store_id, store_id)
  :ok
end
|
||||
end
|
||||
638
lib/berrypod/providers/printify.ex
Normal file
638
lib/berrypod/providers/printify.ex
Normal file
@@ -0,0 +1,638 @@
|
||||
defmodule Berrypod.Providers.Printify do
|
||||
@moduledoc """
|
||||
Printify provider implementation.
|
||||
|
||||
Handles product sync and order submission for Printify.
|
||||
"""
|
||||
|
||||
@behaviour Berrypod.Providers.Provider
|
||||
|
||||
alias Berrypod.Clients.Printify, as: Client
|
||||
alias Berrypod.Products.ProviderConnection
|
||||
|
||||
require Logger
|
||||
|
||||
@impl true
def provider_type, do: "printify"

@impl true
# Verifies the API key by listing the account's shops; returns summary info
# for the first shop plus the total shop count.
def test_connection(%ProviderConnection{} = conn) do
  with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
       :ok <- set_api_key(api_key),
       {:ok, shops} <- Client.get_shops() do
    shop = List.first(shops)

    # NOTE(review): with zero shops, `shop` is nil and the lookups below
    # yield nil fields instead of raising — confirm an empty-shop account
    # should still count as a successful connection.
    {:ok,
     %{
       shop_id: shop["id"],
       shop_name: shop["title"],
       shop_count: length(shops)
     }}
  else
    nil -> {:error, :no_api_key}
    {:error, _} = error -> error
  end
end
||||
|
||||
@impl true
# Fetches every product in the configured shop, following pagination
# through to the last page. Each product is normalized before return.
def fetch_products(%ProviderConnection{config: config} = conn) do
  shop_id = config["shop_id"]

  if is_nil(shop_id) do
    {:error, :no_shop_id}
  else
    with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
         :ok <- set_api_key(api_key),
         {:ok, products} <- fetch_all_products(shop_id) do
      {:ok, products}
    else
      nil -> {:error, :no_api_key}
      {:error, _} = error -> error
    end
  end
end

# Fetches all products by paginating through the API
defp fetch_all_products(shop_id) do
  fetch_products_page(shop_id, 1, [])
end

# Recursively accumulates normalized products page by page until
# current_page reaches last_page; any page error aborts the whole fetch.
defp fetch_products_page(shop_id, page, acc) do
  case Client.list_products(shop_id, page: page) do
    {:ok, response} ->
      products = Enum.map(response["data"] || [], &normalize_product/1)
      all_products = acc ++ products

      # Fall back to sane defaults when pagination metadata is missing.
      current_page = response["current_page"] || page
      last_page = response["last_page"] || 1

      if current_page < last_page do
        # Small delay to be nice to rate limits (600/min = 10/sec)
        Process.sleep(100)
        fetch_products_page(shop_id, page + 1, all_products)
      else
        {:ok, all_products}
      end

    {:error, _} = error ->
      error
  end
end
||||
|
||||
@impl true
# Submits an order for fulfillment; on success returns the Printify order id
# in the normalized `%{provider_order_id: ...}` shape.
def submit_order(%ProviderConnection{config: config} = conn, order) do
  shop_id = config["shop_id"]

  if is_nil(shop_id) do
    {:error, :no_shop_id}
  else
    with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
         :ok <- set_api_key(api_key),
         order_data <- build_order_payload(order),
         {:ok, response} <- Client.create_order(shop_id, order_data) do
      {:ok, %{provider_order_id: response["id"]}}
    else
      nil -> {:error, :no_api_key}
      {:error, _} = error -> error
    end
  end
end

@impl true
# Fetches the current state of a previously-submitted order and maps it to
# the provider-agnostic status shape (see normalize_order_status/1).
def get_order_status(%ProviderConnection{config: config} = conn, provider_order_id) do
  shop_id = config["shop_id"]

  if is_nil(shop_id) do
    {:error, :no_shop_id}
  else
    with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
         :ok <- set_api_key(api_key),
         {:ok, response} <- Client.get_order(shop_id, provider_order_id) do
      {:ok, normalize_order_status(response)}
    else
      nil -> {:error, :no_api_key}
      {:error, _} = error -> error
    end
  end
end
||||
|
||||
# =============================================================================
# Shipping Rates
# =============================================================================

@impl true
# Fetches shipping rate profiles for every unique blueprint/print-provider
# pair across the given products. Per-pair failures are logged and skipped
# rather than failing the whole call, so the result may be partial.
def fetch_shipping_rates(%ProviderConnection{} = conn, products) when is_list(products) do
  with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
       :ok <- set_api_key(api_key) do
    pairs = extract_blueprint_provider_pairs(products)
    Logger.info("Fetching shipping rates for #{length(pairs)} blueprint/provider pairs")

    rates =
      pairs
      |> Enum.with_index()
      |> Enum.flat_map(fn {pair, index} ->
        # Rate limit: 100ms between requests
        if index > 0, do: Process.sleep(100)
        fetch_shipping_for_pair(pair)
      end)

    {:ok, rates}
  else
    nil -> {:error, :no_api_key}
  end
end
||||
|
||||
# Collects the unique {blueprint_id, print_provider_id} pairs present in
# the products' provider_data, tolerating atom or string keys and dropping
# products missing either id.
defp extract_blueprint_provider_pairs(products) do
  products
  |> Enum.map(fn product -> product[:provider_data] || %{} end)
  |> Enum.map(fn data ->
    {data[:blueprint_id] || data["blueprint_id"],
     data[:print_provider_id] || data["print_provider_id"]}
  end)
  |> Enum.filter(fn {blueprint_id, provider_id} -> blueprint_id && provider_id end)
  |> Enum.uniq()
end

# Fetches and normalizes shipping profiles for one pair; failures are
# logged and collapse to [] so one bad pair cannot sink the batch.
defp fetch_shipping_for_pair({blueprint_id, print_provider_id}) do
  case Client.get_shipping(blueprint_id, print_provider_id) do
    {:ok, body} ->
      normalize_shipping_response(blueprint_id, print_provider_id, body)

    {:error, reason} ->
      Logger.warning(
        "Failed to fetch shipping for blueprint #{blueprint_id}, " <>
          "provider #{print_provider_id}: #{inspect(reason)}"
      )

      []
  end
end
||||
|
||||
# Flattens a Printify shipping response into one rate map per country,
# keeping the most expensive first-item cost when several variant-group
# profiles cover the same country.
defp normalize_shipping_response(blueprint_id, print_provider_id, response) do
  # Only day-denominated handling times are understood; anything else is nil.
  handling_time_days =
    case response["handling_time"] do
      %{"value" => value, "unit" => "day"} -> value
      _ -> nil
    end

  profiles = response["profiles"] || []

  # For each profile, expand countries into individual rate maps.
  # Then group by country and take the max first_item_cost across profiles
  # (conservative estimate across variant groups).
  profiles
  |> Enum.flat_map(fn profile ->
    countries = profile["countries"] || []
    first_cost = get_in(profile, ["first_item", "cost"]) || 0
    additional_cost = get_in(profile, ["additional_items", "cost"]) || 0
    currency = get_in(profile, ["first_item", "currency"]) || "USD"

    Enum.map(countries, fn country ->
      %{
        blueprint_id: blueprint_id,
        print_provider_id: print_provider_id,
        country_code: country,
        first_item_cost: first_cost,
        additional_item_cost: additional_cost,
        currency: currency,
        handling_time_days: handling_time_days
      }
    end)
  end)
  |> Enum.group_by(& &1.country_code)
  |> Enum.map(fn {_country, country_rates} ->
    # Take the max first_item_cost across variant groups for this country
    Enum.max_by(country_rates, & &1.first_item_cost)
  end)
end
|
||||
|
||||
# =============================================================================
# Webhook Registration
# =============================================================================

# The Printify topics we subscribe to (product lifecycle + order shipping).
@webhook_events [
  "product:updated",
  "product:deleted",
  "product:publish:started",
  "order:sent-to-production",
  "order:shipment:created",
  "order:shipment:delivered"
]

@doc """
Registers webhooks for product events with Printify.

Returns {:ok, results} or {:error, reason}.

Each entry in `results` is `{:ok, event, response}` or
`{:error, event, reason}`, so per-event failures are visible to the caller.
Requires `shop_id` and a non-empty `webhook_secret` in the connection config.
"""
def register_webhooks(%ProviderConnection{config: config} = conn, webhook_url) do
  shop_id = config["shop_id"]
  secret = config["webhook_secret"]

  cond do
    is_nil(shop_id) ->
      {:error, :no_shop_id}

    is_nil(secret) or secret == "" ->
      {:error, :no_webhook_secret}

    true ->
      with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
           :ok <- set_api_key(api_key) do
        {:ok, create_all_webhooks(shop_id, webhook_url, secret)}
      else
        nil -> {:error, :no_api_key}
      end
  end
end
||||
|
||||
# Creates one webhook subscription per event, tagging each result with its
# event name so partial failures remain visible to the caller.
defp create_all_webhooks(shop_id, webhook_url, secret) do
  for event <- @webhook_events do
    case Client.create_webhook(shop_id, webhook_url, event, secret) do
      {:ok, body} -> {:ok, event, body}
      {:error, reason} -> {:error, event, reason}
    end
  end
end
||||
|
||||
@doc """
Lists registered webhooks for the shop.

Returns `{:ok, webhooks}`, or `{:error, :no_shop_id}` /
`{:error, :no_api_key}` when the connection is not fully configured,
or the client's error tuple on API failure.
"""
def list_webhooks(%ProviderConnection{config: config} = conn) do
  case config["shop_id"] do
    nil ->
      {:error, :no_shop_id}

    shop_id ->
      with api_key when is_binary(api_key) <- ProviderConnection.get_api_key(conn),
           :ok <- set_api_key(api_key),
           {:ok, webhooks} <- Client.list_webhooks(shop_id) do
        {:ok, webhooks}
      else
        nil -> {:error, :no_api_key}
        {:error, _} = error -> error
      end
  end
end
||||
|
||||
# =============================================================================
# Option Types Extraction (for frontend)
# =============================================================================

@doc """
Extracts option types from Printify provider_data for frontend display.

Returns a list of option type maps with normalized names, types, and values
including hex color codes for color-type options.

## Examples

    iex> extract_option_types(%{"options" => [
    ...>   %{"name" => "Colors", "type" => "color", "values" => [
    ...>     %{"id" => 1, "title" => "Black", "colors" => ["#000000"]}
    ...>   ]}
    ...> ]})
    [%{name: "Color", type: :color, values: [%{id: 1, title: "Black", hex: "#000000"}]}]
"""
def extract_option_types(%{"options" => options}) when is_list(options) do
  Enum.map(options, fn opt ->
    %{
      # Printify uses plural names ("Colors"); the frontend wants singular.
      name: singularize_option_name(opt["name"]),
      type: option_type_atom(opt["type"]),
      values: extract_option_values(opt)
    }
  end)
end

# provider_data without an options list yields no option types.
def extract_option_types(_), do: []

defp option_type_atom("color"), do: :color
defp option_type_atom("size"), do: :size
defp option_type_atom(_), do: :other
||||
|
||||
# Extracts display values for one option. Colour options pair each value
# with its first hex swatch; other options carry just id and title.
#
# BUG FIX: `List.first/1` raises a FunctionClauseError on nil, so a colour
# value without a "colors" list crashed the sync — default to [] so a
# missing swatch list yields `hex: nil` instead. A final catch-all clause
# likewise returns [] when the option has no "values" list at all.
defp extract_option_values(%{"values" => values, "type" => "color"}) do
  Enum.map(values, fn val ->
    %{
      id: val["id"],
      title: val["title"],
      hex: List.first(val["colors"] || [])
    }
  end)
end

defp extract_option_values(%{"values" => values}) do
  Enum.map(values, fn val ->
    %{id: val["id"], title: val["title"]}
  end)
end

defp extract_option_values(_), do: []
||||
|
||||
# =============================================================================
# Data Normalization
# =============================================================================

# Converts a raw Printify product into the normalized product map shared by
# all providers, pruning images/options down to what the storefront needs.
defp normalize_product(raw) do
  options = raw["options"] || []
  raw_variants = raw["variants"] || []
  # variant id -> colour name, used to tag each image with its colour.
  color_lookup = build_image_color_lookup(raw_variants, options)
  # Options restricted/ordered to values actually present in enabled variants.
  filtered_options = filter_options_by_variants(options, raw_variants)

  %{
    provider_product_id: to_string(raw["id"]),
    title: raw["title"],
    description: raw["description"],
    category: extract_category(raw),
    images: normalize_images(raw["images"] || [], color_lookup),
    variants: normalize_variants(raw_variants, options),
    provider_data: %{
      # blueprint + print provider drive shipping-rate lookups.
      blueprint_id: raw["blueprint_id"],
      print_provider_id: raw["print_provider_id"],
      tags: raw["tags"] || [],
      options: filtered_options,
      raw: raw
    }
  }
end
||||
|
||||
# Normalizes raw image entries: tag each with a colour (via variant ids),
# trim to a manageable subset per colour, then re-number positions.
defp normalize_images(images, color_lookup) do
  images
  |> Enum.map(fn img ->
    color = resolve_image_color(img["variant_ids"] || [], color_lookup)
    # Printify's "position" is a view label ("front"/"back"); keep it as
    # raw_position for filtering and reuse it as alt text.
    %{src: img["src"], alt: img["position"], color: color, raw_position: img["position"]}
  end)
  |> select_images_per_color()
  |> Enum.with_index()
  |> Enum.map(fn {img, index} ->
    %{src: img.src, position: index, alt: img.alt, color: img.color}
  end)
end

# Hero colour (first seen) keeps all images.
# Other colours keep only front + back views to avoid bloating the DB.
defp select_images_per_color(images) do
  hero_color =
    images
    |> Enum.find_value(fn img -> img.color end)

  Enum.filter(images, fn img ->
    img.color == hero_color || is_nil(img.color) ||
      img.raw_position in ["front", "back"]
  end)
end
|
||||
|
||||
# Filter blueprint options to only include values present in enabled variants,
# ordered to match variant ordering (so the default variant's colour comes first).
defp filter_options_by_variants(options, raw_variants) do
  enabled_order = enabled_option_value_order(options, raw_variants)

  Enum.map(options, fn opt ->
    name = opt["name"]

    case Map.get(enabled_order, name) do
      # No enabled variant referenced this option: leave it untouched.
      nil ->
        opt

      ordered_values ->
        values_by_title = Map.new(opt["values"] || [], fn v -> {v["title"], v} end)

        # Re-emit the option's values in enabled-variant order, dropping
        # titles that don't exist in the blueprint's value list.
        sorted =
          ordered_values
          |> Enum.map(&Map.get(values_by_title, &1))
          |> Enum.reject(&is_nil/1)

        Map.put(opt, "values", sorted)
    end
  end)
end

# Returns %{"Colors" => ["Dark Heather", "Navy", ...], "Sizes" => ["S", "2XL"]}
# preserving first-seen order from enabled variants.
#
# Variant titles encode option values positionally ("Navy / S" for
# [Colors, Sizes]), so each title is split and zipped against the option
# names by index.
defp enabled_option_value_order(options, raw_variants) do
  option_names = Enum.map(options, & &1["name"])

  raw_variants
  |> Enum.filter(& &1["is_enabled"])
  |> Enum.reduce(%{}, fn var, acc ->
    parts = String.split(var["title"] || "", " / ")

    option_names
    |> Enum.with_index()
    |> Enum.reduce(acc, fn {name, idx}, inner_acc ->
      case Enum.at(parts, idx) do
        nil ->
          inner_acc

        val ->
          existing = Map.get(inner_acc, name, [])

          # `existing ++ [val]` is O(n) per insert, but value lists are a
          # handful of colours/sizes, so order-preservation wins here.
          if val in existing,
            do: inner_acc,
            else: Map.put(inner_acc, name, existing ++ [val])
      end
    end)
  end)
end
|
||||
|
||||
# Builds a variant-id -> colour-name map by splitting each variant title at
# the colour option's position; empty when the product has no colour option.
defp build_image_color_lookup(raw_variants, options) do
  option_names = Enum.map(options, & &1["name"])

  case Enum.find_index(option_names, &(&1 in ["Colors", "Color"])) do
    nil ->
      %{}

    color_index ->
      Map.new(raw_variants, fn var ->
        parts = String.split(var["title"] || "", " / ")
        {var["id"], Enum.at(parts, color_index)}
      end)
  end
end

# First colour found for any of the image's variant ids, or nil.
defp resolve_image_color(variant_ids, color_lookup) do
  Enum.find_value(variant_ids, &Map.get(color_lookup, &1))
end
|
||||
|
||||
# Converts raw Printify variants into the normalized variant shape, using
# the product's option names to label each variant's option values.
defp normalize_variants(variants, options) do
  option_names = extract_option_names(options)

  Enum.map(variants, fn var ->
    %{
      provider_variant_id: to_string(var["id"]),
      title: var["title"],
      sku: var["sku"],
      price: var["price"],
      cost: var["cost"],
      options: normalize_variant_options(var, option_names),
      # Coerce to strict booleans; missing keys become false.
      is_enabled: var["is_enabled"] == true,
      is_available: var["is_available"] == true
    }
  end)
end

# Extract option names from product options, singularizing common plurals
defp extract_option_names(options) do
  Enum.map(options, fn opt ->
    singularize_option_name(opt["name"])
  end)
end

defp singularize_option_name("Colors"), do: "Color"
defp singularize_option_name("Sizes"), do: "Size"
defp singularize_option_name(name), do: name
|
||||
|
||||
# Builds a human-readable option map from the variant title. Title parts
# match the product options order — "Navy / S" for [Colors, Sizes] — so
# each part is keyed by the option name at the same index, falling back to
# a generated "Option N" label when the title has more parts than names.
defp normalize_variant_options(variant, option_names) do
  (variant["title"] || "")
  |> String.split(" / ")
  |> Enum.with_index()
  |> Map.new(fn {value, index} ->
    {Enum.at(option_names, index) || "Option #{index + 1}", value}
  end)
end
|
||||
|
||||
# Derives a storefront category from the product's tags (case-insensitive
# substring matching, first matching rule wins). When no rule matches, the
# first raw tag is used as-is — or nil when the product has no tags.
defp extract_category(raw) do
  # Try to extract category from tags (case-insensitive)
  tags =
    (raw["tags"] || [])
    |> Enum.map(&String.downcase/1)

  cond do
    has_tag?(tags, ~w[t-shirt tshirt shirt hoodie sweatshirt apparel clothing]) -> "Apparel"
    has_tag?(tags, ~w[canvas]) -> "Canvas Prints"
    has_tag?(tags, ~w[mug cup blanket pillow cushion homeware homewares home]) -> "Homewares"
    has_tag?(tags, ~w[notebook journal stationery]) -> "Stationery"
    has_tag?(tags, ~w[phone case bag tote accessories]) -> "Accessories"
    has_tag?(tags, ~w[art print poster wall]) -> "Art Prints"
    # BUG FIX: raw["tags"] may be nil (only the downcased copy above was
    # guarded), and List.first/1 raises on nil — default to [] so a tagless
    # product yields nil instead of crashing.
    true -> List.first(raw["tags"] || [])
  end
end

# True when any tag contains any of the keywords as a substring.
defp has_tag?(tags, keywords) do
  Enum.any?(tags, fn tag ->
    Enum.any?(keywords, fn keyword -> String.contains?(tag, keyword) end)
  end)
end
|
||||
|
||||
# Normalises a raw Printify order into the shared order-status shape.
defp normalize_order_status(raw) do
  %{
    status: map_order_status(raw["status"]),
    provider_status: raw["status"],
    tracking_number: extract_tracking(raw),
    tracking_url: extract_tracking_url(raw),
    shipments: raw["shipments"] || []
  }
end

# Collapse Printify's status vocabulary onto the internal one.
defp map_order_status(status)
     when status in ["pending", "on-hold", "payment-not-received", "cost-calculation"],
     do: "submitted"

defp map_order_status(status) when status in ["in-production", "partially-shipped"],
  do: "processing"

defp map_order_status("shipped"), do: "shipped"
defp map_order_status("delivered"), do: "delivered"
defp map_order_status("canceled"), do: "cancelled"
defp map_order_status(_), do: "submitted"

# Tracking details come from the first shipment, when any exist.
defp extract_tracking(%{"shipments" => [first | _]}), do: first["tracking_number"]
defp extract_tracking(_), do: nil

defp extract_tracking_url(%{"shipments" => [first | _]}), do: first["tracking_url"]
defp extract_tracking_url(_), do: nil
|
||||
|
||||
# =============================================================================
# Order Building
# =============================================================================

# Builds the Printify order-creation payload from our internal order data.
defp build_order_payload(order_data) do
  %{
    external_id: order_data.order_number,
    label: order_data.order_number,
    line_items:
      Enum.map(order_data.line_items, fn item ->
        %{
          product_id: item.provider_product_id,
          # Stored as strings on our side; Printify wants integers.
          variant_id: parse_variant_id(item.provider_variant_id),
          quantity: item.quantity
        }
      end),
    # 1 = standard shipping.
    shipping_method: 1,
    address_to: build_address(order_data.shipping_address, order_data.customer_email)
  }
end
|
||||
|
||||
# Maps Stripe shipping_details address fields to Printify's expected format.
# Stripe gives us: name, line1, line2, city, postal_code, state, country
# Printify wants: first_name, last_name, address1, address2, city, zip, region, country
# Every field falls back to "" so the payload is always fully populated.
defp build_address(address, email) when is_map(address) do
  {first, last} = split_name(address["name"])

  %{
    first_name: first,
    last_name: last,
    email: email,
    phone: address["phone"] || "",
    country: address["country"] || "",
    region: address["state"] || address["region"] || "",
    address1: address["line1"] || address["address1"] || "",
    address2: address["line2"] || address["address2"] || "",
    city: address["city"] || "",
    zip: address["postal_code"] || address["zip"] || ""
  }
end

# Missing or non-map address: same shape with every field blank.
defp build_address(_address, email), do: build_address(%{}, email)

# Splits a full name into {first, rest}; everything after the first space
# becomes the last name.
defp split_name(name) when name in [nil, ""], do: {"", ""}

defp split_name(name) do
  case String.split(name, " ", parts: 2) do
    [only] -> {only, ""}
    [first, rest] -> {first, rest}
  end
end
|
||||
|
||||
# Printify variant IDs are integers, but we store them as strings
# (raises ArgumentError on a non-numeric string — callers pass our own
# stored ids, so that would indicate corrupt data).
defp parse_variant_id(id) when is_integer(id), do: id
defp parse_variant_id(id) when is_binary(id), do: String.to_integer(id)

# =============================================================================
# API Key Management
# =============================================================================

# Temporarily sets the API key for the request
# In a production system, this would use a connection pool or request context
# The key lives in this process's dictionary, so it is only visible to
# calls made from the same process. Always returns :ok.
defp set_api_key(api_key) do
  Process.put(:printify_api_key, api_key)
  :ok
end
|
||||
end
|
||||
106
lib/berrypod/providers/provider.ex
Normal file
106
lib/berrypod/providers/provider.ex
Normal file
@@ -0,0 +1,106 @@
|
||||
defmodule Berrypod.Providers.Provider do
  @moduledoc """
  Behaviour for POD provider integrations.

  Each provider (Printify, Gelato, Prodigi, etc.) implements this behaviour
  so the rest of the app has one consistent interface for:

  - Testing connections
  - Fetching products
  - Submitting orders
  - Tracking order status

  ## Data Normalization

  Providers return normalized data structures:

  - Products are maps with keys: `title`, `description`, `provider_product_id`,
    `images`, `variants`, `category`, `provider_data`
  - Variants are maps with keys: `provider_variant_id`, `title`, `sku`, `price`,
    `cost`, `options`, `is_enabled`, `is_available`
  - Images are maps with keys: `src`, `position`, `alt`
  """

  alias Berrypod.Products.ProviderConnection

  @doc """
  Returns the provider type identifier (e.g., "printify", "gelato").
  """
  @callback provider_type() :: String.t()

  @doc """
  Tests the connection to the provider.

  Returns `{:ok, info}` with provider-specific info (e.g., shop name)
  or `{:error, reason}` if the connection fails.
  """
  @callback test_connection(ProviderConnection.t()) :: {:ok, map()} | {:error, term()}

  @doc """
  Fetches all products from the provider.

  Returns a list of normalized product maps.
  """
  @callback fetch_products(ProviderConnection.t()) :: {:ok, [map()]} | {:error, term()}

  @doc """
  Submits an order to the provider for fulfillment.

  Returns `{:ok, %{provider_order_id: String.t()}}` on success.
  """
  @callback submit_order(ProviderConnection.t(), order :: map()) ::
              {:ok, %{provider_order_id: String.t()}} | {:error, term()}

  @doc """
  Gets the current status of an order from the provider.
  """
  @callback get_order_status(ProviderConnection.t(), provider_order_id :: String.t()) ::
              {:ok, map()} | {:error, term()}

  @doc """
  Fetches shipping rates from the provider for the given products.

  Takes the connection and the already-fetched product list (from fetch_products).
  Returns normalized rate maps with keys: blueprint_id, print_provider_id,
  country_code, first_item_cost, additional_item_cost, currency, handling_time_days.

  Optional — providers that don't support shipping rate lookup can skip this.
  The sync worker checks `function_exported?/3` before calling.
  """
  @callback fetch_shipping_rates(ProviderConnection.t(), products :: [map()]) ::
              {:ok, [map()]} | {:error, term()}

  @optional_callbacks [fetch_shipping_rates: 2]

  # Built-in dispatch table: provider type string -> result returned by
  # `for_type/1` when no runtime override is configured.
  @builtin_providers %{
    "printify" => {:ok, Berrypod.Providers.Printify},
    "printful" => {:ok, Berrypod.Providers.Printful},
    "gelato" => {:error, :not_implemented},
    "prodigi" => {:error, :not_implemented}
  }

  @doc """
  Returns the provider module for a given provider type.

  Checks `:provider_modules` application config first, allowing test
  overrides via Mox. Falls back to the built-in dispatch table.
  """
  def for_type(type) do
    overrides = Application.get_env(:berrypod, :provider_modules, %{})

    # Only honor the override config when it is a map; anything else
    # (misconfiguration) falls straight through to the defaults.
    override = if is_map(overrides), do: Map.get(overrides, type)

    case override do
      nil -> default_for_type(type)
      module -> {:ok, module}
    end
  end

  # Unknown types yield a self-describing error tuple.
  defp default_for_type(type) do
    Map.get(@builtin_providers, type, {:error, {:unknown_provider, type}})
  end

  @doc """
  Returns the provider module for a provider connection.
  """
  def for_connection(%ProviderConnection{provider_type: type}) do
    for_type(type)
  end
end
|
||||
51
lib/berrypod/release.ex
Normal file
51
lib/berrypod/release.ex
Normal file
@@ -0,0 +1,51 @@
|
||||
defmodule Berrypod.Release do
  @moduledoc """
  Release tasks that can be run via `bin/migrate` or `bin/berrypod eval`.

  Migrations run automatically on startup (see Application), so this is mainly
  useful as a standalone tool for debugging or manual recovery.
  """

  # OTP application name — used for Application.load/1 and :ecto_repos lookup.
  @app :berrypod

  @doc """
  Runs all pending migrations (`:up`) for every configured repo.
  """
  def migrate do
    load_app()

    # with_repo starts the repo just long enough to run the migrations,
    # so this works without booting the full supervision tree.
    for repo <- repos() do
      {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
    end
  end

  @doc """
  Seeds default theme settings if none exist.

  Called on every boot but only writes to the DB on first run
  (when the settings table is empty). Safe to call repeatedly.
  """
  def seed_defaults do
    alias Berrypod.Settings

    case Settings.get_setting("theme_settings") do
      nil ->
        # First boot: install the :studio preset as the starting theme.
        {:ok, _} = Settings.apply_preset(:studio)
        :ok

      _exists ->
        :ok
    end
  end

  @doc """
  Rolls `repo` back down to migration `version`.
  """
  def rollback(repo, version) do
    load_app()
    {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
  end

  # Repos configured under :ecto_repos for this app.
  defp repos do
    Application.fetch_env!(@app, :ecto_repos)
  end

  # Loads app config without starting its supervision tree; :ssl is started
  # first so any TLS connection made during the task works.
  defp load_app do
    Application.ensure_all_started(:ssl)
    Application.load(@app)
  end
end
|
||||
5
lib/berrypod/repo.ex
Normal file
5
lib/berrypod/repo.ex
Normal file
@@ -0,0 +1,5 @@
|
||||
defmodule Berrypod.Repo do
  @moduledoc """
  Ecto repository for Berrypod, backed by SQLite via `Ecto.Adapters.SQLite3`.
  """

  use Ecto.Repo,
    otp_app: :berrypod,
    adapter: Ecto.Adapters.SQLite3
end
|
||||
218
lib/berrypod/search.ex
Normal file
218
lib/berrypod/search.ex
Normal file
@@ -0,0 +1,218 @@
|
||||
defmodule Berrypod.Search do
  @moduledoc """
  Full-text product search backed by SQLite FTS5.

  Uses a contentless FTS5 index with BM25 ranking. The index is rebuilt
  from the products table after each provider sync.
  """

  import Ecto.Query

  alias Berrypod.Products.Product
  alias Berrypod.Repo

  # Associations needed to render a product card in listing views.
  @listing_preloads [images: :image]

  # BM25 column weights: title(10), category(5), variant_info(3), description(1)
  @bm25_weights "10.0, 5.0, 3.0, 1.0"

  @doc """
  Searches products by query string. Returns ranked list of Product structs
  with listing preloads, or empty list for blank/short queries.
  """
  def search(query) when is_binary(query) do
    query = String.trim(query)

    if String.length(query) < 2 do
      []
    else
      fts_query = build_fts_query(query)

      # Try FTS first; fall back to plain substring matching when the
      # prefix-based FTS query finds nothing.
      case search_fts(fts_query) do
        [] -> search_like(query)
        results -> results
      end
    end
  end

  def search(_), do: []

  @doc """
  Rebuilds the entire FTS5 index from visible, active products.
  """
  def rebuild_index do
    Repo.transaction(fn ->
      # Clear existing index data
      Repo.query!("DELETE FROM products_search_map")
      Repo.query!("DELETE FROM products_search")

      # Load all visible products with variants for indexing
      products =
        Product
        |> where([p], p.visible == true and p.status == "active")
        |> preload([:variants])
        |> Repo.all()

      Enum.each(products, &insert_into_index/1)
    end)
  end

  @doc """
  Indexes or reindexes a single product.
  Removes existing entry first if present.
  """
  def index_product(%Product{} = product) do
    product = Repo.preload(product, [:variants], force: true)

    Repo.transaction(fn ->
      remove_from_index(product.id)
      insert_into_index(product)
    end)
  end

  @doc """
  Removes a product from the search index.
  """
  def remove_product(product_id) do
    remove_from_index(product_id)
  end

  # Build an FTS5 MATCH query from user input.
  # Strips special chars, splits into tokens, adds * prefix match to last token.
  # Returns nil when sanitization leaves no tokens.
  defp build_fts_query(input) do
    tokens =
      input
      |> String.replace(~r/[^\w\s]/, "")
      |> String.split(~r/\s+/, trim: true)

    case tokens do
      [] ->
        nil

      tokens ->
        {complete, [last]} = Enum.split(tokens, -1)

        # Quote each token (protects FTS5 keywords like AND/OR); the final
        # token gets a trailing * so partial words match while typing.
        parts =
          Enum.map(complete, &~s("#{&1}")) ++
            [~s("#{last}" *)]

        Enum.join(parts, " ")
    end
  end

  defp search_fts(nil), do: []

  defp search_fts(fts_query) do
    result =
      Repo.query!(
        """
        SELECT m.product_id, bm25(products_search, #{@bm25_weights}) AS rank
        FROM products_search
        JOIN products_search_map m ON m.rowid = products_search.rowid
        WHERE products_search MATCH ?1
        ORDER BY rank
        LIMIT 20
        """,
        [fts_query]
      )

    product_ids = Enum.map(result.rows, fn [id, _rank] -> id end)

    if product_ids == [] do
      []
    else
      # Fetch full structs preserving rank order
      products_by_id =
        Product
        |> where([p], p.id in ^product_ids)
        |> where([p], p.visible == true and p.status == "active")
        |> preload(^@listing_preloads)
        |> Repo.all()
        |> Map.new(&{&1.id, &1})

      # Re-emit in BM25 rank order; ids filtered out by visibility are dropped.
      Enum.flat_map(product_ids, fn id ->
        case Map.get(products_by_id, id) do
          nil -> []
          product -> [product]
        end
      end)
    end
  end

  # Substring fallback when FTS5 prefix matching returns nothing.
  #
  # FIX: `sanitize_like/1` escapes % and _ with a backslash, but SQLite's LIKE
  # has no default escape character — without an explicit ESCAPE clause the
  # escaping was inert and inputs containing % or _ matched the wrong rows.
  # The fragment declares ESCAPE '\' to pair with the sanitizer.
  defp search_like(query) do
    pattern = "%#{sanitize_like(query)}%"

    Product
    |> where([p], p.visible == true and p.status == "active")
    |> where(
      [p],
      fragment("? LIKE ? ESCAPE '\\'", p.title, ^pattern) or
        fragment("? LIKE ? ESCAPE '\\'", p.category, ^pattern)
    )
    |> order_by([p], p.title)
    |> limit(20)
    |> preload(^@listing_preloads)
    |> Repo.all()
  end

  # Escape LIKE wildcards in user input (pairs with ESCAPE '\' in search_like/1).
  defp sanitize_like(input) do
    input
    |> String.replace("\\", "\\\\")
    |> String.replace("%", "\\%")
    |> String.replace("_", "\\_")
  end

  # Insert one product into the index: map row first (its rowid links the
  # product id to the FTS row), then the searchable text columns.
  defp insert_into_index(%Product{} = product) do
    Repo.query!(
      "INSERT INTO products_search_map (product_id) VALUES (?1)",
      [product.id]
    )

    # last_insert_rowid() is connection-local; safe inside this transaction.
    %{rows: [[rowid]]} = Repo.query!("SELECT last_insert_rowid()")

    variant_info = build_variant_info(product.variants || [])
    description = strip_html(product.description || "")

    Repo.query!(
      """
      INSERT INTO products_search (rowid, title, category, variant_info, description)
      VALUES (?1, ?2, ?3, ?4, ?5)
      """,
      [rowid, product.title || "", product.category || "", variant_info, description]
    )
  end

  defp remove_from_index(product_id) do
    case Repo.query!(
           "SELECT rowid FROM products_search_map WHERE product_id = ?1",
           [product_id]
         ) do
      %{rows: [[rowid]]} ->
        Repo.query!("DELETE FROM products_search WHERE rowid = ?1", [rowid])
        Repo.query!("DELETE FROM products_search_map WHERE rowid = ?1", [rowid])

      _ ->
        :ok
    end
  end

  # Build searchable variant text from enabled variants
  defp build_variant_info(variants) do
    variants
    |> Enum.filter(& &1.is_enabled)
    |> Enum.flat_map(fn v -> [v.title | Map.values(v.options || %{})] end)
    |> Enum.uniq()
    |> Enum.join(" ")
  end

  # Strip HTML tags and decode common entities.
  #
  # FIX: the entity names in the replace patterns had been rendered to their
  # decoded characters (making every replace a no-op and one literal
  # unparseable); restored here. `&amp;` is decoded LAST so double-escaped
  # text such as "&amp;lt;" unescapes exactly one level.
  defp strip_html(html) do
    html
    |> String.replace(~r/<[^>]+>/, " ")
    |> String.replace("&lt;", "<")
    |> String.replace("&gt;", ">")
    |> String.replace("&quot;", "\"")
    |> String.replace("&#39;", "'")
    |> String.replace("&nbsp;", " ")
    |> String.replace("&amp;", "&")
    |> String.replace(~r/\s+/, " ")
    |> String.trim()
  end
end
|
||||
41
lib/berrypod/secrets.ex
Normal file
41
lib/berrypod/secrets.ex
Normal file
@@ -0,0 +1,41 @@
|
||||
defmodule Berrypod.Secrets do
  @moduledoc """
  Loads encrypted secrets from the database into Application env at runtime.

  Secrets are stored encrypted in the settings table via `Settings.put_secret/2`
  and pushed into the appropriate Application config on startup, keeping all
  credentials in the portable SQLite database (encrypted by the Vault module).

  The only external dependency is `SECRET_KEY_BASE` (used to derive encryption keys).
  """

  alias Berrypod.Settings

  require Logger

  # Registry of {settings_key, app, env_key} — add new secrets here
  @secret_registry [
    {"stripe_api_key", :stripity_stripe, :api_key},
    {"stripe_signing_secret", :stripity_stripe, :signing_secret}
  ]

  @doc """
  Loads all secrets from the database into Application env.

  Called at startup from the supervision tree, after the Repo is ready.
  """
  def load_all do
    Enum.each(@secret_registry, &load_secret/1)
    :ok
  end

  # Copies one registered secret into Application env; silently skips
  # secrets that have not been configured yet.
  defp load_secret({settings_key, app, env_key}) do
    case Settings.get_secret(settings_key) do
      nil ->
        :skip

      value ->
        Application.put_env(app, env_key, value)
        Logger.debug("Loaded #{settings_key} from database")
    end
  end
end
|
||||
260
lib/berrypod/settings.ex
Normal file
260
lib/berrypod/settings.ex
Normal file
@@ -0,0 +1,260 @@
|
||||
defmodule Berrypod.Settings do
  @moduledoc """
  The Settings context for managing site-wide configuration.

  Plain settings are stored as typed strings in the settings table; secrets
  are stored encrypted (via `Berrypod.Vault`) in `encrypted_value`.
  """

  import Ecto.Query, warn: false
  alias Berrypod.Repo
  alias Berrypod.Settings.{Setting, ThemeSettings}
  alias Berrypod.Vault

  @doc """
  Gets a setting by key with an optional default value.

  ## Examples

      iex> get_setting("site_name", "My Shop")
      "My Shop"

  """
  def get_setting(key, default \\ nil) do
    case fetch_setting(key) do
      {:ok, setting} -> decode_value(setting)
      :not_found -> default
    end
  end

  @doc """
  Sets a setting value by key.

  ## Examples

      iex> put_setting("site_name", "My Awesome Shop")
      {:ok, %Setting{}}

  """
  def put_setting(key, value, value_type \\ "string") do
    encoded_value = encode_value(value, value_type)

    # Upsert on key so repeated writes replace the stored value.
    %Setting{key: key}
    |> Setting.changeset(%{key: key, value: encoded_value, value_type: value_type})
    |> Repo.insert(
      on_conflict: {:replace, [:value, :value_type, :updated_at]},
      conflict_target: :key
    )
  end

  @doc """
  Gets the theme settings as a ThemeSettings struct.

  ## Examples

      iex> get_theme_settings()
      %ThemeSettings{mood: "neutral", typography: "clean", ...}

  """
  def get_theme_settings do
    case get_setting("theme_settings") do
      nil ->
        # Nothing stored yet — return the struct defaults.
        %ThemeSettings{}

      settings_map when is_map(settings_map) ->
        settings_map
        |> atomize_keys()
        |> then(&struct(ThemeSettings, &1))
    end
  end

  @doc """
  Updates the theme settings.

  On success the generated CSS cache is invalidated and rewarmed.

  ## Examples

      iex> update_theme_settings(%{mood: "dark", typography: "modern"})
      {:ok, %ThemeSettings{}}

  """
  def update_theme_settings(attrs) when is_map(attrs) do
    current = get_theme_settings()

    changeset = ThemeSettings.changeset(current, attrs)

    if changeset.valid? do
      settings = Ecto.Changeset.apply_changes(changeset)
      json = Jason.encode!(settings)
      put_setting("theme_settings", json, "json")

      # Invalidate and rewarm CSS cache
      alias Berrypod.Theme.{CSSCache, CSSGenerator}
      CSSCache.invalidate()
      css = CSSGenerator.generate(settings)
      CSSCache.put(css)

      {:ok, settings}
    else
      {:error, changeset}
    end
  end

  @doc """
  Applies a preset to theme settings.

  ## Examples

      iex> apply_preset(:gallery)
      {:ok, %ThemeSettings{}}

  """
  def apply_preset(preset_name) when is_atom(preset_name) do
    preset = Berrypod.Theme.Presets.get(preset_name)

    if preset do
      update_theme_settings(preset)
    else
      {:error, :preset_not_found}
    end
  end

  @doc """
  Returns whether the shop is live (visible to the public).

  Defaults to `false` for fresh installs.
  """
  def site_live? do
    get_setting("site_live", false) == true
  end

  @doc """
  Sets whether the shop is live (visible to the public).
  """
  def set_site_live(live?) when is_boolean(live?) do
    put_setting("site_live", live?, "boolean")
  end

  @doc """
  Deletes a setting by key. Missing keys are a no-op.
  """
  def delete_setting(key) do
    case fetch_setting(key) do
      {:ok, setting} -> Repo.delete(setting)
      :not_found -> :ok
    end
  end

  @doc """
  Stores an encrypted secret in the database.

  The plaintext is encrypted via Vault before storage; `value` holds only
  a placeholder so plaintext never touches the plain column.
  """
  def put_secret(key, plaintext) when is_binary(plaintext) do
    case Vault.encrypt(plaintext) do
      {:ok, encrypted} ->
        %Setting{key: key}
        |> Setting.changeset(%{
          key: key,
          value: "[encrypted]",
          value_type: "encrypted",
          encrypted_value: encrypted
        })
        |> Repo.insert(
          on_conflict: {:replace, [:value, :encrypted_value, :value_type, :updated_at]},
          conflict_target: :key
        )

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Retrieves and decrypts an encrypted secret from the database.

  Returns the plaintext value or the default if not found (or if
  decryption fails, e.g. after a key rotation).
  """
  def get_secret(key, default \\ nil) do
    case fetch_setting(key) do
      {:ok, %Setting{value_type: "encrypted", encrypted_value: encrypted}}
      when not is_nil(encrypted) ->
        case Vault.decrypt(encrypted) do
          {:ok, plaintext} -> plaintext
          {:error, _} -> default
        end

      _ ->
        default
    end
  end

  @doc """
  Checks whether an encrypted secret exists in the database.
  """
  def has_secret?(key) do
    case fetch_setting(key) do
      {:ok, %Setting{value_type: "encrypted", encrypted_value: encrypted}}
      when not is_nil(encrypted) ->
        true

      _ ->
        false
    end
  end

  @doc """
  Returns a masked hint for an encrypted secret (e.g. "sk_test_•••abc").

  Useful for admin UIs to confirm which key is active without exposing it.
  Secrets of 11 bytes or fewer are fully masked as "•••".
  """
  def secret_hint(key) do
    case get_secret(key) do
      nil ->
        nil

      # FIX: the previous guard (> 8) let the 8-byte prefix and 3-byte suffix
      # overlap for secrets of length 9-11, revealing the entire secret.
      # Require strictly more than 11 bytes so the masked middle is non-empty.
      plaintext when byte_size(plaintext) > 11 ->
        prefix = binary_part(plaintext, 0, 8)
        suffix = binary_part(plaintext, byte_size(plaintext) - 3, 3)
        "#{prefix}•••#{suffix}"

      _short ->
        "•••"
    end
  end

  # Private helpers

  defp fetch_setting(key) do
    case Repo.get_by(Setting, key: key) do
      nil -> :not_found
      setting -> {:ok, setting}
    end
  end

  # Decrypt on read; an undecryptable value (rotated key, corruption)
  # degrades to nil rather than crashing the caller.
  defp decode_value(%Setting{value_type: "encrypted", encrypted_value: encrypted})
       when not is_nil(encrypted) do
    case Vault.decrypt(encrypted) do
      {:ok, plaintext} -> plaintext
      {:error, _} -> nil
    end
  end

  defp decode_value(%Setting{value: value, value_type: "json"}), do: Jason.decode!(value)
  defp decode_value(%Setting{value: value, value_type: "integer"}), do: String.to_integer(value)

  defp decode_value(%Setting{value: value, value_type: "boolean"}),
    do: value == "true"

  defp decode_value(%Setting{value: value, value_type: "string"}), do: value

  defp encode_value(value, "json") when is_binary(value), do: value
  defp encode_value(value, "json"), do: Jason.encode!(value)
  defp encode_value(value, "integer") when is_integer(value), do: Integer.to_string(value)
  defp encode_value(value, "boolean") when is_boolean(value), do: Atom.to_string(value)
  defp encode_value(value, "string") when is_binary(value), do: value

  # Convert string keys to existing atoms only.
  #
  # FIX: this previously used String.to_atom/1 on keys read from the database;
  # atoms are never garbage collected, so stored data could grow the atom
  # table without bound. ThemeSettings declares every valid key as an atom at
  # compile time, so to_existing_atom/1 succeeds for all legitimate keys;
  # unknown keys are dropped — struct/2 would have ignored them anyway.
  defp atomize_keys(map) when is_map(map) do
    Enum.reduce(map, %{}, fn
      {key, value}, acc when is_binary(key) ->
        try do
          Map.put(acc, String.to_existing_atom(key), value)
        rescue
          ArgumentError -> acc
        end

      {key, value}, acc ->
        Map.put(acc, key, value)
    end)
  end
end
|
||||
38
lib/berrypod/settings/setting.ex
Normal file
38
lib/berrypod/settings/setting.ex
Normal file
@@ -0,0 +1,38 @@
|
||||
defmodule Berrypod.Settings.Setting do
  @moduledoc """
  Schema for a single key/value row in the `settings` table.

  `value_type` tags how `value` should be decoded ("string", "json",
  "integer", "boolean"); for "encrypted" the payload lives in
  `encrypted_value` and `value` holds only a placeholder.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  schema "settings" do
    field :key, :string
    field :value, :string
    field :value_type, :string, default: "string"
    # Ciphertext for "encrypted" settings; nil for all other value types.
    field :encrypted_value, :binary

    timestamps(type: :utc_datetime)
  end

  @doc false
  def changeset(setting, attrs) do
    setting
    |> cast(attrs, [:key, :value, :value_type, :encrypted_value])
    |> validate_required([:key, :value_type])
    |> validate_inclusion(:value_type, ~w(string json integer boolean encrypted))
    |> validate_has_value()
    |> unique_constraint(:key)
  end

  # Encrypted settings store data in encrypted_value, not value.
  # All other types require value.
  defp validate_has_value(changeset) do
    case get_field(changeset, :value_type) do
      "encrypted" ->
        validate_required(changeset, [:encrypted_value])

      _ ->
        validate_required(changeset, [:value])
    end
  end
end
|
||||
123
lib/berrypod/settings/theme_settings.ex
Normal file
123
lib/berrypod/settings/theme_settings.ex
Normal file
@@ -0,0 +1,123 @@
|
||||
defmodule Berrypod.Settings.ThemeSettings do
  @moduledoc """
  Embedded schema holding every storefront theme option, with defaults for a
  fresh install.

  Persisted as JSON in the settings table (see the Settings context) — the
  `@derive Jason.Encoder` makes the whole struct round-trippable.
  """

  use Ecto.Schema
  import Ecto.Changeset

  @derive Jason.Encoder
  @primary_key false
  embedded_schema do
    # Core theme tokens
    field :mood, :string, default: "neutral"
    field :typography, :string, default: "clean"
    field :shape, :string, default: "soft"
    field :density, :string, default: "balanced"
    # Stored as a string (not integer) — validated against ~w(2 3 4) below.
    field :grid_columns, :string, default: "4"
    field :header_layout, :string, default: "standard"
    field :accent_color, :string, default: "#f97316"

    # Branding
    field :site_name, :string, default: "Store Name"
    field :site_description, :string, default: "Discover unique products and original designs."
    field :logo_mode, :string, default: "text-only"
    field :logo_image_id, :binary_id
    # Logo height in pixels; clamped to 24..120 in the changeset.
    field :logo_size, :integer, default: 36
    field :logo_recolor, :boolean, default: false
    field :logo_color, :string, default: "#171717"

    # Header Background
    field :header_background_enabled, :boolean, default: false
    field :header_image_id, :binary_id
    # Zoom percentage (100..200) and focal position (0..100 on each axis).
    field :header_zoom, :integer, default: 100
    field :header_position_x, :integer, default: 50
    field :header_position_y, :integer, default: 50

    # Advanced customization
    field :secondary_accent_color, :string, default: "#ea580c"
    field :sale_color, :string, default: "#dc2626"
    field :font_size, :string, default: "medium"
    field :heading_weight, :string, default: "bold"
    field :layout_width, :string, default: "wide"
    field :button_style, :string, default: "filled"
    field :card_shadow, :string, default: "none"
    field :product_text_align, :string, default: "left"
    field :image_aspect_ratio, :string, default: "square"

    # Feature toggles
    field :announcement_bar, :boolean, default: true
    field :sticky_header, :boolean, default: false
    field :hover_image, :boolean, default: true
    field :show_prices, :boolean, default: true
    field :pdp_trust_badges, :boolean, default: true
    field :pdp_reviews, :boolean, default: true
    field :pdp_related_products, :boolean, default: true
  end

  @doc false
  # Casts every theme field and validates the closed option sets and numeric
  # ranges. NOTE(review): color fields (accent_color etc.) are not
  # format-validated as hex here — presumably constrained by the admin UI;
  # confirm before accepting raw API input.
  def changeset(settings, attrs) do
    settings
    |> cast(attrs, [
      :mood,
      :typography,
      :shape,
      :density,
      :grid_columns,
      :header_layout,
      :accent_color,
      :site_name,
      :site_description,
      :logo_mode,
      :logo_image_id,
      :logo_size,
      :logo_recolor,
      :logo_color,
      :header_background_enabled,
      :header_image_id,
      :header_zoom,
      :header_position_x,
      :header_position_y,
      :secondary_accent_color,
      :sale_color,
      :font_size,
      :heading_weight,
      :layout_width,
      :button_style,
      :card_shadow,
      :product_text_align,
      :image_aspect_ratio,
      :announcement_bar,
      :sticky_header,
      :hover_image,
      :show_prices,
      :pdp_trust_badges,
      :pdp_reviews,
      :pdp_related_products
    ])
    |> validate_required([:mood, :typography, :shape, :density])
    |> validate_inclusion(:mood, ~w(neutral warm cool dark))
    |> validate_inclusion(
      :typography,
      ~w(clean editorial modern classic friendly minimal impulse)
    )
    |> validate_inclusion(:shape, ~w(sharp soft round pill))
    |> validate_inclusion(:density, ~w(spacious balanced compact))
    |> validate_inclusion(:grid_columns, ~w(2 3 4))
    |> validate_inclusion(:header_layout, ~w(standard centered left))
    |> validate_inclusion(:logo_mode, ~w(text-only logo-text logo-only))
    |> validate_number(:logo_size, greater_than_or_equal_to: 24, less_than_or_equal_to: 120)
    |> validate_number(:header_zoom, greater_than_or_equal_to: 100, less_than_or_equal_to: 200)
    |> validate_number(:header_position_x,
      greater_than_or_equal_to: 0,
      less_than_or_equal_to: 100
    )
    |> validate_number(:header_position_y,
      greater_than_or_equal_to: 0,
      less_than_or_equal_to: 100
    )
    |> validate_inclusion(:layout_width, ~w(contained wide full))
    |> validate_inclusion(:card_shadow, ~w(none sm md lg))
    |> validate_inclusion(:font_size, ~w(small medium large))
    |> validate_inclusion(:heading_weight, ~w(regular medium bold))
    |> validate_inclusion(:button_style, ~w(filled outline soft))
    |> validate_inclusion(:product_text_align, ~w(left center))
    |> validate_inclusion(:image_aspect_ratio, ~w(square portrait landscape))
  end
end
|
||||
33
lib/berrypod/setup.ex
Normal file
33
lib/berrypod/setup.ex
Normal file
@@ -0,0 +1,33 @@
|
||||
defmodule Berrypod.Setup do
  @moduledoc """
  Aggregates setup status checks for the admin setup flow.
  """

  alias Berrypod.{Accounts, Products, Settings}

  @doc """
  Returns a map describing the current setup status.

  Used by the admin setup checklist and ThemeHook gate to determine
  what's been completed and whether the shop can go live.
  """
  def setup_status do
    connection = Products.get_provider_connection_by_type("printify")
    product_count = Products.count_products_for_connection(connection && connection.id)

    # A connection only counts once its API key has been stored.
    printify? = not is_nil(connection) and not is_nil(connection.api_key_encrypted)
    synced? = product_count > 0
    stripe? = Settings.has_secret?("stripe_api_key")

    %{
      admin_created: Accounts.has_admin?(),
      printify_connected: printify?,
      products_synced: synced?,
      product_count: product_count,
      stripe_connected: stripe?,
      site_live: Settings.site_live?(),
      can_go_live: printify? and synced? and stripe?
    }
  end
end
|
||||
357
lib/berrypod/shipping.ex
Normal file
357
lib/berrypod/shipping.ex
Normal file
@@ -0,0 +1,357 @@
|
||||
defmodule Berrypod.Shipping do
|
||||
@moduledoc """
|
||||
The Shipping context.
|
||||
|
||||
Manages cached shipping rates from POD providers and calculates
|
||||
shipping estimates for cart and checkout.
|
||||
"""
|
||||
|
||||
import Ecto.Query
|
||||
alias Berrypod.ExchangeRate
|
||||
alias Berrypod.Repo
|
||||
alias Berrypod.Shipping.ShippingRate
|
||||
alias Berrypod.Products
|
||||
alias Berrypod.Settings
|
||||
|
||||
require Logger
|
||||
|
||||
@country_names %{
|
||||
"AT" => "Austria",
|
||||
"AU" => "Australia",
|
||||
"BE" => "Belgium",
|
||||
"BG" => "Bulgaria",
|
||||
"CA" => "Canada",
|
||||
"CH" => "Switzerland",
|
||||
"CY" => "Cyprus",
|
||||
"CZ" => "Czechia",
|
||||
"DE" => "Germany",
|
||||
"DK" => "Denmark",
|
||||
"EE" => "Estonia",
|
||||
"ES" => "Spain",
|
||||
"FI" => "Finland",
|
||||
"FR" => "France",
|
||||
"GB" => "United Kingdom",
|
||||
"GR" => "Greece",
|
||||
"HR" => "Croatia",
|
||||
"HU" => "Hungary",
|
||||
"IE" => "Ireland",
|
||||
"IT" => "Italy",
|
||||
"JP" => "Japan",
|
||||
"LT" => "Lithuania",
|
||||
"LU" => "Luxembourg",
|
||||
"LV" => "Latvia",
|
||||
"MT" => "Malta",
|
||||
"NL" => "Netherlands",
|
||||
"NO" => "Norway",
|
||||
"NZ" => "New Zealand",
|
||||
"PL" => "Poland",
|
||||
"PT" => "Portugal",
|
||||
"RO" => "Romania",
|
||||
"SE" => "Sweden",
|
||||
"SI" => "Slovenia",
|
||||
"SK" => "Slovakia",
|
||||
"US" => "United States"
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Rate Storage
|
||||
# =============================================================================
|
||||
|
||||
@default_buffer_percent 5
|
||||
|
||||
@doc """
|
||||
Bulk upserts shipping rates for a provider connection.
|
||||
|
||||
When `exchange_rates` are provided (e.g. `%{"USD" => 0.79}`), rates are
|
||||
converted to GBP at insert time with a buffer (default #{@default_buffer_percent}%).
|
||||
This locks in the exchange rate at sync time rather than at display time.
|
||||
|
||||
Existing rates for the same (connection, blueprint, provider, country) combo
|
||||
are replaced. Returns the number of upserted rows.
|
||||
"""
|
||||
def upsert_rates(provider_connection_id, rates, exchange_rates \\ nil)
|
||||
when is_list(rates) do
|
||||
now = DateTime.utc_now() |> DateTime.truncate(:second)
|
||||
buffer = get_buffer_percent()
|
||||
|
||||
entries =
|
||||
Enum.map(rates, fn rate ->
|
||||
{first, additional, currency} =
|
||||
convert_rate_to_gbp(rate, exchange_rates, buffer)
|
||||
|
||||
%{
|
||||
id: Ecto.UUID.generate(),
|
||||
provider_connection_id: provider_connection_id,
|
||||
blueprint_id: rate.blueprint_id,
|
||||
print_provider_id: rate.print_provider_id,
|
||||
country_code: rate.country_code,
|
||||
first_item_cost: first,
|
||||
additional_item_cost: additional,
|
||||
currency: currency,
|
||||
handling_time_days: rate[:handling_time_days],
|
||||
inserted_at: now,
|
||||
updated_at: now
|
||||
}
|
||||
end)
|
||||
|
||||
{count, _} =
|
||||
Repo.insert_all(ShippingRate, entries,
|
||||
on_conflict:
|
||||
{:replace,
|
||||
[
|
||||
:first_item_cost,
|
||||
:additional_item_cost,
|
||||
:currency,
|
||||
:handling_time_days,
|
||||
:updated_at
|
||||
]},
|
||||
conflict_target: [
|
||||
:provider_connection_id,
|
||||
:blueprint_id,
|
||||
:print_provider_id,
|
||||
:country_code
|
||||
]
|
||||
)
|
||||
|
||||
Logger.info("Upserted #{count} shipping rates for connection #{provider_connection_id}")
|
||||
{:ok, count}
|
||||
end
|
||||
|
||||
@doc """
Gets a single shipping rate for a blueprint/provider/country combo.

Falls back to `REST_OF_THE_WORLD` if no exact country match exists
(Printify uses this as a catch-all for unlisted countries).
"""
def get_rate(blueprint_id, print_provider_id, country_code) do
  match =
    ShippingRate
    |> where([r], r.blueprint_id == ^blueprint_id)
    |> where([r], r.print_provider_id == ^print_provider_id)
    |> where([r], r.country_code == ^country_code)
    |> limit(1)
    |> Repo.one()

  cond do
    # Exact hit for the requested country.
    match != nil -> match
    # Already at the catch-all — nothing further to fall back to.
    country_code == "REST_OF_THE_WORLD" -> nil
    # Retry once against the provider's catch-all country code.
    true -> get_rate(blueprint_id, print_provider_id, "REST_OF_THE_WORLD")
  end
end
|
||||
|
||||
# =============================================================================
# Cart Calculation
# =============================================================================

@doc """
Calculates the shipping estimate for a cart.

Takes a list of hydrated cart items (with variant_id, quantity) and a
country code. Groups items by print provider, looks up rates, and
returns the total in the shop's currency (pence).

Returns `{:ok, cost_pence}` or `{:error, :rates_not_found}`.
"""
def calculate_for_cart([], _country_code), do: {:ok, 0}

def calculate_for_cart(cart_items, country_code) do
  variants_map =
    cart_items
    |> Enum.map(& &1.variant_id)
    |> Products.get_variants_with_products()

  # Resolve each cart item to its provider/blueprint identifiers; items whose
  # variant is unknown or whose product lacks provider data are dropped.
  provider_items =
    Enum.flat_map(cart_items, fn item ->
      case Map.get(variants_map, item.variant_id) do
        nil -> []
        variant -> provider_entry(variant, item.quantity)
      end
    end)

  case provider_items do
    [] -> {:error, :rates_not_found}
    items -> calculate_grouped(items, country_code)
  end
end

# Builds the {provider, blueprint, quantity} entry for a variant, or []
# when the product's provider_data lacks the required identifiers.
defp provider_entry(variant, quantity) do
  data = variant.product.provider_data || %{}
  blueprint_id = data["blueprint_id"]
  print_provider_id = data["print_provider_id"]

  if blueprint_id && print_provider_id do
    [
      %{
        print_provider_id: print_provider_id,
        blueprint_id: blueprint_id,
        quantity: quantity
      }
    ]
  else
    []
  end
end
|
||||
|
||||
# Items from the same print provider ship together, so each provider group
# is costed independently; a failure in any group fails the whole estimate.
defp calculate_grouped(items, country_code) do
  group_costs =
    items
    |> Enum.group_by(& &1.print_provider_id)
    |> Enum.map(fn {_provider_id, grouped} ->
      calculate_provider_group(grouped, country_code)
    end)

  case Enum.find(group_costs, &match?({:error, _}, &1)) do
    nil ->
      total =
        group_costs
        |> Enum.map(fn {:ok, cost} -> cost end)
        |> Enum.sum()

      {:ok, total}

    error ->
      error
  end
end
|
||||
|
||||
# Costs a single provider group: one first_item_cost for the whole shipment
# plus per-unit additional_item_costs for every remaining item.
# Returns {:ok, cost_in_gbp_pence} or {:error, :rates_not_found}.
defp calculate_provider_group(group_items, country_code) do
  # Look up rates for each unique blueprint in the group
  rates =
    group_items
    |> Enum.uniq_by(& &1.blueprint_id)
    |> Enum.map(fn item ->
      rate = get_rate(item.blueprint_id, item.print_provider_id, country_code)
      {item, rate}
    end)

  # A single missing rate makes the whole group unquotable.
  if Enum.any?(rates, fn {_item, rate} -> is_nil(rate) end) do
    {:error, :rates_not_found}
  else
    # Take the highest first_item_cost across all blueprints in the group
    # (Enum.max_by keeps the first entry on ties).
    {_best_item, best_rate} = Enum.max_by(rates, fn {_item, rate} -> rate.first_item_cost end)

    total_qty = Enum.reduce(group_items, 0, fn item, acc -> acc + item.quantity end)

    # Build a map of blueprint_id => rate for additional item costs
    rate_by_blueprint = Map.new(rates, fn {item, rate} -> {item.blueprint_id, rate} end)

    # First item uses the highest first_item_cost
    # Remaining items each use their own blueprint's additional_item_cost
    additional_cost =
      group_items
      |> Enum.flat_map(fn item ->
        rate = Map.get(rate_by_blueprint, item.blueprint_id)
        List.duplicate(rate.additional_item_cost, item.quantity)
      end)
      # Sorting desc before dropping means the *most expensive* unit is the
      # one absorbed by first_item_cost — the customer-favourable choice.
      |> Enum.sort(:desc)
      # Drop the first item (covered by first_item_cost)
      |> Enum.drop(1)
      |> Enum.sum()

    cost =
      if total_qty > 0 do
        best_rate.first_item_cost + additional_cost
      else
        0
      end

    # If rates were converted at sync time, currency is GBP and we're done.
    # For legacy unconverted rates, convert now using cached exchange rates.
    # NOTE(review): assumes ExchangeRate.rate_for/1 returns a to-GBP multiplier
    # for the given currency — confirm against that module.
    gbp_cost =
      if best_rate.currency == "GBP" do
        cost
      else
        ExchangeRate.rate_for(best_rate.currency)
        |> then(&ceil(cost * &1))
      end

    {:ok, gbp_cost}
  end
end
|
||||
|
||||
# =============================================================================
# Sync-time Currency Conversion
# =============================================================================

# Converts a provider rate to GBP at sync time using live exchange rates plus
# a safety buffer. Returns {first_item_cost, additional_item_cost, currency}.
# Clause order matters: GBP rates pass through untouched even when an
# exchange-rate map is supplied.
defp convert_rate_to_gbp(rate, exchange_rates, buffer_percent)

defp convert_rate_to_gbp(rate, _exchange_rates, _buffer)
     when rate.currency in ["GBP", "gbp"] do
  {rate.first_item_cost, rate.additional_item_cost, "GBP"}
end

defp convert_rate_to_gbp(rate, exchange_rates, buffer) when is_map(exchange_rates) do
  # Fall back to a conservative 0.80 multiplier for unknown currencies.
  fx_rate = Map.get(exchange_rates, String.upcase(rate.currency), 0.80)
  scale = fx_rate * (1 + buffer / 100)

  first = ceil(rate.first_item_cost * scale)
  additional = ceil(rate.additional_item_cost * scale)

  {first, additional, "GBP"}
end

# No exchange rates provided — store in original currency (legacy path)
defp convert_rate_to_gbp(rate, nil, _buffer) do
  {rate.first_item_cost, rate.additional_item_cost, rate.currency}
end
|
||||
|
||||
# Reads the shipping buffer percentage from settings, falling back to the
# compile-time default when the setting is unset or malformed.
defp get_buffer_percent do
  case Settings.get_setting("shipping_buffer_percent") do
    nil ->
      @default_buffer_percent

    val when is_integer(val) ->
      val

    val when is_binary(val) ->
      # Integer.parse/1 tolerates malformed values (e.g. "5%", "") instead of
      # crashing the whole rate sync the way String.to_integer/1 would.
      case Integer.parse(val) do
        {int, _rest} -> int
        :error -> @default_buffer_percent
      end
  end
end
|
||||
|
||||
# =============================================================================
# Queries
# =============================================================================

@doc """
Returns a list of distinct country codes that have shipping rates.

When `REST_OF_THE_WORLD` exists in the DB, all countries from the
known names map are included (they're covered by the fallback rate).
"""
def list_available_countries do
  codes =
    ShippingRate
    |> distinct(true)
    |> select([r], r.country_code)
    |> order_by([r], r.country_code)
    |> Repo.all()

  explicit = Enum.reject(codes, &(&1 == "REST_OF_THE_WORLD"))

  # The catch-all rate implicitly covers every country we know a name for.
  candidates =
    if "REST_OF_THE_WORLD" in codes do
      Map.keys(@country_names) ++ explicit
    else
      explicit
    end

  candidates
  |> Enum.uniq()
  |> Enum.sort()
end
|
||||
|
||||
@doc """
Returns `{code, name}` tuples for all countries with shipping rates,
sorted by name.
"""
def list_available_countries_with_names do
  list_available_countries()
  |> Enum.map(&{&1, country_name(&1)})
  |> Enum.sort_by(fn {_code, name} -> name end)
end
|
||||
|
||||
@doc """
Returns the display name for a country code.

Unknown codes are returned unchanged.
"""
def country_name(code) when is_binary(code) do
  code
  |> String.upcase()
  |> then(&Map.get(@country_names, &1, code))
end
|
||||
end
|
||||
53
lib/berrypod/shipping/shipping_rate.ex
Normal file
53
lib/berrypod/shipping/shipping_rate.ex
Normal file
@@ -0,0 +1,53 @@
|
||||
defmodule Berrypod.Shipping.ShippingRate do
  @moduledoc """
  Ecto schema for a cached shipping rate, keyed by
  (provider_connection, blueprint, print_provider, country).

  Costs are stored as integers (minor currency units).
  """

  use Ecto.Schema
  import Ecto.Changeset

  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id

  # handling_time_days is the only optional column.
  @cast_fields [
    :provider_connection_id,
    :blueprint_id,
    :print_provider_id,
    :country_code,
    :first_item_cost,
    :additional_item_cost,
    :currency,
    :handling_time_days
  ]
  @required_fields @cast_fields -- [:handling_time_days]

  schema "shipping_rates" do
    field :blueprint_id, :integer
    field :print_provider_id, :integer
    field :country_code, :string
    field :first_item_cost, :integer
    field :additional_item_cost, :integer
    field :currency, :string, default: "USD"
    field :handling_time_days, :integer

    belongs_to :provider_connection, Berrypod.Products.ProviderConnection

    timestamps(type: :utc_datetime)
  end

  @doc """
  Builds a changeset validating presence, non-negative costs, the
  provider-connection FK, and uniqueness of the natural key.
  """
  def changeset(rate, attrs) do
    rate
    |> cast(attrs, @cast_fields)
    |> validate_required(@required_fields)
    |> validate_number(:first_item_cost, greater_than_or_equal_to: 0)
    |> validate_number(:additional_item_cost, greater_than_or_equal_to: 0)
    |> foreign_key_constraint(:provider_connection_id)
    |> unique_constraint([
      :provider_connection_id,
      :blueprint_id,
      :print_provider_id,
      :country_code
    ])
  end
end
|
||||
135
lib/berrypod/stripe/setup.ex
Normal file
135
lib/berrypod/stripe/setup.ex
Normal file
@@ -0,0 +1,135 @@
|
||||
defmodule Berrypod.Stripe.Setup do
  @moduledoc """
  Handles Stripe account setup: key verification, automatic webhook
  endpoint creation, and teardown.
  """

  alias Berrypod.Settings
  alias Berrypod.Secrets

  require Logger

  # Events the auto-created webhook endpoint subscribes to (checkout lifecycle).
  @webhook_events ["checkout.session.completed", "checkout.session.expired"]

  @doc """
  Verifies a Stripe API key by making a lightweight Balance API call.

  Returns `:ok` on success, `{:error, message}` on an invalid key or
  connectivity failure.
  """
  def verify_api_key(api_key) do
    case Stripe.Balance.retrieve(%{}, api_key: api_key) do
      {:ok, _balance} -> :ok
      {:error, %Stripe.Error{message: message}} -> {:error, message}
      {:error, _} -> {:error, "Could not connect to Stripe"}
    end
  end

  @doc """
  Full setup flow: verify key, store it, create webhook endpoint if possible.

  Returns:
  - `{:ok, :webhook_created}` — key valid, webhook auto-created (production)
  - `{:ok, :localhost}` — key valid, but URL is localhost so webhook skipped
  - `{:error, message}` — key invalid or setup failed
  """
  def connect(api_key) do
    # The key is persisted *before* the webhook attempt, so a webhook failure
    # never loses a verified key.
    with :ok <- verify_api_key(api_key) do
      Settings.put_secret("stripe_api_key", api_key)

      case maybe_create_webhook(api_key) do
        {:ok, result} ->
          Secrets.load_all()
          {:ok, result}

        {:error, reason} ->
          # Key is valid and stored, but webhook creation failed.
          # Still load the key so checkout works (webhooks can be set up manually).
          Secrets.load_all()
          {:error, reason}
      end
    end
  end

  @doc """
  Removes Stripe configuration and deletes the webhook endpoint from Stripe.
  """
  def disconnect do
    # Delete the remote endpoint first — it needs the stored endpoint id and
    # API key, both of which are removed just below.
    delete_existing_webhook()

    for key <- ["stripe_api_key", "stripe_signing_secret", "stripe_webhook_endpoint_id"] do
      Settings.delete_setting(key)
    end

    Application.delete_env(:stripity_stripe, :api_key)
    Application.delete_env(:stripity_stripe, :signing_secret)
    :ok
  end

  @doc """
  Saves a manually-provided webhook signing secret (for dev mode / Stripe CLI).
  """
  def save_signing_secret(signing_secret) do
    Settings.put_secret("stripe_signing_secret", signing_secret)
    Secrets.load_all()
  end

  @doc """
  Returns the webhook URL for this app.
  """
  def webhook_url do
    "#{BerrypodWeb.Endpoint.url()}/webhooks/stripe"
  end

  @doc """
  Returns true if the app is running on localhost (Stripe can't reach it).
  """
  def localhost? do
    url = BerrypodWeb.Endpoint.url()
    uri = URI.parse(url)
    uri.host in ["localhost", "127.0.0.1", "0.0.0.0", "::1"]
  end

  # Skips webhook creation entirely on localhost; otherwise replaces any
  # previously-registered endpoint with a fresh one.
  defp maybe_create_webhook(api_key) do
    if localhost?() do
      {:ok, :localhost}
    else
      delete_existing_webhook()
      create_webhook(api_key)
    end
  end

  # Registers the endpoint with Stripe and persists its signing secret and id
  # so later disconnects can find and delete it.
  defp create_webhook(api_key) do
    params = %{
      url: webhook_url(),
      enabled_events: @webhook_events
    }

    case Stripe.WebhookEndpoint.create(params, api_key: api_key) do
      {:ok, endpoint} ->
        Settings.put_secret("stripe_signing_secret", endpoint.secret)
        Settings.put_setting("stripe_webhook_endpoint_id", endpoint.id, "string")
        Logger.info("Stripe webhook endpoint created: #{endpoint.id}")
        {:ok, :webhook_created}

      {:error, %Stripe.Error{message: message}} ->
        Logger.warning("Failed to create Stripe webhook: #{message}")
        {:error, message}

      {:error, _} ->
        {:error, "Failed to create webhook endpoint"}
    end
  end

  # Best-effort teardown: remote deletion failures are logged, never raised,
  # and a missing endpoint id / api key silently no-ops.
  defp delete_existing_webhook do
    endpoint_id = Settings.get_setting("stripe_webhook_endpoint_id")
    api_key = Settings.get_secret("stripe_api_key")

    if endpoint_id && api_key do
      case Stripe.WebhookEndpoint.delete(endpoint_id, api_key: api_key) do
        {:ok, _} ->
          Logger.info("Deleted Stripe webhook endpoint: #{endpoint_id}")

        {:error, reason} ->
          Logger.warning("Failed to delete webhook endpoint #{endpoint_id}: #{inspect(reason)}")
      end
    end
  end
end
|
||||
139
lib/berrypod/sync/image_download_worker.ex
Normal file
139
lib/berrypod/sync/image_download_worker.ex
Normal file
@@ -0,0 +1,139 @@
|
||||
defmodule Berrypod.Sync.ImageDownloadWorker do
  @moduledoc """
  Oban worker for downloading product images from external URLs.

  Downloads images from Printify CDN, processes through the Media pipeline
  (WebP conversion, AVIF/WebP variant generation), and links to ProductImage.

  ## Usage

      # Enqueue a download for a product image
      ImageDownloadWorker.enqueue(product_image_id)

  ## Job Args

    * `product_image_id` - The ID of the ProductImage to download
  """

  use Oban.Worker, queue: :images, max_attempts: 3

  alias Berrypod.Products
  alias Berrypod.Media

  require Logger

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"product_image_id" => product_image_id}}) do
    case Products.get_product_image(product_image_id) do
      nil ->
        # Record deleted since enqueue — cancel instead of retrying.
        {:cancel, :product_image_not_found}

      %{image_id: image_id} when not is_nil(image_id) ->
        # Already has a linked image, skip
        :ok

      product_image ->
        download_and_link(product_image)
    end
  end

  @doc """
  Enqueue an image download for a product image.
  """
  def enqueue(product_image_id) do
    %{product_image_id: product_image_id}
    |> new()
    |> Oban.insert()
  end

  # Fetches the remote image and hands it to the upload/link step.
  # Download failures are logged and returned so Oban can retry.
  defp download_and_link(product_image) do
    case download_image(product_image.src) do
      {:ok, data, content_type} ->
        upload_and_link(product_image, data, content_type)

      {:error, reason} ->
        Logger.warning(
          "[ImageDownloadWorker] Failed to download #{product_image.src}: #{inspect(reason)}"
        )

        {:error, reason}
    end
  end

  # GETs the image with a generous timeout; non-200 responses become
  # {:error, {:http_error, status}}.
  defp download_image(url) do
    case Req.get(url, receive_timeout: 30_000) do
      {:ok, %Req.Response{status: 200, body: body, headers: headers}} ->
        content_type = get_content_type(headers)
        {:ok, body, content_type}

      {:ok, %Req.Response{status: status}} ->
        {:error, {:http_error, status}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Extracts the media type from response headers, stripping any parameters
  # (e.g. "; charset=..."). Handles both binary and list header values and
  # defaults to "image/jpeg" when no content-type header is present.
  defp get_content_type(headers) do
    headers
    |> Enum.find(fn {k, _v} -> String.downcase(k) == "content-type" end)
    |> case do
      {_, value} when is_binary(value) -> value |> String.split(";") |> hd() |> String.trim()
      {_, [value | _]} -> value |> String.split(";") |> hd() |> String.trim()
      _ -> "image/jpeg"
    end
  end

  # Pushes the downloaded bytes through the Media pipeline, then links the
  # resulting image record back to the ProductImage row.
  defp upload_and_link(product_image, data, content_type) do
    filename = extract_filename(product_image.src, content_type)

    attrs = %{
      image_type: "product",
      filename: filename,
      content_type: content_type,
      file_size: byte_size(data),
      data: data
    }

    case Media.upload_image(attrs) do
      {:ok, image} ->
        case Products.link_product_image(product_image, image.id) do
          {:ok, _} ->
            Logger.info(
              "[ImageDownloadWorker] Downloaded and linked image for #{product_image.id}"
            )

            :ok

          {:error, reason} ->
            Logger.error("[ImageDownloadWorker] Failed to link image: #{inspect(reason)}")
            {:error, reason}
        end

      {:error, reason} ->
        Logger.error("[ImageDownloadWorker] Failed to upload image: #{inspect(reason)}")
        {:error, reason}
    end
  end

  # Derives a filename from the URL path, falling back to "image" and
  # appending an extension matching the content type when missing.
  defp extract_filename(url, content_type) do
    uri = URI.parse(url)
    path_parts = String.split(uri.path || "", "/")

    # BUG FIX: List.last/1 on a path ending in "/" returns "" (truthy in
    # Elixir), so the old `|| "image"` fallback never fired and the stored
    # filename degenerated to a bare extension like ".jpg".
    basename =
      case List.last(path_parts) do
        nil -> "image"
        "" -> "image"
        name -> name
      end

    # Ensure it has an extension
    if Path.extname(basename) == "" do
      ext = extension_for_content_type(content_type)
      "#{basename}#{ext}"
    else
      basename
    end
  end

  defp extension_for_content_type("image/jpeg"), do: ".jpg"
  defp extension_for_content_type("image/png"), do: ".png"
  defp extension_for_content_type("image/webp"), do: ".webp"
  defp extension_for_content_type("image/gif"), do: ".gif"
  defp extension_for_content_type(_), do: ".jpg"
end
|
||||
336
lib/berrypod/sync/mockup_enricher.ex
Normal file
336
lib/berrypod/sync/mockup_enricher.ex
Normal file
@@ -0,0 +1,336 @@
|
||||
defmodule Berrypod.Sync.MockupEnricher do
  @moduledoc """
  Oban worker that enriches Printful products with extra mockup angle images.

  After product sync, Printful products only have front-view preview images.
  This worker uses the legacy mockup generator API to produce extra angles
  (back, left, right, etc.) and appends them as additional product images.

  The hero colour gets full angle coverage (front, back, left, right).
  Other colours get a front-view mockup only (one API call each).

  Each product is processed as a separate job so failures don't block others.
  The temporary S3 URLs from the mockup generator are downloaded via the
  existing ImageDownloadWorker pipeline.
  """

  use Oban.Worker, queue: :images, max_attempts: 5

  alias Berrypod.Clients.Printful, as: Client
  alias Berrypod.Products
  alias Berrypod.Products.ProviderConnection
  alias Berrypod.Sync.ImageDownloadWorker

  require Logger

  # Polling cadence / ceiling for the async mockup-generator task,
  # and the pause between per-colour API calls (rate-limit courtesy).
  @poll_interval_ms 3_000
  @max_poll_attempts 20
  @inter_color_delay_ms 5_000

  # Mockup generator config per catalog product type:
  # {placement, area_width, area_height}
  # Apparel/accessories use "front", flat products use "default"
  @product_configs %{
    1 => {"default", 4500, 6750},
    3 => {"default", 4800, 7200},
    19 => {"default", 3150, 1350},
    71 => {"front", 4500, 5100},
    146 => {"front", 4500, 4500},
    181 => {"default", 1092, 2286},
    274 => {"default", 3300, 3300},
    394 => {"default", 3900, 2925},
    395 => {"default", 9000, 10800}
  }

  # Stagger jobs to avoid hammering Printful's rate limits
  @job_stagger_seconds 45

  @doc """
  Enqueue mockup enrichment for a product.

  Accepts an optional `delay_index` to stagger jobs (each index adds
  #{@job_stagger_seconds}s of delay).
  """
  def enqueue(conn_id, product_id, delay_index \\ 0) do
    delay = delay_index * @job_stagger_seconds
    scheduled_at = DateTime.add(DateTime.utc_now(), delay, :second)

    %{provider_connection_id: conn_id, product_id: product_id}
    |> new(scheduled_at: scheduled_at)
    |> Oban.insert()
  end

  # The `with` funnels both missing records (nil) and credential failures
  # ({:error, _}) into the else branches; only a fully-resolved setup runs
  # the enrichment.
  @impl Oban.Worker
  def perform(%Oban.Job{
        args: %{"provider_connection_id" => conn_id, "product_id" => product_id}
      }) do
    with %ProviderConnection{} = conn <- Products.get_provider_connection(conn_id),
         product when not is_nil(product) <- Products.get_product(product_id),
         :ok <- set_credentials(conn) do
      enrich_product(product)
    else
      nil -> {:cancel, :not_found}
      {:error, _} = error -> error
    end
  end

  # Guard chain: skip products lacking the provider metadata needed to call
  # the generator, or which were already enriched on a previous run.
  # provider_data keys are probed under both string and atom keys.
  defp enrich_product(product) do
    provider_data = product.provider_data || %{}

    catalog_product_id =
      provider_data["catalog_product_id"] || provider_data[:catalog_product_id]

    artwork_url = provider_data["artwork_url"] || provider_data[:artwork_url]

    color_variant_map =
      provider_data["color_variant_map"] || provider_data[:color_variant_map] || %{}

    cond do
      is_nil(catalog_product_id) ->
        Logger.info("[MockupEnricher] No catalog_product_id for #{product.title}, skipping")
        :ok

      is_nil(artwork_url) ->
        Logger.info("[MockupEnricher] No artwork_url for #{product.title}, skipping")
        :ok

      already_enriched?(product) ->
        Logger.info("[MockupEnricher] Already enriched #{product.title}, skipping")
        :ok

      true ->
        enrich_all_colours(product, catalog_product_id, artwork_url, color_variant_map)
    end
  end

  # Orchestrates hero vs non-hero colours. A rate limit (429) on the hero
  # snoozes the whole job; a 400 is treated as "unsupported product" and
  # succeeds; any other hero failure fails the job for retry.
  defp enrich_all_colours(product, catalog_product_id, artwork_url, color_variant_map) do
    colors = Map.to_list(color_variant_map)

    if colors == [] do
      # No colour info — fall back to single-variant enrichment
      Logger.info(
        "[MockupEnricher] No color_variant_map for #{product.title}, using first variant"
      )

      enrich_single_colour(product, catalog_product_id, nil, nil, artwork_url, :hero)
    else
      # First colour is the hero (gets full angles), rest get front-only
      # NOTE(review): map ordering is not guaranteed in Elixir, so which
      # colour ends up "hero" is effectively arbitrary — confirm intended.
      [{hero_color, hero_variant_id} | other_colors] = colors

      case enrich_single_colour(
             product,
             catalog_product_id,
             hero_color,
             hero_variant_id,
             artwork_url,
             :hero
           ) do
        {:ok, hero_count} ->
          Logger.info("[MockupEnricher] Hero colour #{hero_color}: #{hero_count} image(s)")

          other_count =
            enrich_other_colours(product, catalog_product_id, artwork_url, other_colors)

          total = hero_count + other_count
          Logger.info("[MockupEnricher] Total #{total} extra image(s) for #{product.title}")
          :ok

        {:error, {429, _}} ->
          Logger.info("[MockupEnricher] Rate limited for #{product.title}, snoozing 60s")
          {:snooze, 60}

        {:error, {400, _} = reason} ->
          Logger.info("[MockupEnricher] #{product.title} not supported: #{inspect(reason)}")
          :ok

        {:error, reason} ->
          Logger.warning("[MockupEnricher] Failed for #{product.title}: #{inspect(reason)}")
          {:error, reason}
      end
    end
  end

  # Front-only pass over the non-hero colours; returns the number of images
  # created. Failures on individual colours never fail the job — they are
  # logged and the accumulator simply doesn't grow.
  defp enrich_other_colours(product, catalog_product_id, artwork_url, colors) do
    Enum.reduce(colors, 0, fn {color_name, variant_id}, acc ->
      Process.sleep(@inter_color_delay_ms)

      case enrich_single_colour(
             product,
             catalog_product_id,
             color_name,
             variant_id,
             artwork_url,
             :front_only
           ) do
        {:ok, count} ->
          Logger.info("[MockupEnricher] Colour #{color_name}: #{count} image(s)")
          acc + count

        {:error, {429, _}} ->
          # Rate limited on a non-hero colour — log and continue with remaining
          Logger.info(
            "[MockupEnricher] Rate limited on #{color_name}, skipping remaining colours"
          )

          acc

        {:error, reason} ->
          Logger.warning("[MockupEnricher] Failed for colour #{color_name}: #{inspect(reason)}")
          acc
      end
    end)
  end

  # Runs one generator task for one colour and appends the resulting images.
  # The `with` has no else: any {:error, _} from task creation or polling is
  # returned unchanged to the caller.
  defp enrich_single_colour(
         product,
         catalog_product_id,
         color_name,
         variant_id,
         artwork_url,
         mode
       ) do
    # Unknown catalog products get a generic apparel-front print area.
    {placement, area_width, area_height} =
      Map.get(@product_configs, catalog_product_id, {"front", 4500, 5100})

    body = %{
      variant_ids: if(variant_id, do: [variant_id], else: []),
      format: "jpg",
      files: [
        %{
          placement: placement,
          image_url: artwork_url,
          position: %{
            area_width: area_width,
            area_height: area_height,
            width: area_width,
            height: area_height,
            top: 0,
            left: 0
          }
        }
      ]
    }

    with {:ok, task_data} <- Client.create_mockup_generator_task(catalog_product_id, body),
         task_key <- task_data["task_key"],
         {:ok, result} <- poll_generator_task(task_key) do
      images = extract_images(result, mode)

      if images == [] do
        {:ok, 0}
      else
        append_images_to_product(product, images, color_name)
      end
    end
  end

  defp poll_generator_task(task_key), do: poll_generator_task(task_key, 0)

  defp poll_generator_task(_task_key, attempt) when attempt >= @max_poll_attempts do
    {:error, :timeout}
  end

  # Sleeps *before* each poll (including the first) so the generator has time
  # to start; a 429 backs off a full minute but still consumes an attempt.
  defp poll_generator_task(task_key, attempt) do
    Process.sleep(@poll_interval_ms)

    case Client.get_mockup_generator_task(task_key) do
      {:ok, %{"status" => "completed"} = result} ->
        {:ok, result}

      {:ok, %{"status" => "error", "error" => error}} ->
        {:error, {:mockup_error, error}}

      {:ok, %{"status" => _pending}} ->
        poll_generator_task(task_key, attempt + 1)

      {:error, {429, _}} ->
        Process.sleep(60_000)
        poll_generator_task(task_key, attempt + 1)

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Hero mode: collect all extra angle images (front, back, left, right, etc.)
  # Front-only mode: just the main mockup URL
  defp extract_images(result, :hero) do
    (result["mockups"] || [])
    |> Enum.flat_map(fn mockup ->
      (mockup["extra"] || [])
      |> Enum.map(fn extra ->
        %{src: extra["url"], alt: extra["title"]}
      end)
    end)
    |> Enum.reject(&is_nil(&1.src))
    |> Enum.uniq_by(& &1.src)
  end

  defp extract_images(result, :front_only) do
    (result["mockups"] || [])
    |> Enum.take(1)
    |> Enum.map(fn mockup ->
      %{src: mockup["mockup_url"], alt: "Front"}
    end)
    |> Enum.reject(&is_nil(&1.src))
  end

  # Creates ProductImage rows after the product's current max position and
  # enqueues each for download. Returns {:ok, count_of_successful_rows};
  # individual create failures are logged but don't abort the batch.
  defp append_images_to_product(product, extra_images, color_name) do
    existing_images = Products.list_product_images(product.id)
    next_position = max_position(existing_images) + 1

    results =
      extra_images
      |> Enum.with_index(next_position)
      |> Enum.map(fn {img, position} ->
        attrs = %{
          product_id: product.id,
          src: img.src,
          alt: img.alt,
          color: color_name,
          position: position
        }

        case Products.create_product_image(attrs) do
          {:ok, product_image} ->
            ImageDownloadWorker.enqueue(product_image.id)
            {:ok, product_image}

          {:error, reason} ->
            Logger.warning("[MockupEnricher] Failed to create image: #{inspect(reason)}")
            {:error, reason}
        end
      end)

    count = Enum.count(results, &match?({:ok, _}, &1))
    {:ok, count}
  end

  # -1 so the first appended image lands at position 0 on an empty product.
  defp max_position([]), do: -1

  defp max_position(images) do
    images |> Enum.map(& &1.position) |> Enum.max()
  end

  # Check if this product already has mockup-enriched images (those with a color tag)
  defp already_enriched?(product) do
    images = Products.list_product_images(product.id)

    Enum.any?(images, fn img ->
      img.color != nil && img.alt in ["Front", "Back", "Left", "Right"]
    end)
  end

  # Stores provider credentials in the process dictionary.
  # NOTE(review): presumably the Printful Client reads :printful_api_key /
  # :printful_store_id from the process dictionary — confirm against Client;
  # this only works because Oban runs the whole job in one process.
  defp set_credentials(conn) do
    case ProviderConnection.get_api_key(conn) do
      api_key when is_binary(api_key) ->
        Process.put(:printful_api_key, api_key)
        store_id = get_in(conn.config, ["store_id"])
        if store_id, do: Process.put(:printful_store_id, store_id)
        :ok

      nil ->
        {:error, :no_api_key}
    end
  end
end
|
||||
261
lib/berrypod/sync/product_sync_worker.ex
Normal file
261
lib/berrypod/sync/product_sync_worker.ex
Normal file
@@ -0,0 +1,261 @@
|
||||
defmodule Berrypod.Sync.ProductSyncWorker do
|
||||
@moduledoc """
|
||||
Oban worker for syncing products from POD providers.
|
||||
|
||||
This worker fetches products from a provider, normalizes them,
|
||||
and upserts them into the local database.
|
||||
|
||||
## Usage
|
||||
|
||||
# Enqueue a sync for a provider connection
|
||||
ProductSyncWorker.enqueue(provider_connection_id)
|
||||
|
||||
## Job Args
|
||||
|
||||
* `provider_connection_id` - The ID of the provider connection to sync
|
||||
"""
|
||||
|
||||
use Oban.Worker, queue: :sync, max_attempts: 3
|
||||
|
||||
alias Berrypod.Products
|
||||
alias Berrypod.Products.ProviderConnection
|
||||
alias Berrypod.Providers.Provider
|
||||
alias Berrypod.Sync.ImageDownloadWorker
|
||||
alias Berrypod.Sync.MockupEnricher
|
||||
|
||||
require Logger
|
||||
|
||||
@impl Oban.Worker
def perform(%Oban.Job{args: %{"provider_connection_id" => conn_id}}) do
  conn = Products.get_provider_connection(conn_id)

  cond do
    # Connection was deleted since enqueue — nothing to sync, don't retry.
    is_nil(conn) ->
      {:cancel, :connection_not_found}

    # Explicitly disabled connections are skipped without retrying.
    match?(%ProviderConnection{enabled: false}, conn) ->
      {:cancel, :connection_disabled}

    true ->
      sync_products(conn)
  end
end
|
||||
|
||||
@doc """
Enqueue a product sync for a provider connection.
"""
def enqueue(provider_connection_id) do
  Oban.insert(new(%{provider_connection_id: provider_connection_id}))
end
|
||||
|
||||
@doc """
Enqueue a product sync with a delay (in seconds).
"""
def enqueue(provider_connection_id, delay_seconds) when is_integer(delay_seconds) do
  run_at = DateTime.add(DateTime.utc_now(), delay_seconds, :second)

  %{provider_connection_id: provider_connection_id}
  |> new(scheduled_at: run_at)
  |> Oban.insert()
end
|
||||
|
||||
  # =============================================================================
  # Private
  # =============================================================================

  # Upper bound for Task.async_stream when upserting fetched products.
  # These tasks perform local DB operations only — the provider API call
  # happened earlier in do_sync_products/1 — so a small fixed concurrency
  # keeps DB pool pressure predictable.
  @max_concurrency 5
|
||||
|
||||
  # Entry point for one connection's sync. Marks the connection "syncing"
  # (DB + PubSub broadcast) before starting, then guards the actual work so
  # an unexpected crash is recorded as "failed" and returned as an error
  # tuple instead of propagating the exception.
  defp sync_products(conn) do
    Logger.info("Starting product sync for #{conn.provider_type} (#{conn.id})")
    Products.update_sync_status(conn, "syncing")
    broadcast_sync(conn.id, {:sync_status, "syncing"})

    try do
      do_sync_products(conn)
    rescue
      # Broad rescue is intentional here: whatever crashed, the connection's
      # status must be flipped to "failed" and subscribers notified.
      e ->
        Logger.error("Product sync crashed for #{conn.provider_type}: #{Exception.message(e)}")
        Products.update_sync_status(conn, "failed")
        broadcast_sync(conn.id, {:sync_status, "failed"})
        {:error, :sync_crashed}
    end
  end
|
||||
|
||||
  # Runs the full sync pipeline for a connection:
  # fetch products -> concurrent upserts -> summary log -> optional mockup
  # enrichment (Printful) -> shipping rates -> status update -> search reindex.
  # Any {:error, reason} from the `with` steps marks the sync failed and is
  # returned unchanged (Oban sees the error tuple and may retry the job).
  defp do_sync_products(conn) do
    with {:ok, provider} <- Provider.for_connection(conn),
         {:ok, products} <- provider.fetch_products(conn) do
      Logger.info("Fetched #{length(products)} products from #{conn.provider_type}")

      results = sync_all_products(conn, products)

      # Tally per-product outcomes for the summary log line.
      created = Enum.count(results, &match?({:ok, _, :created}, &1))
      updated = Enum.count(results, &match?({:ok, _, :updated}, &1))
      unchanged = Enum.count(results, &match?({:ok, _, :unchanged}, &1))
      errors = Enum.count(results, &match?({:error, _}, &1))

      Logger.info(
        "Product sync complete for #{conn.provider_type}: " <>
          "#{created} created, #{updated} updated, #{unchanged} unchanged, #{errors} errors"
      )

      # Enqueue mockup enrichment for Printful products (extra angle images)
      if conn.provider_type == "printful" do
        enqueue_mockup_enrichment(conn, results)
      end

      # Sync shipping rates (non-fatal — logged and skipped on failure)
      sync_shipping_rates(conn, provider, products)

      Products.update_sync_status(conn, "completed", DateTime.utc_now())
      product_count = Products.count_products_for_connection(conn.id)
      broadcast_sync(conn.id, {:sync_status, "completed", product_count})

      # Rebuild search index after successful sync
      Berrypod.Search.rebuild_index()

      :ok
    else
      # Each failing step returns a self-describing {:error, reason}, so a
      # single clause is enough to record and propagate the failure.
      {:error, reason} = error ->
        Logger.error("Product sync failed for #{conn.provider_type}: #{inspect(reason)}")
        Products.update_sync_status(conn, "failed")
        broadcast_sync(conn.id, {:sync_status, "failed"})
        error
    end
  end
|
||||
|
||||
defp sync_all_products(conn, products) do
|
||||
products
|
||||
|> Task.async_stream(
|
||||
fn product_data -> sync_single_product(conn, product_data) end,
|
||||
max_concurrency: @max_concurrency,
|
||||
timeout: 30_000,
|
||||
on_timeout: :kill_task
|
||||
)
|
||||
|> Enum.map(fn
|
||||
{:ok, result} -> result
|
||||
{:exit, :timeout} -> {:error, :timeout}
|
||||
{:exit, reason} -> {:error, reason}
|
||||
end)
|
||||
end
|
||||
|
||||
defp sync_single_product(conn, product_data) do
|
||||
case sync_product(conn, product_data) do
|
||||
{:ok, product, status} ->
|
||||
sync_product_associations(product, product_data)
|
||||
{:ok, product, status}
|
||||
|
||||
error ->
|
||||
Logger.warning(
|
||||
"Failed to sync product #{product_data[:provider_product_id]}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
defp sync_product(conn, product_data) do
|
||||
attrs = %{
|
||||
provider_product_id: product_data[:provider_product_id],
|
||||
title: product_data[:title],
|
||||
description: product_data[:description],
|
||||
category: product_data[:category],
|
||||
provider_data: product_data[:provider_data]
|
||||
}
|
||||
|
||||
Products.upsert_product(conn, attrs)
|
||||
end
|
||||
|
||||
  # Publish a sync progress message on the per-connection PubSub topic
  # ("sync:<connection id>") so any subscriber receives live status updates.
  defp broadcast_sync(conn_id, message) do
    Phoenix.PubSub.broadcast(Berrypod.PubSub, "sync:#{conn_id}", message)
  end
|
||||
|
||||
defp sync_product_associations(product, product_data) do
|
||||
# Sync images
|
||||
images =
|
||||
(product_data[:images] || [])
|
||||
|> Enum.map(fn img ->
|
||||
%{
|
||||
src: img[:src],
|
||||
position: img[:position],
|
||||
alt: img[:alt],
|
||||
color: img[:color]
|
||||
}
|
||||
end)
|
||||
|
||||
image_results = Products.sync_product_images(product, images)
|
||||
|
||||
# Enqueue downloads for images without image_id
|
||||
Enum.each(image_results, fn
|
||||
{:ok, %{image_id: nil, id: id}} -> ImageDownloadWorker.enqueue(id)
|
||||
_ -> :ok
|
||||
end)
|
||||
|
||||
# Sync variants
|
||||
variants =
|
||||
(product_data[:variants] || [])
|
||||
|> Enum.map(fn var ->
|
||||
%{
|
||||
provider_variant_id: var[:provider_variant_id],
|
||||
title: var[:title],
|
||||
sku: var[:sku],
|
||||
price: var[:price],
|
||||
cost: var[:cost],
|
||||
options: var[:options],
|
||||
is_enabled: var[:is_enabled],
|
||||
is_available: var[:is_available]
|
||||
}
|
||||
end)
|
||||
|
||||
Products.sync_product_variants(product, variants)
|
||||
|
||||
# Recompute denormalized fields (cheapest_price, in_stock, on_sale) from variants
|
||||
Products.recompute_cached_fields(product)
|
||||
end
|
||||
|
||||
defp sync_shipping_rates(conn, provider, products) do
|
||||
if function_exported?(provider, :fetch_shipping_rates, 2) do
|
||||
# Fetch live exchange rates so shipping costs are stored in GBP
|
||||
{:ok, exchange_rates} = Berrypod.ExchangeRate.fetch_and_cache()
|
||||
|
||||
case provider.fetch_shipping_rates(conn, products) do
|
||||
{:ok, rates} when rates != [] ->
|
||||
Berrypod.Shipping.upsert_rates(conn.id, rates, exchange_rates)
|
||||
|
||||
{:ok, []} ->
|
||||
Logger.info("No shipping rates returned for #{conn.provider_type}")
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning(
|
||||
"Shipping rate sync failed for #{conn.provider_type}: #{inspect(reason)}"
|
||||
)
|
||||
end
|
||||
end
|
||||
rescue
|
||||
e ->
|
||||
Logger.error(
|
||||
"Shipping rate sync crashed for #{conn.provider_type}: #{Exception.message(e)}"
|
||||
)
|
||||
end
|
||||
|
||||
# Enqueue MockupEnricher jobs for created/updated Printful products
|
||||
defp enqueue_mockup_enrichment(conn, results) do
|
||||
products_to_enrich =
|
||||
results
|
||||
|> Enum.filter(&match?({:ok, _, status} when status in [:created, :updated], &1))
|
||||
|> Enum.map(fn {:ok, product, _status} -> product end)
|
||||
|
||||
if products_to_enrich != [] do
|
||||
Logger.info(
|
||||
"Enqueueing mockup enrichment for #{length(products_to_enrich)} Printful product(s)"
|
||||
)
|
||||
|
||||
products_to_enrich
|
||||
|> Enum.with_index()
|
||||
|> Enum.each(fn {product, index} ->
|
||||
MockupEnricher.enqueue(conn.id, product.id, index)
|
||||
end)
|
||||
end
|
||||
rescue
|
||||
e ->
|
||||
Logger.error("Mockup enrichment enqueue failed: #{Exception.message(e)}")
|
||||
end
|
||||
end
|
||||
34
lib/berrypod/sync/scheduled_sync_worker.ex
Normal file
34
lib/berrypod/sync/scheduled_sync_worker.ex
Normal file
@@ -0,0 +1,34 @@
|
||||
defmodule Berrypod.Sync.ScheduledSyncWorker do
  @moduledoc """
  Oban cron worker for periodic product + shipping rate sync.

  Runs every 6 hours, enqueues a ProductSyncWorker for each enabled
  provider connection. The :sync queue (concurrency 1) serialises
  these with any manual syncs triggered from the admin UI.
  """

  use Oban.Worker, queue: :sync, max_attempts: 1

  alias Berrypod.Products

  require Logger

  @impl Oban.Worker
  def perform(%Oban.Job{}) do
    enabled_connections =
      Products.list_provider_connections()
      |> Enum.filter(& &1.enabled)

    case enabled_connections do
      [] ->
        Logger.info("Scheduled sync: no enabled provider connections, skipping")

      connections ->
        Logger.info("Scheduled sync: enqueuing sync for #{length(connections)} connection(s)")
        Enum.each(connections, &Products.enqueue_sync/1)
    end

    :ok
  end
end
|
||||
121
lib/berrypod/theme/css_cache.ex
Normal file
121
lib/berrypod/theme/css_cache.ex
Normal file
@@ -0,0 +1,121 @@
|
||||
defmodule Berrypod.Theme.CSSCache do
  @moduledoc """
  GenServer that maintains an ETS table for caching generated theme CSS.

  This provides fast lookups for theme CSS without regenerating it on every request.
  The cache is invalidated when theme settings are updated.
  """

  use GenServer

  # The table is :public, so reads and writes go straight to ETS without a
  # GenServer round-trip; the process exists only to own the table and warm it.
  @table :theme_css_cache
  @key :site_theme

  ## Client API

  @doc """
  Starts the CSS cache GenServer.
  """
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Gets cached CSS for the site theme.

  Returns `{:ok, css}` if found in cache, or `:miss` if not cached.

  ## Examples

      iex> CSSCache.get()
      {:ok, "/* Theme CSS ... */"}

      iex> CSSCache.get()
      :miss

  """
  def get do
    case :ets.lookup(@table, @key) do
      [{@key, css}] -> {:ok, css}
      [] -> :miss
    end
  end

  @doc """
  Caches CSS for the site theme.

  ## Examples

      iex> CSSCache.put(css_string)
      :ok

  """
  def put(css) when is_binary(css) do
    :ets.insert(@table, {@key, css})
    :ok
  end

  @doc """
  Invalidates the cached CSS, forcing regeneration on next request.

  ## Examples

      iex> CSSCache.invalidate()
      :ok

  """
  def invalidate do
    :ets.delete(@table, @key)
    :ok
  rescue
    # :ets.delete raises ArgumentError when the table does not exist yet
    # (e.g. invalidation triggered before this process started) — treat
    # a missing table as already invalidated.
    ArgumentError -> :ok
  end

  @doc """
  Warms the cache by generating and storing CSS from current theme settings.

  ## Examples

      iex> CSSCache.warm()
      :ok

  """
  def warm do
    settings = Berrypod.Settings.get_theme_settings()

    # Use endpoint's static_path for digested URLs in production
    generated_css =
      Berrypod.Theme.CSSGenerator.generate(settings, &BerrypodWeb.Endpoint.static_path/1)

    put(generated_css)
    :ok
  end

  ## Server Callbacks

  @impl true
  def init(_opts) do
    table_opts = [
      :set,
      :public,
      :named_table,
      read_concurrency: true,
      write_concurrency: false
    ]

    :ets.new(@table, table_opts)

    # Warm asynchronously via handle_continue so supervisor startup is not
    # blocked on CSS generation.
    {:ok, %{}, {:continue, :warm}}
  end

  @impl true
  def handle_continue(:warm, state) do
    # Best-effort warm: if generation fails (e.g. settings unavailable at
    # boot), the cache simply stays cold and is filled on demand.
    try do
      warm()
    rescue
      _ -> :ok
    end

    {:noreply, state}
  end
end
|
||||
379
lib/berrypod/theme/css_generator.ex
Normal file
379
lib/berrypod/theme/css_generator.ex
Normal file
@@ -0,0 +1,379 @@
|
||||
defmodule Berrypod.Theme.CSSGenerator do
  @moduledoc """
  Generates CSS custom properties (Layer 2: Theme Tokens) from theme settings.

  This module converts ThemeSettings into CSS variables that bridge the gap
  between fixed primitives (Layer 1) and semantic aliases (Layer 3).

  For the shop (public pages), this generates ALL theme tokens inline, so the
  shop doesn't need the attribute-based selectors in theme-layer2-attributes.css.
  The theme editor still uses those selectors for live preview switching.
  """

  alias Berrypod.Settings.ThemeSettings
  alias Berrypod.Theme.Fonts

  @doc """
  Generates CSS for theme settings.

  Returns a string of CSS custom properties that can be injected into a <style> tag.
  This includes ALL theme tokens (mood, typography, shape, density) so the shop
  pages don't need the attribute-based CSS selectors.

  Also includes @font-face declarations for the fonts used by the typography preset.

  Accepts an optional path_resolver function for digested font paths.
  In production, pass `&BerrypodWeb.Endpoint.static_path/1`.
  """
  def generate(%ThemeSettings{} = settings, path_resolver \\ fn path -> path end) do
    """
    /* Font faces for #{settings.typography} typography */
    #{Fonts.generate_font_faces(settings.typography, path_resolver)}

    /* Theme Tokens - Layer 2 (dynamically generated) */
    .themed {
      /* Mood colors */
      #{generate_mood(settings.mood)}

      /* Typography */
      #{generate_typography(settings.typography)}

      /* Shape (border radii) */
      #{generate_shape(settings.shape)}

      /* Density */
      #{generate_density(settings.density)}

      /* Slider-controlled values */
      #{generate_accent(settings.accent_color)}
      #{generate_secondary_colors(settings)}
      #{generate_font_size(settings.font_size)}
      #{generate_heading_weight(settings.heading_weight)}
      #{generate_layout_width(settings.layout_width)}
      #{generate_button_style(settings.button_style)}
      #{generate_product_text_align(settings.product_text_align)}
      #{generate_image_aspect_ratio(settings.image_aspect_ratio)}
    }
    """
    |> String.trim()
  end

  # Mood colors - surface, text, and border colors
  defp generate_mood("neutral") do
    """
    --t-surface-base: #ffffff;
    --t-surface-raised: #ffffff;
    --t-surface-sunken: #f5f5f5;
    --t-surface-overlay: rgba(255, 255, 255, 0.95);
    --t-text-primary: #171717;
    --t-text-secondary: #525252;
    --t-text-tertiary: #737373;
    --t-text-inverse: #ffffff;
    --t-border-default: #e5e5e5;
    --t-border-subtle: #f0f0f0;
    """
  end

  defp generate_mood("warm") do
    """
    --t-surface-base: #fdf8f3;
    --t-surface-raised: #fffcf8;
    --t-surface-sunken: #f5ebe0;
    --t-surface-overlay: rgba(253, 248, 243, 0.95);
    --t-text-primary: #1c1917;
    --t-text-secondary: #57534e;
    --t-text-tertiary: #78716c;
    --t-text-inverse: #ffffff;
    --t-border-default: #e7e0d8;
    --t-border-subtle: #f0ebe4;
    """
  end

  defp generate_mood("cool") do
    """
    --t-surface-base: #f4f7fb;
    --t-surface-raised: #f8fafc;
    --t-surface-sunken: #e8eff7;
    --t-surface-overlay: rgba(244, 247, 251, 0.95);
    --t-text-primary: #0f172a;
    --t-text-secondary: #475569;
    --t-text-tertiary: #64748b;
    --t-text-inverse: #ffffff;
    --t-border-default: #d4dce8;
    --t-border-subtle: #e8eff5;
    """
  end

  defp generate_mood("dark") do
    """
    --t-surface-base: #0a0a0a;
    --t-surface-raised: #171717;
    --t-surface-sunken: #000000;
    --t-surface-overlay: rgba(23, 23, 23, 0.95);
    --t-text-primary: #fafafa;
    --t-text-secondary: #a3a3a3;
    --t-text-tertiary: #737373;
    --t-text-inverse: #171717;
    --t-border-default: #262626;
    --t-border-subtle: #1c1c1c;
    """
  end

  # Fallback for any other mood value
  defp generate_mood(_), do: generate_mood("neutral")

  # Typography style settings (weight and tracking per preset)
  @typography_styles %{
    "clean" => %{weight: 600, tracking: "-0.02em"},
    "editorial" => %{weight: 500, tracking: "-0.01em"},
    "modern" => %{weight: 500, tracking: "-0.03em"},
    "classic" => %{weight: 500, tracking: "0"},
    "friendly" => %{weight: 600, tracking: "-0.01em"},
    "minimal" => %{weight: 500, tracking: "0"},
    "impulse" => %{weight: 300, tracking: "0.02em"}
  }

  # Typography - font families, weights, and tracking
  # Uses Fonts module for DRY font-family declarations
  defp generate_typography(typography) do
    %{heading: heading_key, body: body_key} = Fonts.fonts_for_typography(typography)
    style = Map.get(@typography_styles, typography, @typography_styles["clean"])

    """
    --t-font-heading: #{Fonts.font_family(heading_key)};
    --t-font-body: #{Fonts.font_family(body_key)};
    --t-heading-weight: #{style.weight};
    --t-heading-tracking: #{style.tracking};
    """
  end

  # Shape - border radii
  defp generate_shape("soft") do
    """
    --t-radius-sm: 0.25rem;
    --t-radius-md: 0.5rem;
    --t-radius-lg: 0.75rem;
    --t-radius-button: 0.5rem;
    --t-radius-card: 0.75rem;
    --t-radius-input: 0.5rem;
    --t-radius-image: 0.5rem;
    """
  end

  defp generate_shape("sharp") do
    """
    --t-radius-sm: 0;
    --t-radius-md: 0;
    --t-radius-lg: 0;
    --t-radius-button: 0;
    --t-radius-card: 0;
    --t-radius-input: 0;
    --t-radius-image: 0;
    """
  end

  defp generate_shape("round") do
    """
    --t-radius-sm: 0.5rem;
    --t-radius-md: 0.75rem;
    --t-radius-lg: 1rem;
    --t-radius-button: 0.75rem;
    --t-radius-card: 1rem;
    --t-radius-input: 0.75rem;
    --t-radius-image: 0.75rem;
    """
  end

  defp generate_shape("pill") do
    """
    --t-radius-sm: 9999px;
    --t-radius-md: 9999px;
    --t-radius-lg: 1rem;
    --t-radius-button: 9999px;
    --t-radius-card: 1rem;
    --t-radius-input: 9999px;
    --t-radius-image: 0.75rem;
    """
  end

  # Fallback for any other shape value
  defp generate_shape(_), do: generate_shape("soft")

  # Density - spacing multiplier
  defp generate_density("balanced") do
    """
    --t-density: 1;
    --space-xs: 0.5rem;
    --space-sm: 0.75rem;
    --space-md: 1rem;
    --space-lg: 1.5rem;
    --space-xl: 2rem;
    --space-2xl: 3rem;
    """
  end

  defp generate_density("spacious") do
    """
    --t-density: 1.25;
    --space-xs: 0.625rem;
    --space-sm: 0.9375rem;
    --space-md: 1.25rem;
    --space-lg: 1.875rem;
    --space-xl: 2.5rem;
    --space-2xl: 3.75rem;
    """
  end

  defp generate_density("compact") do
    """
    --t-density: 0.85;
    --space-xs: 0.425rem;
    --space-sm: 0.6375rem;
    --space-md: 0.85rem;
    --space-lg: 1.275rem;
    --space-xl: 1.7rem;
    --space-2xl: 2.55rem;
    """
  end

  # Fallback for any other density value
  defp generate_density(_), do: generate_density("balanced")

  # Accent color with HSL breakdown
  defp generate_accent(hex_color) do
    {h, s, l} = hex_to_hsl(hex_color)

    """
    --t-accent-h: #{h};
    --t-accent-s: #{s}%;
    --t-accent-l: #{l}%;
    """
  end

  # Secondary colors
  defp generate_secondary_colors(settings) do
    """
    --t-secondary-accent: #{settings.secondary_accent_color};
    --t-sale-color: #{settings.sale_color};
    """
  end

  # NOTE: the slider generators below each end with a catch-all fallback
  # clause, matching the mood/shape/density generators above, so an
  # unexpected persisted value degrades to a sane default instead of
  # raising FunctionClauseError while rendering a page.

  # Font size variations
  # Using 18px as base for better accessibility (WCAG recommends 18px+)
  # Small: 18px, Medium: 19px, Large: 20px
  defp generate_font_size("small") do
    "--t-font-size-scale: 1.125;"
  end

  defp generate_font_size("medium") do
    "--t-font-size-scale: 1.1875;"
  end

  defp generate_font_size("large") do
    "--t-font-size-scale: 1.25;"
  end

  # Fallback: "medium" chosen as the middle of the range — TODO confirm
  # against ThemeSettings defaults.
  defp generate_font_size(_), do: generate_font_size("medium")

  # Heading weight (override typography default)
  defp generate_heading_weight("regular") do
    "--t-heading-weight-override: 400;"
  end

  defp generate_heading_weight("medium") do
    "--t-heading-weight-override: 500;"
  end

  defp generate_heading_weight("bold") do
    "--t-heading-weight-override: 700;"
  end

  # Fallback: emit no override so the typography preset's own
  # --t-heading-weight stays in effect.
  defp generate_heading_weight(_), do: ""

  # Layout width
  defp generate_layout_width("contained") do
    "--t-layout-max-width: 1100px;"
  end

  defp generate_layout_width("wide") do
    "--t-layout-max-width: 1400px;"
  end

  defp generate_layout_width("full") do
    "--t-layout-max-width: 100%;"
  end

  # Fallback: "contained" is the most conservative width — TODO confirm.
  defp generate_layout_width(_), do: generate_layout_width("contained")

  # Button style
  defp generate_button_style("filled") do
    "--t-button-style: filled;"
  end

  defp generate_button_style("outline") do
    "--t-button-style: outline;"
  end

  defp generate_button_style("soft") do
    "--t-button-style: soft;"
  end

  # Fallback for any other button style value
  defp generate_button_style(_), do: generate_button_style("filled")

  # Product text alignment
  defp generate_product_text_align("left") do
    "--t-product-text-align: left;"
  end

  defp generate_product_text_align("center") do
    "--t-product-text-align: center;"
  end

  # Fallback for any other alignment value
  defp generate_product_text_align(_), do: generate_product_text_align("left")

  # Image aspect ratio
  defp generate_image_aspect_ratio("square") do
    "--t-image-aspect-ratio: 1 / 1;"
  end

  defp generate_image_aspect_ratio("portrait") do
    "--t-image-aspect-ratio: 3 / 4;"
  end

  defp generate_image_aspect_ratio("landscape") do
    "--t-image-aspect-ratio: 4 / 3;"
  end

  # Fallback for any other aspect ratio value
  defp generate_image_aspect_ratio(_), do: generate_image_aspect_ratio("square")

  # Convert a hex color string ("#rrggbb" or "rrggbb") to {hue, sat%, light%}.
  # Any malformed value (wrong length, non-hex digits) falls back to a
  # neutral gray {0, 0, 50} instead of raising mid-render.
  defp hex_to_hsl("#" <> hex), do: hex_to_hsl(hex)

  defp hex_to_hsl(hex) when byte_size(hex) == 6 do
    with {r, ""} <- Integer.parse(String.slice(hex, 0..1), 16),
         {g, ""} <- Integer.parse(String.slice(hex, 2..3), 16),
         {b, ""} <- Integer.parse(String.slice(hex, 4..5), 16) do
      rgb_to_hsl(r / 255, g / 255, b / 255)
    else
      # Non-hex digits (Integer.parse returned :error or left a remainder)
      _ -> {0, 0, 50}
    end
  end

  # Handle invalid hex values
  defp hex_to_hsl(_), do: {0, 0, 50}

  # Standard RGB (0-1 normalized) -> HSL conversion.
  defp rgb_to_hsl(r, g, b) do
    max = Enum.max([r, g, b])
    min = Enum.min([r, g, b])
    delta = max - min

    # Lightness is the midpoint of the channel extremes.
    l = (max + min) / 2

    {h, s} =
      if delta == 0 do
        # Achromatic: hue/saturation are undefined, use 0.
        {0, 0}
      else
        s = if l > 0.5, do: delta / (2 - max - min), else: delta / (max + min)

        h =
          cond do
            max == r -> (g - b) / delta + if(g < b, do: 6, else: 0)
            max == g -> (b - r) / delta + 2
            max == b -> (r - g) / delta + 4
          end

        {h * 60, s}
      end

    {round(h), round(s * 100), round(l * 100)}
  end
end
|
||||
232
lib/berrypod/theme/fonts.ex
Normal file
232
lib/berrypod/theme/fonts.ex
Normal file
@@ -0,0 +1,232 @@
|
||||
defmodule Berrypod.Theme.Fonts do
  @moduledoc """
  Centralized font configuration for the theme system.

  This module defines all available fonts and their variants, maps typography
  presets to font pairs, and generates CSS @font-face declarations.

  Font files are expected to be in /priv/static/fonts/ and served at /fonts/.
  """

  # Font definitions: family name, file prefix, and available weights
  @fonts %{
    inter: %{
      family: "Inter",
      file_prefix: "inter-v20-latin",
      weights: [300, 400, 500, 600, 700],
      fallback: "system-ui, sans-serif"
    },
    manrope: %{
      family: "Manrope",
      file_prefix: "manrope-v20-latin",
      weights: [400, 500, 600, 700],
      fallback: "system-ui, sans-serif"
    },
    raleway: %{
      family: "Raleway",
      file_prefix: "raleway-v37-latin",
      weights: [300, 400, 500],
      fallback: "system-ui, sans-serif"
    },
    playfair: %{
      family: "Playfair Display",
      file_prefix: "playfair-display-v40-latin",
      weights: [400, 500, 700],
      fallback: "Georgia, serif"
    },
    space_grotesk: %{
      family: "Space Grotesk",
      file_prefix: "space-grotesk-v22-latin",
      weights: [400, 500, 600],
      fallback: "system-ui, sans-serif"
    },
    cormorant: %{
      family: "Cormorant Garamond",
      file_prefix: "cormorant-garamond-v21-latin",
      weights: [400, 500, 600],
      fallback: "Georgia, serif"
    },
    source_serif: %{
      family: "Source Serif 4",
      file_prefix: "source-serif-4-v14-latin",
      weights: [400, 600],
      fallback: "Georgia, serif"
    },
    fraunces: %{
      family: "Fraunces",
      file_prefix: "fraunces-v38-latin",
      weights: [400, 500, 600, 700],
      fallback: "Georgia, serif"
    },
    work_sans: %{
      family: "Work Sans",
      file_prefix: "work-sans-v24-latin",
      weights: [300, 400, 500, 600],
      fallback: "system-ui, sans-serif"
    },
    dm_sans: %{
      family: "DM Sans",
      file_prefix: "dm-sans-v17-latin",
      weights: [400, 500, 600, 700],
      fallback: "system-ui, sans-serif"
    }
  }

  # Typography presets map to heading and body font keys
  @typography_fonts %{
    "clean" => %{heading: :manrope, body: :inter},
    "editorial" => %{heading: :playfair, body: :raleway},
    "modern" => %{heading: :space_grotesk, body: :inter},
    "classic" => %{heading: :cormorant, body: :source_serif},
    "friendly" => %{heading: :fraunces, body: :work_sans},
    "minimal" => %{heading: :dm_sans, body: :source_serif},
    "impulse" => %{heading: :raleway, body: :inter}
  }

  @doc """
  Returns the font configuration for a given font key.

  Returns `nil` for unknown keys.
  """
  def get_font(key), do: Map.get(@fonts, key)

  @doc """
  Returns all font configurations.
  """
  def all_fonts, do: @fonts

  @doc """
  Returns the heading and body font keys for a typography preset.

  Unknown presets fall back to the "clean" pairing.
  """
  def fonts_for_typography(typography) do
    Map.get(@typography_fonts, typography, @typography_fonts["clean"])
  end

  @doc """
  Returns the CSS font-family declaration (with fallbacks) for a font key.

  Unknown keys fall back to a generic system-ui stack.
  """
  def font_family(key) do
    case get_font(key) do
      %{family: family, fallback: fallback} -> "'#{family}', #{fallback}"
      nil -> "system-ui, sans-serif"
    end
  end

  @doc """
  Generates @font-face CSS for a specific typography preset.

  Only includes the fonts needed for that preset.

  Accepts an optional path_resolver function to transform font URLs.
  In production, pass `&BerrypodWeb.Endpoint.static_path/1` for digested paths.
  """
  def generate_font_faces(typography, path_resolver \\ &default_path_resolver/1) do
    %{heading: heading_key, body: body_key} = fonts_for_typography(typography)

    # Avoid duplicate @font-face blocks when heading and body share a font.
    font_keys =
      if heading_key == body_key do
        [heading_key]
      else
        [heading_key, body_key]
      end

    Enum.map_join(font_keys, "\n", &generate_font_face_for_font(&1, path_resolver))
  end

  @doc """
  Generates @font-face CSS for ALL fonts.

  Used in the theme editor where users can switch between typography presets.

  Accepts an optional path_resolver function for digested paths.
  """
  def generate_all_font_faces(path_resolver \\ &default_path_resolver/1) do
    @fonts
    |> Map.keys()
    |> Enum.map_join("\n", &generate_font_face_for_font(&1, path_resolver))
  end

  @doc """
  Returns the font file paths (without /fonts prefix) for a typography preset.

  Used by templates to generate preload links with digested paths via ~p sigil.
  """
  def preload_font_paths(typography) do
    %{heading: heading_key, body: body_key} = fonts_for_typography(typography)

    # Preload the most commonly used weights
    preload_weights = %{
      # For headings, preload the typical heading weight (500-600)
      heading: [500, 600],
      # For body, preload regular and semibold
      body: [400, 600]
    }

    heading_paths = font_paths_for_weights(heading_key, preload_weights.heading)
    body_paths = font_paths_for_weights(body_key, preload_weights.body)

    # Deduplicate in case heading and body use the same font
    (heading_paths ++ body_paths) |> Enum.uniq()
  end

  # Builds "prefix-weight.woff2" filenames for the requested weights,
  # keeping only weights the font actually ships.
  defp font_paths_for_weights(key, weights) do
    case get_font(key) do
      %{file_prefix: prefix, weights: available_weights} ->
        weights
        |> Enum.filter(&(&1 in available_weights))
        |> Enum.map(fn weight ->
          # Font files use "regular" instead of "400" in their names.
          weight_suffix = if weight == 400, do: "regular", else: to_string(weight)
          "#{prefix}-#{weight_suffix}.woff2"
        end)

      nil ->
        []
    end
  end

  @doc """
  Generates preload link tags for a specific typography preset.

  Returns a list of maps with href, as, type, and crossorigin attributes.

  Accepts an optional path_resolver function for digested paths.
  In production, pass `&BerrypodWeb.Endpoint.static_path/1`.
  """
  def preload_links(typography, path_resolver \\ &default_path_resolver/1) do
    typography
    |> preload_font_paths()
    |> Enum.map(fn filename ->
      %{
        # FIX: interpolate the actual filename into the href; the previous
        # code emitted a literal, broken path for every preload link.
        href: path_resolver.("/fonts/#{filename}"),
        as: "font",
        type: "font/woff2",
        # Font preloads must be fetched in CORS mode to be reused.
        crossorigin: true
      }
    end)
  end

  # Private functions

  # Identity resolver used when no digested-path resolver is supplied.
  defp default_path_resolver(path), do: path

  # Emits one @font-face block per available weight of the given font key;
  # unknown keys produce no output.
  defp generate_font_face_for_font(key, path_resolver) do
    case get_font(key) do
      %{family: family, file_prefix: prefix, weights: weights} ->
        Enum.map_join(weights, "", fn weight ->
          weight_suffix = if weight == 400, do: "regular", else: to_string(weight)
          font_path = path_resolver.("/fonts/#{prefix}-#{weight_suffix}.woff2")

          """
          @font-face {
            font-family: '#{family}';
            font-style: normal;
            font-weight: #{weight};
            font-display: swap;
            src: url('#{font_path}') format('woff2');
          }
          """
        end)

      nil ->
        ""
    end
  end
end
|
||||
226
lib/berrypod/theme/presets.ex
Normal file
226
lib/berrypod/theme/presets.ex
Normal file
@@ -0,0 +1,226 @@
|
||||
defmodule Berrypod.Theme.Presets do
|
||||
@moduledoc """
|
||||
Defines the 8 curated theme presets for Berrypod.
|
||||
"""
|
||||
|
||||
  # Each preset fixes the core look-and-feel settings (mood, typography,
  # shape, density, grid, header layout, accent color, layout width, card
  # shadow, announcement bar) — the keys listed in @core_keys.
  @presets %{
    gallery: %{
      mood: "warm",
      typography: "editorial",
      shape: "soft",
      density: "spacious",
      grid_columns: "3",
      header_layout: "centered",
      accent_color: "#e85d04",
      layout_width: "wide",
      card_shadow: "sm",
      announcement_bar: true
    },
    studio: %{
      mood: "neutral",
      typography: "clean",
      shape: "soft",
      density: "balanced",
      grid_columns: "4",
      header_layout: "standard",
      accent_color: "#2563eb",
      layout_width: "wide",
      card_shadow: "sm",
      announcement_bar: true
    },
    boutique: %{
      mood: "warm",
      typography: "classic",
      shape: "soft",
      density: "balanced",
      grid_columns: "3",
      header_layout: "left",
      accent_color: "#b45309",
      layout_width: "contained",
      card_shadow: "md",
      announcement_bar: true
    },
    bold: %{
      mood: "neutral",
      typography: "modern",
      shape: "sharp",
      density: "compact",
      grid_columns: "4",
      header_layout: "standard",
      accent_color: "#dc2626",
      layout_width: "full",
      card_shadow: "none",
      announcement_bar: true
    },
    playful: %{
      mood: "neutral",
      typography: "friendly",
      shape: "pill",
      density: "balanced",
      grid_columns: "4",
      header_layout: "standard",
      accent_color: "#8b5cf6",
      layout_width: "wide",
      card_shadow: "md",
      announcement_bar: true
    },
    minimal: %{
      mood: "neutral",
      typography: "impulse",
      shape: "sharp",
      density: "spacious",
      grid_columns: "2",
      header_layout: "standard",
      accent_color: "#171717",
      layout_width: "full",
      card_shadow: "none",
      announcement_bar: false
    },
    night: %{
      mood: "dark",
      typography: "modern",
      shape: "soft",
      density: "balanced",
      grid_columns: "4",
      header_layout: "standard",
      accent_color: "#f97316",
      layout_width: "wide",
      card_shadow: "lg",
      announcement_bar: true
    },
    classic: %{
      mood: "warm",
      typography: "classic",
      shape: "soft",
      density: "spacious",
      grid_columns: "3",
      header_layout: "standard",
      accent_color: "#166534",
      layout_width: "contained",
      card_shadow: "sm",
      announcement_bar: true
    }
  }
|
||||
|
||||
  # Short, human-readable summary for each preset, returned by
  # get_description/1 and all_with_descriptions/0.
  @descriptions %{
    gallery: "Editorial serif headlines",
    studio: "Clean modern sans-serif",
    boutique: "Elegant classic serif",
    bold: "Tech-forward geometric",
    playful: "Quirky variable font",
    minimal: "Light refined pairing",
    night: "Dark tech aesthetic",
    classic: "Traditional luxury serif"
  }

  # Core keys used to match presets (excludes branding-specific settings)
  @core_keys ~w(mood typography shape density grid_columns header_layout accent_color layout_width card_shadow announcement_bar)a
|
||||
|
||||
@doc """
|
||||
Returns all available presets.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> all()
|
||||
%{gallery: %{...}, studio: %{...}, ...}
|
||||
|
||||
"""
|
||||
def all, do: @presets
|
||||
|
||||
@doc """
|
||||
Gets a preset by name.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> get(:gallery)
|
||||
%{mood: "warm", typography: "editorial", ...}
|
||||
|
||||
iex> get(:nonexistent)
|
||||
nil
|
||||
|
||||
"""
|
||||
def get(preset_name) when is_atom(preset_name) do
|
||||
Map.get(@presets, preset_name)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Lists all preset names.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> list_names()
|
||||
[:gallery, :studio, :boutique, :bold, :playful, :minimal, :night, :classic]
|
||||
|
||||
"""
|
||||
def list_names do
|
||||
Map.keys(@presets)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Gets the description for a preset.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> get_description(:gallery)
|
||||
"Elegant & editorial"
|
||||
|
||||
"""
|
||||
def get_description(preset_name) when is_atom(preset_name) do
|
||||
Map.get(@descriptions, preset_name, "")
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns all presets with their descriptions.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> all_with_descriptions()
|
||||
[{:bold, "High contrast, strong"}, ...]
|
||||
|
||||
"""
|
||||
def all_with_descriptions do
|
||||
@presets
|
||||
|> Map.keys()
|
||||
|> Enum.sort()
|
||||
|> Enum.map(fn name -> {name, Map.get(@descriptions, name, "")} end)
|
||||
end
|
||||
|
||||
@doc """
|
||||
Detects which preset matches the current theme settings, if any.
|
||||
Only compares core theme keys, ignoring branding-specific settings.
|
||||
|
||||
## Examples
|
||||
|
||||
iex> detect_preset(%ThemeSettings{mood: "warm", typography: "editorial", ...})
|
||||
:gallery
|
||||
|
||||
iex> detect_preset(%ThemeSettings{...customized...})
|
||||
nil
|
||||
|
||||
"""
|
||||
def detect_preset(theme_settings) do
|
||||
current_core = extract_core_values(theme_settings)
|
||||
|
||||
Enum.find_value(@presets, fn {name, preset} ->
|
||||
preset_core = Map.take(preset, @core_keys)
|
||||
|
||||
if maps_match?(current_core, preset_core) do
|
||||
name
|
||||
else
|
||||
nil
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp extract_core_values(theme_settings) do
|
||||
theme_settings
|
||||
|> Map.from_struct()
|
||||
|> Map.take(@core_keys)
|
||||
end
|
||||
|
||||
defp maps_match?(map1, map2) do
|
||||
Enum.all?(@core_keys, fn key ->
|
||||
Map.get(map1, key) == Map.get(map2, key)
|
||||
end)
|
||||
end
|
||||
end
|
||||
1231
lib/berrypod/theme/preview_data.ex
Normal file
1231
lib/berrypod/theme/preview_data.ex
Normal file
File diff suppressed because it is too large
Load Diff
101
lib/berrypod/vault.ex
Normal file
101
lib/berrypod/vault.ex
Normal file
@@ -0,0 +1,101 @@
|
||||
defmodule Berrypod.Vault do
  @moduledoc """
  Handles encryption and decryption of sensitive data.

  Uses AES-256-GCM for authenticated encryption.
  Keys are derived from the application's secret_key_base.
  """

  # Additional authenticated data: binds ciphertexts to this module so a
  # ciphertext produced under the same key in another context fails to decrypt.
  @aad "Berrypod.Vault"

  @doc """
  Encrypts a string value.

  Returns `{:ok, encrypted_binary}` or `{:error, reason}`.
  The encrypted binary includes the IV and auth tag.
  `nil` passes through as `{:ok, nil}` so optional fields round-trip.
  """
  @spec encrypt(String.t() | nil) :: {:ok, binary() | nil} | {:error, term()}
  def encrypt(plaintext) when is_binary(plaintext) do
    key = derive_key()
    # 96-bit random IV — the standard nonce size for GCM.
    iv = :crypto.strong_rand_bytes(12)

    {ciphertext, tag} =
      :crypto.crypto_one_time_aead(:aes_256_gcm, key, iv, plaintext, @aad, true)

    # Format: iv (12 bytes) + tag (16 bytes) + ciphertext
    {:ok, iv <> tag <> ciphertext}
  rescue
    # Boundary guard: surface crypto/config failures as a tagged error
    # instead of crashing the caller.
    e -> {:error, e}
  end

  def encrypt(nil), do: {:ok, nil}

  @doc """
  Decrypts an encrypted binary.

  Returns `{:ok, plaintext}` or `{:error, reason}`.
  `nil` and `""` pass through unchanged; anything shorter than the
  28-byte iv+tag prefix yields `{:error, :invalid_ciphertext}`.
  """
  @spec decrypt(binary() | nil) :: {:ok, String.t() | nil} | {:error, term()}
  def decrypt(<<iv::binary-12, tag::binary-16, ciphertext::binary>>) do
    key = derive_key()

    case :crypto.crypto_one_time_aead(:aes_256_gcm, key, iv, ciphertext, @aad, tag, false) do
      plaintext when is_binary(plaintext) ->
        {:ok, plaintext}

      :error ->
        # Wrong key, tampered data, or mismatched AAD.
        {:error, :decryption_failed}
    end
  rescue
    e -> {:error, e}
  end

  def decrypt(nil), do: {:ok, nil}
  def decrypt(""), do: {:ok, ""}

  def decrypt(_invalid) do
    {:error, :invalid_ciphertext}
  end

  @doc """
  Encrypts a string value, raising on error.
  """
  @spec encrypt!(String.t() | nil) :: binary() | nil
  def encrypt!(plaintext) do
    case encrypt(plaintext) do
      {:ok, ciphertext} -> ciphertext
      {:error, reason} -> raise "Encryption failed: #{inspect(reason)}"
    end
  end

  @doc """
  Decrypts an encrypted binary, raising on error.
  """
  @spec decrypt!(binary() | nil) :: String.t() | nil
  def decrypt!(ciphertext) do
    case decrypt(ciphertext) do
      {:ok, plaintext} -> plaintext
      {:error, reason} -> raise "Decryption failed: #{inspect(reason)}"
    end
  end

  # Derives a 32-byte key from the secret_key_base.
  # NOTE: changing this derivation would invalidate all previously
  # encrypted data, so it must stay stable.
  defp derive_key do
    secret_key_base = get_secret_key_base()

    :crypto.hash(:sha256, secret_key_base <> "vault_encryption_key")
  end

  defp get_secret_key_base do
    case Application.get_env(:berrypod, BerrypodWeb.Endpoint)[:secret_key_base] do
      nil ->
        raise """
        Secret key base is not configured.
        Set it in config/runtime.exs or config/dev.exs.
        """

      key when is_binary(key) ->
        key
    end
  end
end
|
||||
242
lib/berrypod/webhooks.ex
Normal file
242
lib/berrypod/webhooks.ex
Normal file
@@ -0,0 +1,242 @@
|
||||
defmodule Berrypod.Webhooks do
  @moduledoc """
  Handles incoming webhook events from POD providers.

  Each `handle_*_event/2` clause matches one provider event type. Unknown
  event types are logged and ignored (`:ok`) so providers don't retry
  events we deliberately don't handle.
  """

  alias Berrypod.Orders
  alias Berrypod.Orders.OrderNotifier
  alias Berrypod.Products
  alias Berrypod.Sync.ProductSyncWorker
  alias Berrypod.Webhooks.ProductDeleteWorker

  require Logger

  @doc """
  Handles a Printify webhook event.

  Returns :ok or {:ok, job} on success, {:error, reason} on failure.
  """

  # --- Product events ---

  # Product changes trigger a full catalog sync rather than a per-product fetch.
  def handle_printify_event("product:updated", %{"id" => _product_id}) do
    enqueue_product_sync()
  end

  def handle_printify_event("product:publish:started", %{"id" => _product_id}) do
    enqueue_product_sync()
  end

  def handle_printify_event("product:deleted", %{"id" => product_id}) do
    ProductDeleteWorker.enqueue(product_id)
  end

  # --- Order events ---

  def handle_printify_event("order:sent-to-production", resource) do
    with {:ok, order} <- find_order_from_resource(resource) do
      Orders.update_fulfilment(order, %{
        fulfilment_status: "processing",
        provider_status: "in-production"
      })
    end
  end

  def handle_printify_event("order:shipment:created", resource) do
    shipment = extract_shipment(resource)

    with {:ok, order} <- find_order_from_resource(resource),
         {:ok, updated_order} <-
           Orders.update_fulfilment(order, %{
             fulfilment_status: "shipped",
             provider_status: "shipped",
             tracking_number: shipment.tracking_number,
             tracking_url: shipment.tracking_url,
             shipped_at: DateTime.utc_now() |> DateTime.truncate(:second)
           }) do
      OrderNotifier.deliver_shipping_notification(updated_order)
      {:ok, updated_order}
    end
  end

  def handle_printify_event("order:shipment:delivered", resource) do
    with {:ok, order} <- find_order_from_resource(resource) do
      Orders.update_fulfilment(order, %{
        fulfilment_status: "delivered",
        provider_status: "delivered",
        delivered_at: DateTime.utc_now() |> DateTime.truncate(:second)
      })
    end
  end

  # --- Catch-all ---

  def handle_printify_event("shop:disconnected", _resource) do
    Logger.warning("Printify shop disconnected - manual intervention needed")
    :ok
  end

  def handle_printify_event(event_type, _resource) do
    Logger.info("Ignoring unhandled Printify event: #{event_type}")
    :ok
  end

  # =============================================================================
  # Printful events
  # =============================================================================

  @doc """
  Handles a Printful webhook event.

  Returns :ok or {:ok, result} on success, {:error, reason} on failure.
  """

  # --- Order events ---

  def handle_printful_event("package_shipped", data) do
    shipment = extract_printful_shipment(data)

    # Threaded through `with` (not a raising `=` match) so a failed update
    # propagates as {:error, reason} — consistent with the Printify
    # "order:shipment:created" handler above.
    with {:ok, order} <- find_printful_order(data),
         {:ok, updated} <-
           Orders.update_fulfilment(order, %{
             fulfilment_status: "shipped",
             provider_status: "shipped",
             tracking_number: shipment.tracking_number,
             tracking_url: shipment.tracking_url,
             shipped_at: DateTime.utc_now() |> DateTime.truncate(:second)
           }) do
      OrderNotifier.deliver_shipping_notification(updated)
      {:ok, updated}
    end
  end

  def handle_printful_event("order_failed", data) do
    with {:ok, order} <- find_printful_order(data) do
      Orders.update_fulfilment(order, %{
        fulfilment_status: "failed",
        provider_status: "failed",
        fulfilment_error: data["reason"] || "Order failed at Printful"
      })
    end
  end

  def handle_printful_event("order_canceled", data) do
    with {:ok, order} <- find_printful_order(data) do
      Orders.update_fulfilment(order, %{
        fulfilment_status: "cancelled",
        provider_status: "canceled"
      })
    end
  end

  # --- Product events ---

  def handle_printful_event("product_updated", _data) do
    enqueue_printful_sync()
  end

  def handle_printful_event("product_synced", _data) do
    enqueue_printful_sync()
  end

  def handle_printful_event("product_deleted", %{"sync_product" => %{"id" => product_id}}) do
    ProductDeleteWorker.enqueue(to_string(product_id))
  end

  # Payload without a sync_product id: fall back to a full sync so the
  # deleted product disappears from the catalog.
  def handle_printful_event("product_deleted", _data) do
    enqueue_printful_sync()
  end

  # --- Catch-all ---

  def handle_printful_event(event_type, _data) do
    Logger.info("Ignoring unhandled Printful event: #{event_type}")
    :ok
  end

  # =============================================================================
  # Private helpers
  # =============================================================================

  # --- Printify helpers ---

  defp enqueue_product_sync do
    case Products.get_provider_connection_by_type("printify") do
      nil -> {:error, :no_connection}
      conn -> ProductSyncWorker.enqueue(conn.id)
    end
  end

  # Printify resources carry our order number as "external_id"; lookup is
  # shared with the Printful path via find_order_by_external_id/1.
  defp find_order_from_resource(%{"external_id" => external_id}) when is_binary(external_id) do
    find_order_by_external_id(external_id)
  end

  defp find_order_from_resource(resource) do
    Logger.warning("Order webhook: missing external_id in resource #{inspect(resource)}")
    {:error, :missing_external_id}
  end

  # Printify may report multiple shipments; the most recent one is last.
  defp extract_shipment(resource) do
    shipments = resource["shipments"] || []
    shipment = List.last(shipments) || %{}

    %{
      tracking_number: shipment["tracking_number"],
      tracking_url: shipment["tracking_url"]
    }
  end

  # --- Printful helpers ---

  defp enqueue_printful_sync do
    case Products.get_provider_connection_by_type("printful") do
      nil -> {:error, :no_connection}
      conn -> ProductSyncWorker.enqueue(conn.id)
    end
  end

  # Printful order webhooks include external_id in the order data
  defp find_printful_order(%{"order" => %{"external_id" => ext_id}})
       when is_binary(ext_id) and ext_id != "" do
    find_order_by_external_id(ext_id)
  end

  # Fallback: look for external_id at top level
  defp find_printful_order(%{"external_id" => ext_id})
       when is_binary(ext_id) and ext_id != "" do
    find_order_by_external_id(ext_id)
  end

  defp find_printful_order(data) do
    Logger.warning("Printful order webhook: can't find external_id in #{inspect(data)}")
    {:error, :missing_external_id}
  end

  defp find_order_by_external_id(external_id) do
    case Orders.get_order_by_number(external_id) do
      nil ->
        Logger.warning("Order webhook: no order found for external_id=#{external_id}")
        {:error, :order_not_found}

      order ->
        {:ok, order}
    end
  end

  defp extract_printful_shipment(data) do
    shipment = data["shipment"] || %{}

    %{
      tracking_number: shipment["tracking_number"],
      tracking_url: shipment["tracking_url"]
    }
  end
end
|
||||
41
lib/berrypod/webhooks/product_delete_worker.ex
Normal file
41
lib/berrypod/webhooks/product_delete_worker.ex
Normal file
@@ -0,0 +1,41 @@
|
||||
defmodule Berrypod.Webhooks.ProductDeleteWorker do
  @moduledoc """
  Oban worker for deleting products removed from POD providers.
  """

  use Oban.Worker, queue: :sync, max_attempts: 3

  alias Berrypod.Products

  require Logger

  # Looks up the locally synced product for the given provider id and deletes
  # it; cancels the job when no Printify connection is configured.
  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"provider_product_id" => provider_product_id}}) do
    case Products.get_provider_connection_by_type("printify") do
      nil ->
        Logger.warning("No Printify connection found for product deletion")
        {:cancel, :no_connection}

      conn ->
        delete_synced_product(conn, provider_product_id)
    end
  end

  # Enqueues a deletion job; the id is stringified so args are JSON-safe.
  def enqueue(provider_product_id) do
    args = %{provider_product_id: to_string(provider_product_id)}

    args
    |> new()
    |> Oban.insert()
  end

  # Deletes the product matching provider_product_id under this connection.
  # A missing product is treated as success (already deleted).
  defp delete_synced_product(conn, provider_product_id) do
    case Products.get_product_by_provider(conn.id, provider_product_id) do
      nil ->
        Logger.info("Product #{provider_product_id} already deleted or not found")
        :ok

      product ->
        Logger.info("Deleting product #{product.id} (provider: #{provider_product_id})")

        case Products.delete_product(product) do
          {:ok, _deleted} -> :ok
          {:error, reason} -> {:error, reason}
        end
    end
  end
end
|
||||
Reference in New Issue
Block a user