IO.puts(warning)
end
-exported_config = Path.join([Path.dirname(config_path), "prod.exported_from_db.secret.exs"])
+exported_config =
+ config_path
+ |> Path.dirname()
+ |> Path.join("prod.exported_from_db.secret.exs")
if File.exists?(exported_config) do
import_config exported_config
## Transfer config from DB to `config/env.exported_from_db.secret.exs`
+To delete transferred settings from the database, the optional flag `-d` can be used.
```sh tab="OTP"
- ./bin/pleroma_ctl config migrate_from_db <env>
+ ./bin/pleroma_ctl config migrate_from_db --env=<env> [-d]
```
```sh tab="From Source"
-mix pleroma.config migrate_from_db <env>
+mix pleroma.config migrate_from_db --env=<env> [-d]
```
-
end
def run(["migrate_from_db" | options]) do
- # TODO: add support for releases
start_pleroma()
{opts, _} =
OptionParser.parse!(options,
- strict: [env: :string, delete_from_db: :boolean],
- aliases: [d: :delete_from_db]
+ strict: [env: :string, delete: :boolean],
+ aliases: [d: :delete]
)
- with {:active?, true} <-
- {:active?, Pleroma.Config.get([:configurable_from_database])},
- env when is_binary(env) <- opts[:env] || "prod",
- config_path <- config_path(env),
- {:ok, file} <- File.open(config_path, [:write, :utf8]) do
- IO.write(file, config_header())
-
- ConfigDB
- |> Repo.all()
- |> Enum.each(&write_to_file_with_deletion(&1, file, opts[:delete_from_db]))
-
- File.close(file)
- System.cmd("mix", ["format", config_path])
- else
- {:active?, false} ->
- shell_info(
- "Migration is not allowed by config. You can change this behavior in instance settings."
- )
-
- error ->
- shell_info("Error occuried while opening file. #{inspect(error)}")
- end
- end
-
- defp config_path(env) do
- path =
- if Pleroma.Config.get(:release) do
- :config_path
- |> Pleroma.Config.get()
- |> Path.dirname()
- else
- "config"
- end
-
- Path.join(path, "#{env}.exported_from_db.secret.exs")
+ migrate_from_db(opts)
end
@spec migrate_to_db(Path.t() | nil) :: any()
def migrate_to_db(file_path \\ nil) do
if Pleroma.Config.get([:configurable_from_database]) do
- user_config_file =
- if Pleroma.Config.get(:release),
- do: Pleroma.Config.get(:config_path),
- else: "config/#{Pleroma.Config.get(:env)}.secret.exs"
+ config_file =
+ if file_path do
+ file_path
+ else
+ if Pleroma.Config.get(:release) do
+ Pleroma.Config.get(:config_path)
+ else
+ "config/#{Pleroma.Config.get(:env)}.secret.exs"
+ end
+ end
- config_file = file_path || user_config_file
do_migrate_to_db(config_file)
else
- shell_info(
- "Migration is not allowed by config. You can change this behavior in instance settings."
- )
+ migration_error()
end
end
- if Code.ensure_loaded?(Config.Reader) do
- defp config_header, do: "import Config\r\n\r\n"
- defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
- else
- defp config_header, do: "use Mix.Config\r\n\r\n"
- defp read_file(config_file), do: Mix.Config.eval!(config_file)
- end
-
defp do_migrate_to_db(config_file) do
if File.exists?(config_file) do
- {custom_config, _paths} = read_file(config_file)
+ custom_config =
+ config_file
+ |> read_file()
+ |> elem(0)
custom_config
|> Keyword.keys()
- |> Enum.each(&create(&1, custom_config[&1]))
+ |> Enum.each(&create(&1, custom_config))
else
shell_info("To migrate settings, you must define custom settings in #{config_file}.")
end
end
defp create(group, settings) do
- Enum.reject(settings, fn {k, _v} ->
- k in [Pleroma.Repo, Pleroma.Web.Endpoint, :env, :configurable_from_database] or
- (group == :phoenix and k == :serve_endpoints)
- end)
+ group
+ |> Pleroma.Config.Loader.filter_group(settings)
|> Enum.each(fn {key, value} ->
key = inspect(key)
{:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
shell_info("Settings for group :#{group} migrated.")
end
- defp write_to_file_with_deletion(config, file, with_deletion) do
- IO.write(
- file,
- "config #{config.group}, #{config.key}, #{
- inspect(ConfigDB.from_binary(config.value), limit: :infinity)
- }\r\n\r\n"
- )
+ defp migrate_from_db(opts) do
+ if Pleroma.Config.get([:configurable_from_database]) do
+ env = opts[:env] || "prod"
+
+ config_path =
+ if Pleroma.Config.get(:release) do
+ :config_path
+ |> Pleroma.Config.get()
+ |> Path.dirname()
+ else
+ "config"
+ end
+ |> Path.join("#{env}.exported_from_db.secret.exs")
+
+ file = File.open!(config_path, [:write, :utf8])
+
+ IO.write(file, config_header())
+
+ ConfigDB
+ |> Repo.all()
+ |> Enum.each(&write_and_delete(&1, file, opts[:delete]))
- if with_deletion do
- {:ok, _} = Repo.delete(config)
- shell_info("#{config.key} deleted from DB.")
+ :ok = File.close(file)
+ System.cmd("mix", ["format", config_path])
+ else
+ migration_error()
end
end
+
+ defp migration_error do
+ shell_error(
+ "Migration is not allowed in config. You can change this behavior by setting `configurable_from_database` to true."
+ )
+ end
+
+ if Code.ensure_loaded?(Config.Reader) do
+ defp config_header, do: "import Config\r\n\r\n"
+ defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
+ else
+ defp config_header, do: "use Mix.Config\r\n\r\n"
+ defp read_file(config_file), do: Mix.Config.eval!(config_file)
+ end
+
+ defp write_and_delete(config, file, delete?) do
+ config
+ |> write(file)
+ |> delete(delete?)
+ end
+
+ defp write(config, file) do
+ value =
+ config.value
+ |> ConfigDB.from_binary()
+ |> inspect(limit: :infinity)
+
+ IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
+
+ config
+ end
+
+ defp delete(config, true) do
+ {:ok, _} = Repo.delete(config)
+ shell_info("#{config.key} deleted from DB.")
+ end
+
+ defp delete(_config, _), do: :ok
end
defmodule Pleroma.ConfigDB do
use Ecto.Schema
+
import Ecto.Changeset
import Ecto.Query
import Pleroma.Web.Gettext
+
alias __MODULE__
alias Pleroma.Repo
@type t :: %__MODULE__{}
+ @full_key_update [
+ {:pleroma, :ecto_repos},
+ {:quack, :meta},
+ {:mime, :types},
+ {:cors_plug, [:max_age, :methods, :expose, :headers]},
+ {:auto_linker, :opts},
+ {:swarm, :node_blacklist},
+ {:logger, :backends}
+ ]
+
+ @full_subkey_update [
+ {:pleroma, :assets, :mascots},
+ {:pleroma, :emoji, :groups},
+ {:pleroma, :workers, :retries},
+ {:pleroma, :mrf_subchain, :match_actor},
+ {:pleroma, :mrf_keyword, :replace}
+ ]
+
+ @regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
+
+ @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
+
schema "config" do
field(:key, :string)
field(:group, :string)
end
end
- @full_subkey_update [
- {:pleroma, :assets, :mascots},
- {:pleroma, :emoji, :groups},
- {:pleroma, :workers, :retries},
- {:pleroma, :mrf_subchain, :match_actor},
- {:pleroma, :mrf_keyword, :replace}
- ]
-
- @spec deep_merge(atom(), atom(), keyword(), keyword()) :: keyword()
- def deep_merge(group, key, old_value, new_value) do
- old_keys =
- old_value
- |> Keyword.keys()
- |> MapSet.new()
+ @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
+ def merge_group(group, key, old_value, new_value) do
+ new_keys = to_map_set(new_value)
- new_keys =
- new_value
- |> Keyword.keys()
- |> MapSet.new()
-
- intersect_keys = old_keys |> MapSet.intersection(new_keys) |> MapSet.to_list()
-
- subkeys = sub_key_full_update(group, key, intersect_keys)
+ intersect_keys =
+ old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
merged_value = ConfigDB.merge(old_value, new_value)
- Enum.reduce(subkeys, merged_value, fn subkey, acc ->
+ @full_subkey_update
+ |> Enum.map(fn
+ {g, k, subkey} when g == group and k == key ->
+ if subkey in intersect_keys, do: subkey, else: []
+
+ _ ->
+ []
+ end)
+ |> List.flatten()
+ |> Enum.reduce(merged_value, fn subkey, acc ->
Keyword.put(acc, subkey, new_value[subkey])
end)
end
+ defp to_map_set(keyword) do
+ keyword
+ |> Keyword.keys()
+ |> MapSet.new()
+ end
+
@spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean()
def sub_key_full_update?(group, key, subkeys) do
Enum.any?(@full_subkey_update, fn {g, k, subkey} ->
end)
end
- defp sub_key_full_update(group, key, subkeys) do
- Enum.map(@full_subkey_update, fn
- {g, k, subkey} when g == group and k == key ->
- if subkey in subkeys, do: subkey, else: []
-
- _ ->
- []
- end)
- |> List.flatten()
- end
-
+ @spec merge(keyword(), keyword()) :: keyword()
def merge(config1, config2) when is_list(config1) and is_list(config2) do
Keyword.merge(config1, config2, fn _, app1, app2 ->
if Keyword.keyword?(app1) and Keyword.keyword?(app2) do
end
end
- @full_key_update [
- {:pleroma, :ecto_repos},
- {:quack, :meta},
- {:mime, :types},
- {:cors_plug, [:max_age, :methods, :expose, :headers]},
- {:auto_linker, :opts},
- {:swarm, :node_blacklist},
- {:logger, :backends}
- ]
-
- defp only_full_update?(%ConfigDB{} = config) do
- config_group = ConfigDB.from_string(config.group)
- config_key = ConfigDB.from_string(config.key)
-
- Enum.any?(@full_key_update, fn
- {group, key} when is_list(key) ->
- config_group == group and config_key in key
-
- {group, key} ->
- config_group == group and config_key == key
- end)
- end
-
- defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
-
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
def update_or_create(params) do
search_opts = Map.take(params, [:group, :key])
transformed_value <- do_transform(params[:value]),
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
new_value <-
- deep_merge(
+ merge_group(
ConfigDB.from_string(config.group),
ConfigDB.from_string(config.key),
old_value,
end
end
+ defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
+
+ defp only_full_update?(%ConfigDB{} = config) do
+ config_group = ConfigDB.from_string(config.group)
+ config_key = ConfigDB.from_string(config.key)
+
+ Enum.any?(@full_key_update, fn
+ {group, key} when is_list(key) ->
+ config_group == group and config_key in key
+
+ {group, key} ->
+ config_group == group and config_key == key
+ end)
+ end
+
@spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()} | {:ok, nil}
def delete(params) do
search_opts = Map.delete(params, :subkeys)
}
end
- # TODO: will become useless after removing hackney
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
- defp do_convert(entity) when is_tuple(entity),
- do: %{"tuple" => do_convert(Tuple.to_list(entity))}
+ defp do_convert(entity) when is_tuple(entity) do
+ value =
+ entity
+ |> Tuple.to_list()
+ |> do_convert()
+
+ %{"tuple" => value}
+ end
- defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity),
- do: entity
+ defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
+ entity
+ end
defp do_convert(entity)
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
- ":#{to_string(entity)}"
+ ":#{entity}"
end
defp do_convert(entity) when is_atom(entity), do: inspect(entity)
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
end
- # TODO: will become useless after removing hackney
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
{partial_chain, []} =
entity
end
end
- @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
-
- defp find_valid_delimiter([], _string, _),
- do: raise(ArgumentError, message: "valid delimiter for Regex expression not found")
+ defp find_valid_delimiter([], _string, _) do
+ raise(ArgumentError, message: "valid delimiter for Regex expression not found")
+ end
defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter)
when is_tuple(delimiter) do
end
end
- @regex_parts ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
-
defp do_transform_string("~r" <> _pattern = regex) do
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
- Regex.named_captures(@regex_parts, regex),
+ Regex.named_captures(@regex, regex),
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
result
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
defmodule Pleroma.Config.Holder do
@config Pleroma.Config.Loader.load_and_merge()
@spec config() :: keyword()
- def config do
- @config
- |> Keyword.keys()
- |> Enum.map(&filter(&1, config(&1)))
- |> List.flatten()
- end
+ def config, do: @config
@spec config(atom()) :: any()
def config(group), do: @config[group]
@spec config(atom(), atom()) :: any()
def config(group, key), do: @config[group][key]
-
- defp filter(group, settings) when group not in [:swarm] do
- filtered =
- Enum.reject(settings, fn {k, _v} ->
- k in [Pleroma.Repo, Pleroma.Web.Endpoint, :env, :configurable_from_database] or
- (group == :phoenix and k == :serve_endpoints)
- end)
-
- {group, filtered}
- end
-
- defp filter(_, _), do: []
end
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
defmodule Pleroma.Config.Loader do
@paths ["config/config.exs", "config/#{Mix.env()}.exs"]
+ @reject_keys [
+ Pleroma.Repo,
+ Pleroma.Web.Endpoint,
+ :env,
+ :configurable_from_database,
+ :database,
+ :swarm
+ ]
+
if Code.ensure_loaded?(Config.Reader) do
@spec load(Path.t()) :: keyword()
def load(path), do: Config.Reader.read!(path)
# support for Elixir less than 1.9
@spec load(Path.t()) :: keyword()
def load(path) do
- {config, _paths} = Mix.Config.eval!(path)
- config
+ path
+ |> Mix.Config.eval!()
+ |> elem(0)
end
defp do_merge(conf1, conf2), do: Mix.Config.merge(conf1, conf2)
all_paths
|> Enum.map(&load(&1))
- |> merge()
+ |> Enum.reduce([], &do_merge(&2, &1))
+ |> filter()
end
- @spec merge([keyword()], keyword()) :: keyword()
- def merge(configs, acc \\ [])
- def merge([], acc), do: acc
+ defp filter(configs) do
+ configs
+ |> Keyword.keys()
+ |> Enum.reduce([], &Keyword.put(&2, &1, filter_group(&1, configs)))
+ end
- def merge([config | others], acc) do
- merge(others, do_merge(acc, config))
+ @spec filter_group(atom(), keyword()) :: keyword()
+ def filter_group(group, configs) do
+ Enum.reject(configs[group], fn {key, _v} ->
+ key in @reject_keys or (group == :phoenix and key == :serve_endpoints)
+ end)
end
end
group = ConfigDB.from_string(setting.group)
value = ConfigDB.from_binary(setting.value)
- if group != :phoenix and key != :serve_endpoints do
- default = Pleroma.Config.Holder.config(group, key)
+ default = Pleroma.Config.Holder.config(group, key)
- merged_value =
- if can_be_merged?(default, value) do
- ConfigDB.deep_merge(group, key, default, value)
- else
- value
- end
+ merged_value =
+ if can_be_merged?(default, value) do
+ ConfigDB.merge_group(group, key, default, value)
+ else
+ value
+ end
- :ok = Application.put_env(group, key, merged_value)
+ :ok = Application.put_env(group, key, merged_value)
- if group != :logger do
- group
+ if group != :logger do
+ group
+ else
+ # change logger configuration in runtime, without restart
+ if Keyword.keyword?(merged_value) and
+ key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
+ Logger.configure_backend(key, merged_value)
else
- # change logger configuration in runtime, without restart
- if Keyword.keyword?(merged_value) and
- key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
- Logger.configure_backend(key, merged_value)
- else
- Logger.configure([{key, merged_value}])
- end
-
- nil
+ Logger.configure([{key, merged_value}])
end
+
+ nil
end
rescue
e ->
:ok <- Application.stop(app) do
:ok = Application.start(app)
else
- nil -> Logger.warn("#{app} is not started.")
- error -> Logger.warn(inspect(error))
+ nil ->
+ Logger.warn("#{app} is not started.")
+
+ error ->
+ error
+ |> inspect()
+ |> Logger.warn()
end
end
- defp can_be_merged?(val1, val2) when is_map(val1) and is_map(val2), do: true
-
defp can_be_merged?(val1, val2) when is_list(val1) and is_list(val2) do
Keyword.keyword?(val1) and Keyword.keyword?(val2)
end
merged_value =
if !is_nil(db_value) and Keyword.keyword?(db_value) and
ConfigDB.sub_key_full_update?(group, key, Keyword.keys(db_value)) do
- ConfigDB.deep_merge(group, key, value, db_value)
+ ConfigDB.merge_group(group, key, value, db_value)
else
value
end
--- /dev/null
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Config.HolderTest do
+ use ExUnit.Case, async: true
+
+ alias Pleroma.Config.Holder
+
+ test "config/0" do
+ config = Holder.config()
+ assert config[:pleroma][Pleroma.Uploaders.Local][:uploads] == "test/uploads"
+ assert config[:tesla][:adapter] == Tesla.Mock
+
+ refute config[:pleroma][Pleroma.Repo]
+ refute config[:pleroma][Pleroma.Web.Endpoint]
+ refute config[:pleroma][:env]
+ refute config[:pleroma][:configurable_from_database]
+ refute config[:pleroma][:database]
+ refute config[:phoenix][:serve_endpoints]
+ end
+
+ test "config/1" do
+ pleroma_config = Holder.config(:pleroma)
+ assert pleroma_config[Pleroma.Uploaders.Local][:uploads] == "test/uploads"
+ tesla_config = Holder.config(:tesla)
+ assert tesla_config[:adapter] == Tesla.Mock
+ end
+
+ test "config/2" do
+ assert Holder.config(:pleroma, Pleroma.Uploaders.Local) == [uploads: "test/uploads"]
+ assert Holder.config(:tesla, :adapter) == Tesla.Mock
+ end
+end
--- /dev/null
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Config.LoaderTest do
+ use ExUnit.Case, async: true
+
+ alias Pleroma.Config.Loader
+
+ test "load/1" do
+ config = Loader.load("test/fixtures/config/temp.secret.exs")
+ assert config[:pleroma][:first_setting][:key] == "value"
+ assert config[:pleroma][:first_setting][:key2] == [Pleroma.Repo]
+ assert config[:quack][:level] == :info
+ end
+
+ test "load_and_merge/0" do
+ config = Loader.load_and_merge()
+
+ refute config[:pleroma][Pleroma.Repo]
+ refute config[:pleroma][Pleroma.Web.Endpoint]
+ refute config[:pleroma][:env]
+ refute config[:pleroma][:configurable_from_database]
+ refute config[:pleroma][:database]
+ refute config[:phoenix][:serve_endpoints]
+
+ assert config[:pleroma][:ecto_repos] == [Pleroma.Repo]
+ assert config[:pleroma][Pleroma.Uploaders.Local][:uploads] == "test/uploads"
+ assert config[:tesla][:adapter] == Tesla.Mock
+ end
+
+ test "filter_group/2" do
+ assert Loader.filter_group(:pleroma,
+ pleroma: [
+ {Pleroma.Repo, [a: 1, b: 2]},
+ {Pleroma.Upload, [a: 1, b: 2]},
+ {Pleroma.Web.Endpoint, []},
+ env: :test,
+ configurable_from_database: true,
+ database: []
+ ]
+ ) == [{Pleroma.Upload, [a: 1, b: 2]}]
+ end
+end