purge chat and shout endpoints
[akkoma] lib/pleroma/application.ex
1 # Pleroma: A lightweight social networking server
2 # Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
3 # SPDX-License-Identifier: AGPL-3.0-only
4
defmodule Pleroma.Application do
  @moduledoc """
  OTP application callback module for Pleroma.

  `start/2` performs one-off startup work (compiling scrubbers, loading
  custom runtime modules, printing deprecation warnings) and then boots
  the supervision tree: repo, caches, HTTP pools, Oban, the Phoenix
  endpoint, and environment-dependent workers.
  """

  use Application

  import Cachex.Spec

  alias Pleroma.Config

  require Logger

  # Project metadata read from mix.exs; fixed at compile time.
  @name Mix.Project.config()[:name]
  @version Mix.Project.config()[:version]
  @repository Mix.Project.config()[:source_url]
  # Captured at compile time; used below to vary children per Mix env.
  @mix_env Mix.env()

  # Public accessors for the compile-time project metadata above.
  def name, do: @name
  def version, do: @version
  def named_version, do: @name <> " " <> @version
  def repository, do: @repository
24 def user_agent do
25 if Process.whereis(Pleroma.Web.Endpoint) do
26 case Config.get([:http, :user_agent], :default) do
27 :default ->
28 info = "#{Pleroma.Web.Endpoint.url()} <#{Config.get([:instance, :email], "")}>"
29 named_version() <> "; " <> info
30
31 custom ->
32 custom
33 end
34 else
35 # fallback, if endpoint is not started yet
36 "Pleroma Data Loader"
37 end
38 end
39
  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  #
  # Application entry point: runs one-off setup side effects in a fixed
  # order, then starts the supervision tree. Returns {:ok, pid} on success
  # or passes through the Supervisor.start_link/2 error after logging it.
  def start(_type, _args) do
    # Scrubbers are compiled at runtime and therefore will cause a conflict
    # every time the application is restarted, so we disable module
    # conflicts at runtime
    Code.compiler_options(ignore_module_conflict: true)
    # Disable warnings_as_errors at runtime, it breaks Phoenix live reload
    # due to protocol consolidation warnings
    Code.compiler_options(warnings_as_errors: false)
    # Defaults must be saved before anything below reads configuration.
    Config.Holder.save_default()
    Pleroma.HTML.compile_scrubbers()
    Pleroma.Config.Oban.warn()
    Config.DeprecationWarnings.warn()
    Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
    # Raises (aborting startup) when mandatory requirements are not met.
    Pleroma.ApplicationRequirements.verify!()
    setup_instrumenters()
    load_custom_modules()
    Pleroma.Docs.JSON.compile()
    limiters_setup()

    # Define workers and child supervisors to be supervised
    children =
      [
        Pleroma.Repo,
        Config.TransferTask,
        Pleroma.Emoji,
        Pleroma.Web.Plugs.RateLimiter.Supervisor
      ] ++
        cachex_children() ++
        http_children() ++
        [
          Pleroma.Stats,
          Pleroma.JobQueueMonitor,
          {Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]},
          {Oban, Config.get(Oban)},
          Pleroma.Web.Endpoint
        ] ++
        elasticsearch_children() ++
        task_children(@mix_env) ++
        dont_run_in_test(@mix_env)

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    # If we have a lot of caches, default max_restarts can cause test
    # resets to fail.
    # Go for the default 3 unless we're in test
    max_restarts =
      if @mix_env == :test do
        100
      else
        3
      end

    opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]

    with {:ok, data} <- Supervisor.start_link(children, opts) do
      # Needs the Repo child, so it runs only after the tree is up.
      set_postgres_server_version()
      {:ok, data}
    else
      e ->
        Logger.error("Failed to start!")
        Logger.error("#{inspect(e)}")
        e
    end
  end
106
107 defp set_postgres_server_version do
108 version =
109 with %{rows: [[version]]} <- Ecto.Adapters.SQL.query!(Pleroma.Repo, "show server_version"),
110 {num, _} <- Float.parse(version) do
111 num
112 else
113 e ->
114 Logger.warn(
115 "Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
116 )
117
118 9.6
119 end
120
121 :persistent_term.put({Pleroma.Repo, :postgres_version}, version)
122 end
123
124 def load_custom_modules do
125 dir = Config.get([:modules, :runtime_dir])
126
127 if dir && File.exists?(dir) do
128 dir
129 |> Pleroma.Utils.compile_dir()
130 |> case do
131 {:error, _errors, _warnings} ->
132 raise "Invalid custom modules"
133
134 {:ok, modules, _warnings} ->
135 if @mix_env != :test do
136 Enum.each(modules, fn mod ->
137 Logger.info("Custom module loaded: #{inspect(mod)}")
138 end)
139 end
140
141 :ok
142 end
143 end
144 end
145
  # Wires up Prometheus metric collection:
  #   * Ecto query telemetry -> Pleroma.Repo.Instrumenter (only when the
  #     :prometheus config for the repo instrumenter is present),
  #   * the HTTP metrics exporter and the Plug pipeline instrumenter,
  #   * Phoenix telemetry via PrometheusPhx.
  defp setup_instrumenters do
    require Prometheus.Registry

    if Application.get_env(:prometheus, Pleroma.Repo.Instrumenter) do
      # Assert :ok so a failed telemetry attach crashes startup loudly.
      :ok =
        :telemetry.attach(
          "prometheus-ecto",
          [:pleroma, :repo, :query],
          &Pleroma.Repo.Instrumenter.handle_event/4,
          %{}
        )

      Pleroma.Repo.Instrumenter.setup()
    end

    Pleroma.Web.Endpoint.MetricsExporter.setup()
    Pleroma.Web.Endpoint.PipelineInstrumenter.setup()

    # Note: disabled until prometheus-phx is integrated into prometheus-phoenix:
    # Pleroma.Web.Endpoint.Instrumenter.setup()
    PrometheusPhx.setup()
  end
168
169 defp cachex_children do
170 [
171 build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
172 build_cachex("user", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
173 build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
174 build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000),
175 build_cachex("scrubber", limit: 2500),
176 build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
177 build_cachex("web_resp", limit: 2500),
178 build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
179 build_cachex("failed_proxy_url", limit: 2500),
180 build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
181 ]
182 end
183
184 defp emoji_packs_expiration,
185 do: expiration(default: :timer.seconds(5 * 60), interval: :timer.seconds(60))
186
187 defp idempotency_expiration,
188 do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
189
190 defp seconds_valid_interval,
191 do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
192
193 @spec build_cachex(String.t(), keyword()) :: map()
194 def build_cachex(type, opts),
195 do: %{
196 id: String.to_atom("cachex_" <> type),
197 start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]},
198 type: :worker
199 }
200
201 defp dont_run_in_test(env) when env in [:test, :benchmark], do: []
202
203 defp dont_run_in_test(_) do
204 [
205 {Registry,
206 [
207 name: Pleroma.Web.Streamer.registry(),
208 keys: :duplicate,
209 partitions: System.schedulers_online()
210 ]}
211 ] ++ background_migrators()
212 end
213
214 defp background_migrators do
215 [
216 Pleroma.Migrators.HashtagsTableMigrator
217 ]
218 end
219
220 defp task_children(:test) do
221 [
222 %{
223 id: :web_push_init,
224 start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
225 restart: :temporary
226 }
227 ]
228 end
229
230 defp task_children(_) do
231 [
232 %{
233 id: :web_push_init,
234 start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
235 restart: :temporary
236 },
237 %{
238 id: :internal_fetch_init,
239 start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
240 restart: :temporary
241 }
242 ]
243 end
244
245 def elasticsearch_children do
246 config = Config.get([Pleroma.Search, :module])
247
248 if config == Pleroma.Search.Elasticsearch do
249 [Pleroma.Search.Elasticsearch.Cluster]
250 else
251 []
252 end
253 end
254
255 @spec limiters_setup() :: :ok
256 def limiters_setup do
257 config = Config.get(ConcurrentLimiter, [])
258
259 [
260 Pleroma.Web.RichMedia.Helpers,
261 Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy,
262 Pleroma.Search
263 ]
264 |> Enum.each(fn module ->
265 mod_config = Keyword.get(config, module, [])
266
267 max_running = Keyword.get(mod_config, :max_running, 5)
268 max_waiting = Keyword.get(mod_config, :max_waiting, 5)
269
270 ConcurrentLimiter.new(module, max_running, max_waiting)
271 end)
272 end
273
274 defp http_children do
275 config =
276 [:http, :adapter]
277 |> Config.get([])
278 |> Keyword.put(:name, MyFinch)
279
280 [{Finch, config}]
281 end
282 end