# honkoma/lib/pleroma/web/rich_media/parser.ex
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
  @moduledoc """
  Fetches a remote document and extracts rich-media metadata (OpenGraph,
  Twitter cards, oEmbed, …) from its HTML via the configured parser
  modules, caching results per URL in `:rich_media_cache`.
  """

  # Read at runtime (not a module attribute) so the parser list can be
  # changed in config without recompiling this module.
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

  # Compile-time branch: in the test environment the cache is bypassed so
  # every call re-parses the URL and tests see fresh results.
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    # Returns `{:ok, data}` or `{:error, reason}`. Both successes and
    # failures are cached via `Cachex.fetch!/3`, which can raise — hence
    # the rescue at this boundary.
    def parse(url) do
      try do
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
        |> set_ttl_based_on_image(url)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

  @doc """
  Set the rich media cache based on the expiration time of image.

  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL
        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # do some parsing in the url and get the ttl of the image
          # and return ttl is unix time
          parse_ttl_from_url(image_url)
        end
      end

  Define the module in the config

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    # Only set an expiry when the cache entry has none yet
    # (`Cachex.ttl/2` returns `{:ok, nil}`) and a TTL setter produced a
    # number (unix time in seconds).
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      # Cachex.expire_at/3 expects milliseconds.
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

  # Fix: pass non-`{:ok, _}` results (error tuples cached by `parse/1`)
  # through unchanged. Previously these raised FunctionClauseError, which
  # the caller's rescue reported as a misleading "Cachex error", losing
  # the original parse error.
  def set_ttl_based_on_image(data, _url), do: data

  # Runs the configured TTL setters in order, starting from `{:ok, nil}`;
  # the first setter to return something other than `{:ok, _}` (e.g. a
  # numeric TTL) short-circuits the rest.
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

  # Fetches `url`, runs the HTML through the configured parsers and
  # validates/cleans the result. Any exception (failed HTTP request,
  # match on a non-ok Tesla response, Floki parse failure, …) is
  # converted to an `{:error, reason}` tuple at this boundary.
  defp parse_url(url) do
    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)

      html
      |> parse_html()
      |> maybe_parse()
      |> Map.put("url", url)
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
    end
  end

  defp parse_html(html), do: Floki.parse_document!(html)

  # Tries each configured parser in turn, halting at the first one that
  # extracts any data; parsers that find nothing leave the accumulator
  # untouched.
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        data when data != %{} -> {:halt, data}
        _ -> {:cont, acc}
      end
    end)
  end

  # Metadata is only considered usable when it carries a non-empty
  # binary "title".
  defp check_parsed_data(%{"title" => title} = data)
       when is_binary(title) and title != "" do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

  # Drops any key/value pair that cannot be serialized to JSON, so the
  # cleaned map is safe to encode and store.
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      not match?({:ok, _}, Jason.encode(%{key => val}))
    end)
    |> Map.new()
  end
end