Merge branch 'broadcast-activity-id-on-deletion' into 'develop'

Broadcast proper deleted activity id to conform to MastoAPI streaming spec

See merge request pleroma/pleroma!917
lambda 2019-03-11 13:19:09 +00:00
commit 9df4fa22dd
6 changed files with 40 additions and 14 deletions
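
For reference, the MastoAPI streaming spec that this change conforms to sends a `delete` event whose payload is the id of the deleted status itself, not the id of the Delete activity that removed it. A minimal sketch of the expected frame, assuming a placeholder `deleted_status_id` that is not an identifier from this commit:

# Sketch only: the streaming "delete" event carries the deleted status id
# as a string payload; `deleted_status_id` is a hypothetical variable here.
frame =
  %{event: "delete", payload: to_string(deleted_status_id)}
  |> Jason.encode!()

# The streamer then sends {:text, frame} to the websocket transport,
# as push_to_socket/3 does in the diff below.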

View file

@@ -107,6 +107,18 @@ def get_in_reply_to_activity(%Activity{data: %{"object" => %{"inReplyTo" => ap_i
   def get_in_reply_to_activity(_), do: nil
 
+  def delete_by_ap_id(id) when is_binary(id) do
+    by_object_ap_id(id)
+    |> Repo.delete_all(returning: true)
+    |> elem(1)
+    |> Enum.find(fn
+      %{data: %{"type" => "Create", "object" => %{"id" => ap_id}}} -> ap_id == id
+      _ -> nil
+    end)
+  end
+
+  def delete_by_ap_id(_), do: nil
+
   for {ap_type, type} <- @mastodon_notification_types do
     def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}),
       do: unquote(type)

View file

@@ -86,9 +86,9 @@ def swap_object_with_tombstone(object) do
   def delete(%Object{data: %{"id" => id}} = object) do
     with {:ok, _obj} = swap_object_with_tombstone(object),
-         Repo.delete_all(Activity.by_object_ap_id(id)),
+         deleted_activity = Activity.delete_by_ap_id(id),
          {:ok, true} <- Cachex.del(:object_cache, "object:#{id}") do
-      {:ok, object}
+      {:ok, object, deleted_activity}
     end
   end

View file

@@ -311,14 +311,14 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
     user = User.get_cached_by_ap_id(actor)
     to = object.data["to"] || [] ++ object.data["cc"] || []
 
-    data = %{
-      "type" => "Delete",
-      "actor" => actor,
-      "object" => id,
-      "to" => to
-    }
-
-    with {:ok, _} <- Object.delete(object),
+    with {:ok, object, activity} <- Object.delete(object),
+         data <- %{
+           "type" => "Delete",
+           "actor" => actor,
+           "object" => id,
+           "to" => to,
+           "deleted_activity_id" => activity && activity.id
+         },
          {:ok, activity} <- insert(data, local),
          # Changing note count prior to enqueuing federation task in order to avoid race conditions on updating user.info
          {:ok, _actor} <- decrease_note_count_if_public(user, object),

View file

@@ -736,6 +736,7 @@ def prepare_outgoing(%{"type" => "Reject"} = data) do
   def prepare_outgoing(%{"type" => _type} = data) do
     data =
       data
+      |> strip_internal_fields
       |> maybe_fix_object_url
       |> Map.merge(Utils.make_json_ld_header())
@@ -870,7 +871,8 @@ defp strip_internal_fields(object) do
       "announcements",
       "announcement_count",
       "emoji",
-      "context_id"
+      "context_id",
+      "deleted_activity_id"
     ])
   end

View file

@@ -211,15 +211,19 @@ def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = ite
     end)
   end
 
-  def push_to_socket(topics, topic, %Activity{id: id, data: %{"type" => "Delete"}}) do
+  def push_to_socket(topics, topic, %Activity{
+        data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
+      }) do
     Enum.each(topics[topic] || [], fn socket ->
       send(
         socket.transport_pid,
-        {:text, %{event: "delete", payload: to_string(id)} |> Jason.encode!()}
+        {:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
       )
     end)
   end
 
+  def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop
+
   def push_to_socket(topics, topic, item) do
     Enum.each(topics[topic] || [], fn socket ->
       # Get the current user so we have up-to-date blocks etc.

View file

@@ -39,7 +39,15 @@ test "it sends to public" do
     task =
       Task.async(fn ->
-        assert_receive {:text, _}, 4_000
+        expected_event =
+          %{
+            "event" => "delete",
+            "payload" => activity.id
+          }
+          |> Jason.encode!()
+
+        assert_receive {:text, received_event}, 4_000
+        assert received_event == expected_event
       end)
 
     fake_socket = %{