Merge branch 'feat/keep-chat-perf-up' into 'develop'

Optimize chat perf in long run

See merge request pleroma/pleroma-fe!1350
Shpuld Shpludson 2021-02-22 15:01:05 +00:00
commit 589ab6510c
5 changed files with 49 additions and 0 deletions


@@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### Changed
- Display 'people voted' instead of 'votes' for multi-choice polls
- Optimized chat so that performance no longer degrades badly after keeping the same chat open for a long time
### Added
- Added reason field for registration when approval is required


@@ -234,6 +234,13 @@ const Chat = {
      const scrollable = this.$refs.scrollable
      return scrollable && scrollable.scrollTop <= 0
    },
    cullOlderCheck () {
      window.setTimeout(() => {
        if (this.bottomedOut(JUMP_TO_BOTTOM_BUTTON_VISIBILITY_OFFSET)) {
          this.$store.dispatch('cullOlderMessages', this.currentChatMessageService.chatId)
        }
      }, 5000)
    },
    handleScroll: _.throttle(function () {
      if (!this.currentChat) { return }
@@ -241,6 +248,7 @@ const Chat = {
        this.fetchChat({ maxId: this.currentChatMessageService.minId })
      } else if (this.bottomedOut(JUMP_TO_BOTTOM_BUTTON_VISIBILITY_OFFSET)) {
        this.jumpToBottomButtonVisible = false
        this.cullOlderCheck()
        if (this.newMessageCount > 0) {
          // Use a delay before marking as read to prevent situation where new messages
          // arrive just as you're leaving the view and messages that you didn't actually
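
The new cullOlderCheck schedules the cull five seconds out and re-checks bottomedOut before dispatching, presumably so that older messages a user has scrolled up to read are not culled out from under them. A minimal standalone sketch of that guard pattern, with illustrative names that are not part of the diff:

// Sketch: schedule a cull, but only run it if the view is still scrolled to the
// bottom when the timer fires. isAtBottom and cull are placeholder callbacks.
const scheduleCullIfStillAtBottom = (isAtBottom, cull, delayMs = 5000) => {
  window.setTimeout(() => {
    if (isAtBottom()) cull()
  }, delayMs)
}

// Hypothetical wiring, mirroring the component code above:
// scheduleCullIfStillAtBottom(
//   () => this.bottomedOut(JUMP_TO_BOTTOM_BUTTON_VISIBILITY_OFFSET),
//   () => this.$store.dispatch('cullOlderMessages', chatId))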


@@ -115,6 +115,9 @@ const chats = {
    },
    handleMessageError ({ commit }, value) {
      commit('handleMessageError', { commit, ...value })
    },
    cullOlderMessages ({ commit }, chatId) {
      commit('cullOlderMessages', chatId)
    }
  },
  mutations: {
@@ -227,6 +230,9 @@ const chats = {
    handleMessageError (state, { chatId, fakeId, isRetry }) {
      const chatMessageService = state.openedChatMessageServices[chatId]
      chatService.handleMessageError(chatMessageService, fakeId, isRetry)
    },
    cullOlderMessages (state, chatId) {
      chatService.cullOlderMessages(state.openedChatMessageServices[chatId])
    }
  }
}


@@ -48,6 +48,22 @@ const deleteMessage = (storage, messageId) => {
  }
}

const cullOlderMessages = (storage) => {
  const maxIndex = storage.messages.length
  const minIndex = maxIndex - 50
  if (maxIndex <= 50) return

  storage.messages = _.sortBy(storage.messages, ['id'])
  storage.minId = storage.messages[minIndex].id
  for (const message of storage.messages) {
    if (message.id < storage.minId) {
      delete storage.idIndex[message.id]
      delete storage.idempotencyKeyIndex[message.idempotency_key]
    }
  }
  storage.messages = storage.messages.slice(minIndex, maxIndex)
}

const handleMessageError = (storage, fakeId, isRetry) => {
  if (!storage) { return }
  const fakeMessage = storage.idIndex[fakeId]
@@ -201,6 +217,7 @@ const ChatService = {
  empty,
  getView,
  deleteMessage,
  cullOlderMessages,
  resetNewMessageCount,
  clear,
  handleMessageError
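
Note on ordering: cullOlderMessages sorts by the string id and then compares ids with <, so it relies on ids of constant length that sort lexicographically in chronological order; the spec below builds exactly such ids ('a' + (i / 1000).toFixed(3)). A small standalone illustration of that assumption, not part of the diff:

// Equal-length ids compare chronologically as plain strings:
console.log(['a0.100', 'a0.051', 'a0.099'].sort()) // [ 'a0.051', 'a0.099', 'a0.100' ]
console.log('a0.051' < 'a0.100') // true – the older id is the "smaller" string
// Variable-length numeric strings would not sort this way:
console.log('9' < '10') // false – '9' is compared against '1' first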


@@ -88,4 +88,21 @@ describe('chatService', () => {
      expect(view.map(i => i.type)).to.eql(['date', 'message', 'message', 'date', 'message'])
    })
  })

  describe('.cullOlderMessages', () => {
    it('keeps 50 newest messages and idIndex matches', () => {
      const chat = chatService.empty()
      for (let i = 100; i > 0; i--) {
        // Use decimal values with toFixed to hack together constant length predictable strings
        chatService.add(chat, { messages: [{ ...message1, id: 'a' + (i / 1000).toFixed(3), idempotency_key: i }] })
      }
      chatService.cullOlderMessages(chat)
      expect(chat.messages.length).to.eql(50)
      expect(chat.messages[0].id).to.eql('a0.051')
      expect(chat.minId).to.eql('a0.051')
      expect(chat.messages[49].id).to.eql('a0.100')
      expect(Object.keys(chat.idIndex).length).to.eql(50)
    })
  })
})
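
The early-return branch in cullOlderMessages (if (maxIndex <= 50) return) is not exercised by this spec. A possible companion case, sketched in the same style and assuming chatService.add and message1 behave exactly as in the test above:

  describe('.cullOlderMessages on a short chat', () => {
    it('leaves chats with 50 or fewer messages untouched', () => {
      const chat = chatService.empty()
      for (let i = 10; i > 0; i--) {
        chatService.add(chat, { messages: [{ ...message1, id: 'a' + (i / 1000).toFixed(3), idempotency_key: i }] })
      }
      chatService.cullOlderMessages(chat)
      // cullOlderMessages returns early when there are 50 or fewer messages
      expect(chat.messages.length).to.eql(10)
      expect(Object.keys(chat.idIndex).length).to.eql(10)
    })
  })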