-
Notifications
You must be signed in to change notification settings - Fork 0
fix(server): always fetch unread feeds from offset 0 in AI mode #147
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,41 @@ | ||
| package ru.trett.rss.server.services | ||
|
|
||
| import cats.effect.IO | ||
| import cats.effect.unsafe.implicits.global | ||
| import org.http4s.client.Client | ||
| import org.scalamock.scalatest.MockFactory | ||
| import org.scalatest.funsuite.AnyFunSuite | ||
| import org.scalatest.matchers.should.Matchers | ||
| import org.typelevel.log4cats.LoggerFactory | ||
| import org.typelevel.log4cats.noop.NoOpFactory | ||
| import ru.trett.rss.server.models.User | ||
| import ru.trett.rss.server.repositories.FeedRepository | ||
|
|
||
/** Regression test for the "AI mode" pagination fix: streamSummary must ignore
  * the caller-supplied offset and always read unread feeds from offset 0,
  * because each summarized batch is marked as read and the unread window
  * therefore always restarts at the beginning.
  */
class SummarizeServiceSpec extends AnyFunSuite with Matchers with MockFactory {

  test("streamSummary should always fetch feeds from offset 0 regardless of provided offset") {
    val feedRepository = mock[FeedRepository]
    val client = mock[Client[IO]]
    val user = User("user-id", "User", "user@example.com", User.Settings())

    // No-op logger so the service's logging does not pollute test output.
    implicit val loggerFactory: LoggerFactory[IO] = NoOpFactory[IO]

    // Stub the unread-count lookup the service uses to size its work.
    // NOTE: the trailing underscore eta-expands the mocked method into a
    // MockFunction; the bare reference `(feedRepository.getTotalUnreadCount)`
    // does not compile under Scala 2 ("missing argument list").
    (feedRepository.getTotalUnreadCount _)
      .expects("user-id")
      .returning(IO.pure(60))

    // CRITICAL expectation: even though streamSummary is invoked with
    // offset 30, it must call getUnreadFeeds with offset 0. In AI mode the
    // previous batch was already marked as read, so the next unread page
    // always starts at the front of the unread set.
    (feedRepository
      .getUnreadFeeds(_: User, _: Int, _: Int))
      .expects(user, 30, 0) // batchSize is 30, expected offset is 0
      .returning(IO.pure(List.empty))

    val service = new SummarizeService(feedRepository, client, "api-key")

    // Simulate a "Load More" click (offset 30) and drain the stream;
    // the mock expectations above verify the repository interaction.
    service.streamSummary(user, 30).compile.toList.unsafeRunSync()
  }
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The `summarizeStream` function at these lines is vulnerable to prompt injection, which can lead to stored Cross-Site Scripting (XSS) because the LLM-generated HTML output is not sanitized. It is crucial to sanitize this output using a library like Jsoup with a strict allow-list. Additionally, the change in this area unconditionally applies "AI mode" behavior, causing a regression for non-AI users by breaking pagination and incorrectly marking feeds as read. The conditional logic based on `user.settings.isAiMode` must be restored.