Prevent duplicates when editing (#853)

* prevent creating duplicate URLs on edit

* Prevent duplicates when editing
Sascha Ißbrücker 2024-09-24 13:13:32 +02:00 committed by GitHub
parent 7b405c054d
commit d1f81fee0e
5 changed files with 97 additions and 4 deletions

@@ -111,6 +111,24 @@ class BookmarkSerializer(serializers.ModelSerializer):
        return update_bookmark(instance, tag_string, self.context["user"])

    def validate(self, attrs):
        # When creating a bookmark, the service logic prevents duplicate URLs by
        # updating the existing bookmark instead. When editing a bookmark,
        # there is no assumption that it would update a different bookmark if
        # the URL is a duplicate, so raise a validation error in that case.
        if self.instance and "url" in attrs:
            is_duplicate = (
                Bookmark.objects.filter(owner=self.instance.owner, url=attrs["url"])
                .exclude(pk=self.instance.pk)
                .exists()
            )
            if is_duplicate:
                raise serializers.ValidationError(
                    {"url": "A bookmark with this URL already exists."}
                )

        return attrs


class TagSerializer(serializers.ModelSerializer):
    class Meta:
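A quick illustration (not part of the commit) of what the `"url" in attrs` guard means in practice: a partial update that does not touch the URL never triggers the duplicate check. The import path and the direct serializer usage below are assumptions based on the upstream project layout; treat it as a sketch that would run inside the project's Django test environment.

```python
# Hypothetical sketch: a partial update without a "url" key skips the duplicate check.
from bookmarks.api.serializers import BookmarkSerializer

def rename_is_valid(bookmark, new_title):
    serializer = BookmarkSerializer(
        instance=bookmark,               # editing: self.instance is set
        data={"title": new_title},       # no "url" key, so validate() skips the duplicate check
        partial=True,
        context={"user": bookmark.owner},
    )
    # Only a changed URL that another bookmark of the same owner already uses
    # would fail here with {"url": ["A bookmark with this URL already exists."]}.
    return serializer.is_valid()
```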

@@ -168,6 +168,24 @@ class BookmarkForm(forms.ModelForm):
            self.instance and self.instance.notes
        )

    def clean_url(self):
        # When creating a bookmark, the service logic prevents duplicate URLs by
        # updating the existing bookmark instead, which is also communicated in
        # the form's UI. When editing a bookmark, there is no assumption that
        # it would update a different bookmark if the URL is a duplicate, so
        # raise a validation error in that case.
        url = self.cleaned_data["url"]
        if self.instance.pk:
            is_duplicate = (
                Bookmark.objects.filter(owner=self.instance.owner, url=url)
                .exclude(pk=self.instance.pk)
                .exists()
            )
            if is_duplicate:
                raise forms.ValidationError("A bookmark with this URL already exists.")

        return url


class BookmarkSearch:
    SORT_ADDED_ASC = "added_asc"
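Both `BookmarkSerializer.validate()` and `BookmarkForm.clean_url()` rely on the same duplicate check, so it may help to spell it out on its own: the query is scoped to the owner of the bookmark being edited and excludes that bookmark itself, which is why keeping the URL unchanged, or reusing a URL that only another user has bookmarked, is not treated as a duplicate. A hypothetical standalone helper, for illustration only:

```python
# Illustration only: the duplicate check shared by the serializer and the form,
# extracted into a standalone function.
from bookmarks.models import Bookmark

def has_duplicate_url(bookmark: Bookmark, new_url: str) -> bool:
    return (
        Bookmark.objects.filter(owner=bookmark.owner, url=new_url)  # same owner only
        .exclude(pk=bookmark.pk)  # the bookmark being edited is never its own duplicate
        .exists()
    )
```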

@@ -141,6 +141,40 @@ class BookmarkEditViewTestCase(TestCase, BookmarkFactoryMixin):
            html,
        )

    def test_should_prevent_duplicate_urls(self):
        edited_bookmark = self.setup_bookmark(url="http://example.com/edited")
        existing_bookmark = self.setup_bookmark(url="http://example.com/existing")
        other_user_bookmark = self.setup_bookmark(
            url="http://example.com/other-user", user=User.objects.create_user("other")
        )

        # if the URL isn't modified it's not a duplicate
        form_data = self.create_form_data({"url": edited_bookmark.url})
        response = self.client.post(
            reverse("bookmarks:edit", args=[edited_bookmark.id]), form_data
        )
        self.assertEqual(response.status_code, 302)

        # if the URL is already bookmarked by another user, it's not a duplicate
        form_data = self.create_form_data({"url": other_user_bookmark.url})
        response = self.client.post(
            reverse("bookmarks:edit", args=[edited_bookmark.id]), form_data
        )
        self.assertEqual(response.status_code, 302)

        # if the URL is already bookmarked by the same user, it's a duplicate
        form_data = self.create_form_data({"url": existing_bookmark.url})
        response = self.client.post(
            reverse("bookmarks:edit", args=[edited_bookmark.id]), form_data
        )
        self.assertEqual(response.status_code, 422)
        self.assertInHTML(
            "<li>A bookmark with this URL already exists.</li>",
            response.content.decode(),
        )

        edited_bookmark.refresh_from_db()
        self.assertNotEqual(edited_bookmark.url, existing_bookmark.url)

    def test_should_redirect_to_return_url(self):
        bookmark = self.setup_bookmark()
        form_data = self.create_form_data()

@@ -685,6 +685,29 @@ class BookmarksApiTestCase(LinkdingApiTestCase, BookmarkFactoryMixin):
        updated_bookmark = Bookmark.objects.get(id=bookmark.id)
        self.assertCountEqual(updated_bookmark.tags.all(), [tag1, tag2])

    def test_update_bookmark_should_prevent_duplicate_urls(self):
        self.authenticate()
        edited_bookmark = self.setup_bookmark(url="https://example.com/edited")
        existing_bookmark = self.setup_bookmark(url="https://example.com/existing")
        other_user_bookmark = self.setup_bookmark(
            url="https://example.com/other", user=self.setup_user()
        )

        # if the URL isn't modified it's not a duplicate
        data = {"url": edited_bookmark.url}
        url = reverse("bookmarks:bookmark-detail", args=[edited_bookmark.id])
        self.put(url, data, expected_status_code=status.HTTP_200_OK)

        # if the URL is already bookmarked by another user, it's not a duplicate
        data = {"url": other_user_bookmark.url}
        url = reverse("bookmarks:bookmark-detail", args=[edited_bookmark.id])
        self.put(url, data, expected_status_code=status.HTTP_200_OK)

        # if the URL is already bookmarked by the same user, it's a duplicate
        data = {"url": existing_bookmark.url}
        url = reverse("bookmarks:bookmark-detail", args=[edited_bookmark.id])
        self.put(url, data, expected_status_code=status.HTTP_400_BAD_REQUEST)

    def test_patch_bookmark(self):
        self.authenticate()
        bookmark = self.setup_bookmark()

@@ -127,11 +127,9 @@ POST /api/bookmarks/

Creates a new bookmark. Tags are simply assigned using their names. Including
`is_archived: true` saves a bookmark directly to the archive.

If the title and description are not provided or empty, the application automatically tries to scrape them from the bookmarked website. This behavior can be disabled by adding the `disable_scraping` query parameter to the API request. If you have an application where you want to keep using scraped metadata, but also allow users to leave the title or description empty, you should:

If the provided URL is already bookmarked, this silently updates the existing bookmark instead of creating a new one. If you are implementing a user interface, consider notifying users about this behavior. You can use the `/check` endpoint to check if a URL is already bookmarked and at the same time get the existing bookmark data. This behavior may change in the future to return an error instead.

- Fetch the scraped title and description using the `/check` endpoint.
- Prefill the title and description fields in your app with the fetched values and allow users to clear those values.
- Add the `disable_scraping` query parameter to prevent the API from adding them back again.

If the title and description are not provided or empty, the application automatically tries to scrape them from the bookmarked website. This behavior can be disabled by adding the `disable_scraping` query parameter to the API request.
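For illustration, a rough client-side sketch (not part of the official docs) of the `/check`-then-create flow described in the list above. The instance URL, the token placeholder, and the exact shape of the `/check` response are assumptions:

```python
# Hypothetical client flow: check first, prefill, then create with scraping disabled.
import requests

BASE = "https://linkding.example.com/api/bookmarks"    # hypothetical instance URL
HEADERS = {"Authorization": "Token <your-api-token>"}  # placeholder token

def create_bookmark(url: str, title: str | None = None, description: str | None = None) -> dict:
    # 1. Fetch scraped metadata (and any existing bookmark) via the /check endpoint.
    check = requests.get(f"{BASE}/check/", params={"url": url}, headers=HEADERS).json()
    metadata = check.get("metadata") or {}

    # 2. Prefill title/description with the scraped values; a real UI would let
    #    the user edit or clear them before saving.
    payload = {
        "url": url,
        "title": title if title is not None else metadata.get("title") or "",
        "description": description if description is not None else metadata.get("description") or "",
    }

    # 3. Create the bookmark with disable_scraping so cleared fields stay empty.
    response = requests.post(f"{BASE}/?disable_scraping", json=payload, headers=HEADERS)
    response.raise_for_status()
    return response.json()
```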
Example payload:
@@ -164,6 +162,8 @@ When using `PATCH`, only the fields that should be updated need to be provided.
Regardless of which method is used, any field that is not provided is not modified.
Tags are simply assigned using their names.

If the provided URL is already bookmarked, this returns an error.
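Correspondingly, a brief sketch (illustration only, same assumed instance and token as above) of detecting that error when updating: with the serializer change in this commit, a duplicate URL produces a 400 response whose body names the `url` field.

```python
# Hypothetical handling of the duplicate-URL error on update; bookmark id 42 is made up.
import requests

BASE = "https://linkding.example.com/api/bookmarks"    # hypothetical instance URL
HEADERS = {"Authorization": "Token <your-api-token>"}  # placeholder token

response = requests.put(f"{BASE}/42/", json={"url": "https://example.com"}, headers=HEADERS)
if response.status_code == 400 and "url" in response.json():
    # e.g. {"url": ["A bookmark with this URL already exists."]}
    print("Duplicate URL:", response.json()["url"][0])
else:
    response.raise_for_status()
```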
Example payload:
```json