Fixed bug: if duplicate pages exist, the comic's pages are now deleted and rescanned instead of erroring.

2021-04-08 15:51:30 +01:00
parent b19d3aeaa2
commit 53d1ae6364
2 changed files with 19 additions and 1 deletion


@@ -111,7 +111,12 @@ class ComicBook(models.Model):
             archive = zipfile.ZipFile(archive_path)
         except zipfile.BadZipfile:
             return False
-        page_obj = ComicPage.objects.get(Comic=self, index=page)
+        try:
+            page_obj = ComicPage.objects.get(Comic=self, index=page)
+        except ComicPage.MultipleObjectsReturned:
+            ComicPage.objects.filter(Comic=self).delete()
+            self.process_comic_pages(archive, self)
+            page_obj = ComicPage.objects.get(Comic=self, index=page)
         try:
             out = (archive.open(page_obj.page_file_name), page_obj.content_type)
         except rarfile.NoRarEntry:
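
For reference, the recovery logic added above can be read as a standalone sketch. This is an illustration only: the helper name fetch_page and the import path are assumptions, since the hunk shows only the body of an unnamed ComicBook method; process_comic_pages is called exactly as in the diff.

# A minimal sketch of the duplicate-recovery pattern, assuming the repo's
# ComicBook/ComicPage models; fetch_page() is a hypothetical name.
from comic.models import ComicBook, ComicPage  # assumed module path


def fetch_page(book, archive, index):
    """Return the ComicPage row for one page, rebuilding on duplicates."""
    try:
        return ComicPage.objects.get(Comic=book, index=index)
    except ComicPage.MultipleObjectsReturned:
        # .get() raises when several rows share (Comic, index); wiping the
        # comic's pages and reprocessing the archive restores one row each.
        ComicPage.objects.filter(Comic=book).delete()
        book.process_comic_pages(archive, book)
        return ComicPage.objects.get(Comic=book, index=index)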


@@ -292,3 +292,16 @@ class ComicBookTests(TestCase):
         c.login(username="test", password="test")
         response = c.get(f"/comic/read/{urlsafe_base64_encode(book.selector.bytes)}/0/img")
         self.assertEqual(response.status_code, 200)
+
+    def test_duplicate_pages(self):
+        c = Client()
+        user = User.objects.get(username="test")
+        generate_directory(user)
+        book = ComicBook.objects.get(file_name='test1.rar')
+        page = ComicPage.objects.get(Comic=book, index=0)
+        dup_page = ComicPage(Comic=book, index=0, page_file_name=page.page_file_name,
+                             content_type=page.content_type)
+        dup_page.save()
+        c.login(username="test", password="test")
+        response = c.get(f"/comic/read/{urlsafe_base64_encode(book.selector.bytes)}/0/img")
+        self.assertEqual(response.status_code, 200)
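
A note on the design: this commit recovers at read time rather than preventing duplicates at write time. A complementary guard, not part of this commit, would be a database-level uniqueness constraint on (Comic, index); a sketch assuming Django 2.2+ follows, with field definitions that are illustrative rather than copied from the repo.

# Hypothetical hardening, not in this commit: enforce uniqueness in the DB
# so a second ComicPage with the same (Comic, index) can never be saved.
from django.db import models


class ComicPage(models.Model):
    # Field names mirror those used in the diff; lengths are guesses.
    Comic = models.ForeignKey("ComicBook", on_delete=models.CASCADE)
    index = models.IntegerField()
    page_file_name = models.CharField(max_length=200)
    content_type = models.CharField(max_length=50)

    class Meta:
        constraints = [
            models.UniqueConstraint(fields=["Comic", "index"],
                                    name="unique_page_per_comic"),
        ]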