summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSebastian Rasmussen <sebras@gmail.com>2018-05-11 20:05:33 +0800
committerSebastian Rasmussen <sebras@gmail.com>2018-05-11 20:09:22 +0800
commit03ea88f91e120898a26153cecc8cf3d7747a57ba (patch)
tree7b3c6da1a2a53790b6f7b8ca45b8ea26c4adbe7c
parente6dfa88940aec6820411fc0d013c2bd0faa5547e (diff)
downloadmupdf-03ea88f91e120898a26153cecc8cf3d7747a57ba.tar.xz
Fix 699332: Copy resources when cleaning content streams.
Previously, when content streams were both cleaned and sanitized, the pages' resource dictionaries would retain only the actually used resources. If the content streams were only cleaned and not sanitized, the pages' resource dictionaries were incorrectly emptied. In that case all resources, whether used or not, ought to be retained, as is the case after this commit.
-rw-r--r--source/pdf/pdf-clean.c4
1 file changed, 2 insertions, 2 deletions
diff --git a/source/pdf/pdf-clean.c b/source/pdf/pdf-clean.c
index 3a97a8fb..19ddd3b4 100644
--- a/source/pdf/pdf-clean.c
+++ b/source/pdf/pdf-clean.c
@@ -166,14 +166,13 @@ void pdf_filter_page_contents(fz_context *ctx, pdf_document *doc, pdf_page *page
fz_try(ctx)
{
- res = pdf_new_dict(ctx, doc, 1);
-
contents = pdf_page_contents(ctx, page);
resources = pdf_page_resources(ctx, page);
proc_buffer = pdf_new_buffer_processor(ctx, buffer, ascii);
if (sanitize)
{
+ res = pdf_new_dict(ctx, doc, 1);
proc_filter = pdf_new_filter_processor_with_text_filter(ctx, doc, proc_buffer, resources, res, text_filter, after_text, proc_arg);
pdf_process_contents(ctx, proc_filter, doc, resources, contents, cookie);
@@ -181,6 +180,7 @@ void pdf_filter_page_contents(fz_context *ctx, pdf_document *doc, pdf_page *page
}
else
{
+ res = pdf_deep_copy_obj(ctx, resources);
pdf_process_contents(ctx, proc_buffer, doc, resources, contents, cookie);
}
pdf_close_processor(ctx, proc_buffer);