From 03ea88f91e120898a26153cecc8cf3d7747a57ba Mon Sep 17 00:00:00 2001
From: Sebastian Rasmussen
Date: Fri, 11 May 2018 20:05:33 +0800
Subject: Fix 699332: Copy resources when cleaning content streams.

Previously, if content streams were both cleaned and sanitized, the
pages' resource dictionaries would retain only the actually used
resources. If the content streams were only cleaned and not sanitized,
the pages' resource dictionaries were incorrectly emptied. When only
cleaning, all resources, whether used or not, ought to be retained, as
is the case after this commit.
---
 source/pdf/pdf-clean.c | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'source/pdf')

diff --git a/source/pdf/pdf-clean.c b/source/pdf/pdf-clean.c
index 3a97a8fb..19ddd3b4 100644
--- a/source/pdf/pdf-clean.c
+++ b/source/pdf/pdf-clean.c
@@ -166,14 +166,13 @@ void pdf_filter_page_contents(fz_context *ctx, pdf_document *doc, pdf_page *page
 
 	fz_try(ctx)
 	{
-		res = pdf_new_dict(ctx, doc, 1);
-
 		contents = pdf_page_contents(ctx, page);
 		resources = pdf_page_resources(ctx, page);
 
 		proc_buffer = pdf_new_buffer_processor(ctx, buffer, ascii);
 		if (sanitize)
 		{
+			res = pdf_new_dict(ctx, doc, 1);
 			proc_filter = pdf_new_filter_processor_with_text_filter(ctx, doc, proc_buffer, resources, res, text_filter, after_text, proc_arg);
 
 			pdf_process_contents(ctx, proc_filter, doc, resources, contents, cookie);
@@ -181,6 +180,7 @@ void pdf_filter_page_contents(fz_context *ctx, pdf_document *doc, pdf_page *page
 		}
 		else
 		{
+			res = pdf_deep_copy_obj(ctx, resources);
 			pdf_process_contents(ctx, proc_buffer, doc, resources, contents, cookie);
 		}
 		pdf_close_processor(ctx, proc_buffer);
-- 
cgit v1.2.3
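
For context, a minimal sketch of the resource-handling decision this patch
establishes in pdf_filter_page_contents(). It is an illustration only, not
part of the patch: choose_filtered_resources() is a hypothetical helper name,
error handling and the remaining processor parameters are omitted, and the
two MuPDF calls it returns are exactly the ones moved/added by the diff above.

#include "mupdf/pdf.h"

/* Hypothetical helper (illustration only): pick the object that becomes
 * the page's new resource dictionary after filtering its contents. */
static pdf_obj *
choose_filtered_resources(fz_context *ctx, pdf_document *doc, pdf_obj *resources, int sanitize)
{
	if (sanitize)
	{
		/* Sanitizing: start from an empty dictionary; the filter
		 * processor then records only the resources the content
		 * stream actually references. */
		return pdf_new_dict(ctx, doc, 1);
	}
	else
	{
		/* Cleaning only: no filter processor runs, so an empty
		 * dictionary would never be populated. Deep-copy the page's
		 * existing resources so all of them are retained. */
		return pdf_deep_copy_obj(ctx, resources);
	}
}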