origami-docspring 2.2.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +18 -0
- data/examples/attachments/attachment.rb +7 -8
- data/examples/attachments/nested_document.rb +6 -5
- data/examples/encryption/encryption.rb +5 -4
- data/examples/events/events.rb +7 -6
- data/examples/flash/flash.rb +10 -9
- data/examples/forms/javascript.rb +14 -13
- data/examples/forms/xfa.rb +67 -66
- data/examples/javascript/hello_world.rb +6 -5
- data/examples/javascript/js_emulation.rb +26 -26
- data/examples/loop/goto.rb +12 -11
- data/examples/loop/named.rb +17 -16
- data/examples/signature/signature.rb +11 -11
- data/examples/uri/javascript.rb +25 -24
- data/examples/uri/open-uri.rb +5 -4
- data/examples/uri/submitform.rb +11 -10
- data/lib/origami/3d.rb +330 -334
- data/lib/origami/acroform.rb +267 -268
- data/lib/origami/actions.rb +266 -278
- data/lib/origami/annotations.rb +659 -670
- data/lib/origami/array.rb +192 -196
- data/lib/origami/boolean.rb +66 -70
- data/lib/origami/catalog.rb +360 -363
- data/lib/origami/collections.rb +132 -133
- data/lib/origami/compound.rb +125 -129
- data/lib/origami/destinations.rb +226 -237
- data/lib/origami/dictionary.rb +155 -154
- data/lib/origami/encryption.rb +967 -923
- data/lib/origami/extensions/fdf.rb +270 -275
- data/lib/origami/extensions/ppklite.rb +323 -328
- data/lib/origami/filespec.rb +170 -173
- data/lib/origami/filters/ascii.rb +162 -167
- data/lib/origami/filters/ccitt/tables.rb +248 -252
- data/lib/origami/filters/ccitt.rb +309 -312
- data/lib/origami/filters/crypt.rb +31 -34
- data/lib/origami/filters/dct.rb +47 -50
- data/lib/origami/filters/flate.rb +57 -60
- data/lib/origami/filters/jbig2.rb +50 -53
- data/lib/origami/filters/jpx.rb +40 -43
- data/lib/origami/filters/lzw.rb +151 -155
- data/lib/origami/filters/predictors.rb +250 -255
- data/lib/origami/filters/runlength.rb +111 -115
- data/lib/origami/filters.rb +319 -325
- data/lib/origami/font.rb +173 -177
- data/lib/origami/functions.rb +62 -66
- data/lib/origami/graphics/colors.rb +203 -208
- data/lib/origami/graphics/instruction.rb +79 -81
- data/lib/origami/graphics/path.rb +141 -144
- data/lib/origami/graphics/patterns.rb +156 -160
- data/lib/origami/graphics/render.rb +51 -47
- data/lib/origami/graphics/state.rb +144 -142
- data/lib/origami/graphics/text.rb +185 -188
- data/lib/origami/graphics/xobject.rb +818 -804
- data/lib/origami/graphics.rb +25 -26
- data/lib/origami/header.rb +63 -65
- data/lib/origami/javascript.rb +718 -651
- data/lib/origami/linearization.rb +284 -285
- data/lib/origami/metadata.rb +156 -135
- data/lib/origami/name.rb +98 -100
- data/lib/origami/null.rb +49 -51
- data/lib/origami/numeric.rb +133 -135
- data/lib/origami/obfuscation.rb +180 -182
- data/lib/origami/object.rb +634 -631
- data/lib/origami/optionalcontent.rb +147 -149
- data/lib/origami/outline.rb +46 -48
- data/lib/origami/outputintents.rb +76 -77
- data/lib/origami/page.rb +637 -596
- data/lib/origami/parser.rb +214 -221
- data/lib/origami/parsers/fdf.rb +44 -45
- data/lib/origami/parsers/pdf/lazy.rb +147 -154
- data/lib/origami/parsers/pdf/linear.rb +104 -109
- data/lib/origami/parsers/pdf.rb +109 -107
- data/lib/origami/parsers/ppklite.rb +44 -46
- data/lib/origami/pdf.rb +886 -896
- data/lib/origami/reference.rb +116 -120
- data/lib/origami/signature.rb +617 -625
- data/lib/origami/stream.rb +560 -558
- data/lib/origami/string.rb +366 -368
- data/lib/origami/template/patterns.rb +50 -52
- data/lib/origami/template/widgets.rb +111 -114
- data/lib/origami/trailer.rb +153 -157
- data/lib/origami/tree.rb +55 -57
- data/lib/origami/version.rb +19 -19
- data/lib/origami/webcapture.rb +87 -90
- data/lib/origami/xfa/config.rb +409 -414
- data/lib/origami/xfa/connectionset.rb +113 -117
- data/lib/origami/xfa/datasets.rb +38 -42
- data/lib/origami/xfa/localeset.rb +33 -37
- data/lib/origami/xfa/package.rb +49 -52
- data/lib/origami/xfa/pdf.rb +54 -59
- data/lib/origami/xfa/signature.rb +33 -37
- data/lib/origami/xfa/sourceset.rb +34 -38
- data/lib/origami/xfa/stylesheet.rb +35 -39
- data/lib/origami/xfa/template.rb +1630 -1634
- data/lib/origami/xfa/xdc.rb +33 -37
- data/lib/origami/xfa/xfa.rb +132 -123
- data/lib/origami/xfa/xfdf.rb +34 -38
- data/lib/origami/xfa/xmpmeta.rb +34 -38
- data/lib/origami/xfa.rb +50 -53
- data/lib/origami/xreftable.rb +462 -462
- data/lib/origami.rb +37 -38
- data/test/test_actions.rb +22 -20
- data/test/test_annotations.rb +54 -52
- data/test/test_forms.rb +23 -21
- data/test/test_native_types.rb +82 -78
- data/test/test_object_tree.rb +25 -24
- data/test/test_pages.rb +43 -41
- data/test/test_pdf.rb +2 -0
- data/test/test_pdf_attachment.rb +23 -21
- data/test/test_pdf_create.rb +16 -15
- data/test/test_pdf_encrypt.rb +69 -66
- data/test/test_pdf_parse.rb +131 -129
- data/test/test_pdf_parse_lazy.rb +53 -53
- data/test/test_pdf_sign.rb +67 -67
- data/test/test_streams.rb +145 -143
- data/test/test_xrefs.rb +46 -45
- metadata +64 -8
data/lib/origami/pdf.rb
CHANGED
@@ -1,22 +1,22 @@

The first 19 lines are replaced. The new file opens with a frozen-string-literal magic comment followed by the GNU LGPL banner, ahead of the unchanged require lines:

+# frozen_string_literal: true
+
+#
+# This file is part of Origami, PDF manipulation framework for Ruby
+# Copyright (C) 2016 Guillaume Delugré.
+#
+# Origami is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Origami is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Origami. If not, see <http://www.gnu.org/licenses/>.
+#
 require 'origami/object'
 require 'origami/compound'

@@ -62,1047 +62,1037 @@ require 'origami/parsers/pdf/linear'

The rest of the file, the PDF class inside module Origami with its nested Revision class and the private helpers (walk_object, search_object, load_object_at_offset, make_encrypted_object, load_all_objects, rebuild_xrefs, compile, physicalize, build_object, build_compound_object, output, assert_valid_eol, init, rebuild_dummy_xrefs, build_xrefs, get_object_offset), is restyled throughout. The changes visible in the diff are stylistic:

- and/or/not become &&/||/!: "not @xreftable.nil?" → "!@xreftable.nil?", "if owner and not owner.equal?(self)" → "if owner && !owner.equal?(self)", "next if xref.nil? or xref.free?" → "next if xref.nil? || xref.free?".
- Redundant explicit receivers are dropped: "self.insert(object.export)" → "insert(object.export)", "self.grep(pattern, ...)" → "grep(pattern, ...)", "self.trailer.dictionary" → "trailer.dictionary", "self.get_object(no)" → "get_object(no)".
- alias is replaced with alias_method ("alias insert <<" → "alias_method :insert, :<<", "alias root_objects indirect_objects" → "alias_method :root_objects, :indirect_objects"); in the new version write, [], insert and root_objects are all declared with alias_method.
- Conditional assignments become if expressions: PDF.read now selects "parser_class = if lazy ... PDF::LazyParser ... else ... PDF::LinearParser ... end", and allocate_new_object_number assigns "no = if last_object.nil? ... 1 ... else ... last_object.no + 1 ... end".
- Block, array and lambda spacing is normalized (".select { |object| ... }", "[XRef.new(0, 0, XRef::FREE)]", "->(objects) do"), "#:nodoc:" comments become "# :nodoc:", "version[0,1]" becomes "version[0, 1]", and indentation is made consistent.
- In output, the buffer is built from a mutable string literal, bin = +"" (commented "Use mutable string"), so in-place concatenation keeps working under the new frozen_string_literal pragma, and the obfuscated-trailer ternary is parenthesized.
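The entry points touched by this hunk keep the signatures shown in the diff: PDF.read(path, options) with a lazy option choosing between PDF::LazyParser and PDF::LinearParser, PDF.create(output) with an optional block (aliased as write), and PDF#save(path, params) with delinearize/recompile/decrypt options. A minimal usage sketch based only on those signatures; the file names are placeholders, and requiring the gem as require 'origami' is assumed from data/lib/origami.rb:

    require 'origami'

    # Parse an existing document; lazy: true selects PDF::LazyParser,
    # otherwise PDF::LinearParser is used.
    pdf = Origami::PDF.read("input.pdf", lazy: true)

    # Create a document from scratch; the block runs on the new instance
    # before it is written out (PDF.create is aliased as write).
    Origami::PDF.create("new.pdf") do |doc|
      # append_page is defined in data/lib/origami/page.rb and is the same
      # call used by #compile above to guarantee at least one page.
      doc.append_page
    end

    # Save a parsed document; delinearize, recompile and decrypt
    # default to the values listed in PDF#save above.
    pdf.save("copy.pdf", recompile: true)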