_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q16100 | Origami.PageTreeNode.each_page | train | def each_page(browsed_nodes: [], &block)
# Iterates over each Page reachable from this page tree node, yielding pages
# in document order. Without a block, returns an Enumerator sized by Count.
# browsed_nodes accumulates already-visited nodes to detect cyclic trees.
return enum_for(__method__) { self.Count.to_i } unless block_given?
# Identity comparison (equal?) catches cycles in the Kids graph.
if browsed_nodes.any?{|node| node.equal?(self)}
raise InvalidPageTreeError, "Cyclic tree graph detected"
end
unless self.Kids.is_a?(Array)
raise InvalidPageTreeError, "Kids must be an Array"
end
browsed_nodes.push(self)
unless self.Count.nil?
# Use the smaller of Count and the actual Kids length so a corrupted
# Count field cannot push iteration past the real children.
[ self.Count.value, self.Kids.length ].min.times do |n|
node = self.Kids[n].solve
case node
when PageTreeNode then node.each_page(browsed_nodes: browsed_nodes, &block)
when Page then yield(node)
else
raise InvalidPageTreeError, "not a Page or PageTreeNode"
end
end
end
self
end | ruby | {
"resource": ""
} |
q16101 | Origami.PageTreeNode.get_page | train | def get_page(n)
  # Fetches the n-th Page of the tree (pages are numbered starting at 1).
  # Raises IndexError when n is out of bounds or the page cannot be located.
  raise IndexError, "Page numbers are referenced starting from 1" if n < 1
  raise IndexError, "Page not found" if n > self.Count.to_i
  page = self.each_page.lazy.drop(n - 1).first
  page or raise IndexError, "Page not found"
end | ruby | {
"resource": ""
} |
q16102 | Origami.Page.each_content_stream | train | def each_content_stream
  # Yields every content stream attached to this page.
  # Without a block, returns an Enumerator sized by the number of streams.
  contents = self.Contents
  unless block_given?
    return enum_for(__method__) do
      if contents.is_a?(Array) then contents.length
      elsif contents.is_a?(Stream) then 1
      else 0
      end
    end
  end
  if contents.is_a?(Stream)
    yield(contents)
  elsif contents.is_a?(Array)
    contents.each { |stm| yield(stm.solve) }
  end
end | ruby | {
"resource": ""
} |
q16103 | Origami.Page.add_annotation | train | def add_annotation(*annotations)
# Appends one or more annotation objects to the page's Annots array,
# creating the array when absent.
self.Annots ||= []
annotations.each do |annot|
# Back-reference the page from the annotation's P entry, but only when
# the page is indirect -- a direct page cannot be referenced.
annot.solve[:P] = self if self.indirect?
self.Annots << annot
end
end | ruby | {
"resource": ""
} |
q16104 | Origami.Page.each_annotation | train | def each_annotation
  # Yields each annotation of the page, solved to its target object.
  # Without a block, returns an Enumerator sized by the Annots array length.
  annots = self.Annots
  unless block_given?
    return enum_for(__method__) { annots.is_a?(Array) ? annots.length : 0 }
  end
  return unless annots.is_a?(Array)
  annots.each { |annot| yield(annot.solve) }
end | ruby | {
"resource": ""
} |
q16105 | Origami.Page.add_flash_application | train | def add_flash_application(swfspec, params = {})
options =
{
windowed: false,
transparent: false,
navigation_pane: false,
toolbar: false,
pass_context_click: false,
activation: Annotation::RichMedia::Activation::PAGE_OPEN,
deactivation: Annotation::RichMedia::Deactivation::PAGE_CLOSE,
flash_vars: nil
}
options.update(params)
annot = create_richmedia(:Flash, swfspec, options)
add_annotation(annot)
annot
end | ruby | {
"resource": ""
} |
q16106 | Origami.PDF.metadata | train | def metadata
metadata_stm = self.Catalog.Metadata
if metadata_stm.is_a?(Stream)
doc = REXML::Document.new(metadata_stm.data)
info = {}
doc.elements.each('*/*/rdf:Description') do |description|
description.attributes.each_attribute do |attr|
case attr.prefix
when 'pdf','xap'
info[attr.name] = attr.value
end
end
description.elements.each('*') do |element|
value = (element.elements['.//rdf:li'] || element).text
info[element.name] = value.to_s
end
end
info
end
end | ruby | {
"resource": ""
} |
q16107 | Origami.PDF.create_metadata | train | def create_metadata(info = {})
skeleton = <<-XMP
<?packet begin="\xef\xbb\xbf" id="W5M0MpCehiHzreSzNTczkc9d"?>
<x:xmpmeta xmlns:x="adobe:ns:meta/">
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
<rdf:Description rdf:about="" xmlns:pdf="http://ns.adobe.com/pdf/1.3/">
</rdf:Description>
</rdf:RDF>
</x:xmpmeta>
<?xpacket end="w"?>
XMP
xml =
if self.Catalog.Metadata.is_a?(Stream)
self.Catalog.Metadata.data
else
skeleton
end
doc = REXML::Document.new(xml)
desc = doc.elements['*/*/rdf:Description']
info.each do |name, value|
elt = REXML::Element.new "pdf:#{name}"
elt.text = value
desc.elements << elt
end
xml = ""; doc.write(xml, 4)
if self.Catalog.Metadata.is_a?(Stream)
self.Catalog.Metadata.data = xml
else
self.Catalog.Metadata = Stream.new(xml)
end
self.Catalog.Metadata
end | ruby | {
"resource": ""
} |
q16108 | Origami.PDF.trailer | train | def trailer
  # Returns the current trailer of the document: the standard trailer
  # dictionary when present, otherwise the cross-reference stream of the
  # last revision. Raises InvalidPDFError when neither exists.
  last_revision = @revisions.last
  trl =
    if last_revision.trailer.dictionary?
      last_revision.trailer
    else
      last_revision.xrefstm
    end
  raise InvalidPDFError, "No trailer found" if trl.nil?
  trl
end | ruby | {
"resource": ""
} |
q16109 | Origami.PPKLite.add_certificate | train | def add_certificate(certfile, attributes, viewable: false, editable: false)
if certfile.is_a?(OpenSSL::X509::Certificate)
x509 = certfile
else
x509 = OpenSSL::X509::Certificate.new(certfile)
end
address_book = get_address_book
cert = Certificate.new
cert.Cert = x509.to_der
cert.ID = address_book.NextID
address_book.NextID += 1
cert.Trust = attributes
cert.Viewable = viewable
cert.Editable = editable
address_book.Entries.push(self << cert)
end | ruby | {
"resource": ""
} |
q16110 | Origami.Stream.each_filter | train | def each_filter
  # Yields each filter of the stream's Filter entry (single filter or Array).
  # Without a block, returns an Enumerator sized by the filter count.
  filters = self.Filter
  unless block_given?
    return enum_for(__method__) do
      if filters.nil? then 0
      elsif filters.is_a?(Array) then filters.length
      else 1
      end
    end
  end
  return if filters.nil?
  if filters.is_a?(Array)
    filters.each { |filter| yield(filter) }
  else
    yield(filters)
  end
  self
end | ruby | {
"resource": ""
} |
q16111 | Origami.Stream.set_predictor | train | def set_predictor(predictor, colors: 1, bitspercomponent: 8, columns: 1)
  # Installs a predictor function on the stream's Flate or LZW filter layer.
  #
  # BUG FIX: the original read `layer = filters.index(:FlateDecode) or
  # filters.index(:LZWDecode)`. Since `or` binds looser than `=`, only the
  # Flate lookup was ever assigned and the LZW result was discarded, so
  # streams filtered solely with :LZWDecode always raised. `||` fixes this.
  filters = self.filters
  layer = filters.index(:FlateDecode) || filters.index(:LZWDecode)
  if layer.nil?
    raise InvalidStreamObjectError, 'Predictor functions can only be used with Flate or LZW filters'
  end
  params = Filter::LZW::DecodeParms.new
  params[:Predictor] = predictor
  # Only record values differing from the PDF defaults to keep the
  # parameter dictionary minimal.
  params[:Colors] = colors if colors != 1
  params[:BitsPerComponent] = bitspercomponent if bitspercomponent != 8
  params[:Columns] = columns if columns != 1
  set_decode_params(layer, params)
  self
end | ruby | {
"resource": ""
} |
q16112 | Origami.Stream.decode! | train | def decode!
self.decrypt! if self.is_a?(Encryption::EncryptedStream)
return if decoded?
filters = self.filters
dparams = decode_params
@data = @encoded_data.dup
@data.freeze
filters.each_with_index do |filter, layer|
params = dparams[layer].is_a?(Dictionary) ? dparams[layer] : {}
# Handle Crypt filters.
if filter == :Crypt
raise Filter::Error, "Crypt filter must be the first filter" unless layer.zero?
# Skip the Crypt filter.
next
end
begin
@data = decode_data(@data, filter, params)
rescue Filter::Error => error
@data = error.decoded_data
raise
end
end
self
end | ruby | {
"resource": ""
} |
q16113 | Origami.Stream.encode! | train | def encode!
return if encoded?
filters = self.filters
dparams = decode_params
@encoded_data = @data.dup
(filters.length - 1).downto(0) do |layer|
params = dparams[layer].is_a?(Dictionary) ? dparams[layer] : {}
filter = filters[layer]
# Handle Crypt filters.
if filter == :Crypt
raise Filter::Error, "Crypt filter must be the first filter" unless layer.zero?
# Skip the Crypt filter.
next
end
@encoded_data = encode_data(@encoded_data, filter, params)
end
self.Length = @encoded_data.length
self
end | ruby | {
"resource": ""
} |
q16114 | Origami.ObjectStream.import_object_from_document | train | def import_object_from_document(object)
obj_doc = object.document
# Remove the previous instance if the object is indirect to avoid duplicates.
if obj_doc.equal?(@document)
@document.delete_object(object.reference) if object.indirect?
# Otherwise, create a exported version of the object.
else
object = object.export
end
object
end | ruby | {
"resource": ""
} |
q16115 | Origami.String.to_utf8 | train | def to_utf8
detect_encoding
utf16 = self.encoding.to_utf16be(self.value)
utf16.slice!(0, Encoding::UTF16BE::BOM.size)
utf16.encode("utf-8", "utf-16be")
end | ruby | {
"resource": ""
} |
q16116 | Origami.ContentStream.draw_polygon | train | def draw_polygon(coords = [], attr = {})
load!
stroke_color = attr.fetch(:stroke_color, DEFAULT_STROKE_COLOR)
fill_color = attr.fetch(:fill_color, DEFAULT_FILL_COLOR)
line_cap = attr.fetch(:line_cap, DEFAULT_LINECAP)
line_join = attr.fetch(:line_join, DEFAULT_LINEJOIN)
line_width = attr.fetch(:line_width, DEFAULT_LINEWIDTH)
dash_pattern = attr.fetch(:dash, DEFAULT_DASHPATTERN)
stroke = attr[:stroke].nil? ? true : attr[:stroke]
fill = attr[:fill].nil? ? false : attr[:fill]
stroke = true if fill == false and stroke == false
set_fill_color(fill_color) if fill
set_stroke_color(stroke_color) if stroke
set_line_width(line_width)
set_line_cap(line_cap)
set_line_join(line_join)
set_dash_pattern(dash_pattern)
if @canvas.gs.text_state.is_in_text_object?
@instructions << PDF::Instruction.new('ET').render(@canvas)
end
unless coords.size < 1
x,y = coords.slice!(0)
@instructions << PDF::Instruction.new('m',x,y).render(@canvas)
coords.each do |px,py|
@instructions << PDF::Instruction.new('l',px,py).render(@canvas)
end
@instructions << (i =
if stroke and not fill
PDF::Instruction.new('s')
elsif fill and not stroke
PDF::Instruction.new('f')
elsif fill and stroke
PDF::Instruction.new('b')
end
)
i.render(@canvas)
end
self
end | ruby | {
"resource": ""
} |
q16117 | Origami.StandardObject.version_required | train | def version_required #:nodoc:
max = [ "1.0", 0 ]
self.each_key do |field|
attributes = self.class.fields[field.value]
if attributes.nil?
STDERR.puts "Warning: object #{self.class} has undocumented field #{field.value}"
next
end
version = attributes[:Version] || '1.0'
level = attributes[:ExtensionLevel] || 0
current = [ version, level ]
max = [ max, current, self[field.value].version_required ].max
end
max
end | ruby | {
"resource": ""
} |
q16118 | Origami.Object.set_indirect | train | def set_indirect(bool)
  # Marks the object as indirect (true) or direct (false) and returns self.
  # Raises TypeError unless the argument is exactly true or false.
  raise TypeError, "The argument must be boolean" unless bool == true || bool == false
  unless bool
    # A direct object carries no numbering, owning document or file offset.
    @no = @generation = 0
    @document = nil
    @file_offset = nil
  end
  @indirect = bool
  self
end | ruby | {
"resource": ""
} |
q16119 | Origami.Object.copy | train | def copy
# Deep-copies the object through a Marshal round-trip. Document and parent
# links are detached beforehand so the whole document graph is not serialized
# along with the object, then restored on both the receiver and the copy.
saved_doc = @document
saved_parent = @parent
@document = @parent = nil # do not process parent object and document in the copy
# Perform the recursive copy (quite dirty).
copyobj = Marshal.load(Marshal.dump(self))
# restore saved values
@document = saved_doc
@parent = saved_parent
# Re-attach the copy to the same document (if indirect) and parent.
copyobj.set_document(saved_doc) if copyobj.indirect?
copyobj.parent = parent
copyobj
end | ruby | {
"resource": ""
} |
q16120 | Origami.Object.cast_to | train | def cast_to(type, parser = nil)
assert_cast_type(type)
cast = type.new(self.copy, parser)
cast.file_offset = @file_offset
transfer_attributes(cast)
end | ruby | {
"resource": ""
} |
q16121 | Origami.Object.reference | train | def reference
raise InvalidObjectError, "Cannot reference a direct object" unless self.indirect?
ref = Reference.new(@no, @generation)
ref.parent = self
ref
end | ruby | {
"resource": ""
} |
q16122 | Origami.Object.xrefs | train | def xrefs
  # Returns the cross-references (collected from stream dictionaries and
  # object caches across the document) that point to this indirect object.
  # Raises InvalidObjectError for direct or unattached objects.
  raise InvalidObjectError, "Cannot find xrefs to a direct object" unless self.indirect?
  raise InvalidObjectError, "Not attached to any document" if self.document.nil?
  # BUG FIX: the original chain ended in `.compact!`, which returns nil when
  # no nil entries were removed, so the method could return nil instead of an
  # Array. The non-destructive `.compact` always returns an Array.
  @document.each_object(compressed: true)
           .flat_map { |object|
               case object
               when Stream
                   object.dictionary.xref_cache[self.reference]
               when ObjectCache
                   object.xref_cache[self.reference]
               end
           }
           .compact
end | ruby | {
"resource": ""
} |
q16123 | Origami.Object.export | train | def export
exported_obj = self.logicalize
exported_obj.no = exported_obj.generation = 0
exported_obj.set_document(nil) if exported_obj.indirect?
exported_obj.parent = nil
exported_obj.xref_cache.clear
exported_obj
end | ruby | {
"resource": ""
} |
q16124 | Origami.Object.type | train | def type
  # Symbolic type name of the object, derived from the most specific
  # class name available (class, superclass, or native type).
  name = self.class.name || self.class.superclass.name || self.native_type.name
  name.split("::").last.to_sym
end | ruby | {
"resource": ""
} |
q16125 | Origami.Object.transfer_attributes | train | def transfer_attributes(target)
target.no, target.generation = @no, @generation
target.parent = @parent
if self.indirect?
target.set_indirect(true)
target.set_document(@document)
end
target
end | ruby | {
"resource": ""
} |
q16126 | Origami.Object.resolve_all_references | train | def resolve_all_references(obj, browsed: [], cache: {})
# Recursively replaces every Reference inside obj with a copy of the object
# it designates, producing a self-contained ("logicalized") object graph.
# browsed prevents infinite recursion on cyclic graphs; cache guarantees a
# reference solved twice maps to the same copied object.
return obj if browsed.include?(obj)
browsed.push(obj)
if obj.is_a?(ObjectStream)
obj.each do |subobj|
resolve_all_references(subobj, browsed: browsed, cache: cache)
end
end
if obj.is_a?(Stream)
resolve_all_references(obj.dictionary, browsed: browsed, cache: cache)
end
if obj.is_a?(CompoundObject)
obj.update_values! do |subobj|
if subobj.is_a?(Reference)
# Solve the reference once, copy the target, and demote the copy to a
# direct object embedded in its new parent.
subobj = (cache[subobj] ||= subobj.solve.copy)
subobj.no = subobj.generation = 0
subobj.parent = obj
end
resolve_all_references(subobj, browsed: browsed, cache: cache)
end
end
obj
end | ruby | {
"resource": ""
} |
q16127 | Origami.Parser.try_object_promotion | train | def try_object_promotion(obj)
  # Promotes a freshly parsed object to a more specific type when a deferred
  # cast was registered for its reference; returns the object unchanged
  # otherwise (or when type propagation is disabled).
  return obj unless Origami::OPTIONS[:enable_type_propagation] and @deferred_casts.key?(obj.reference)
  types = @deferred_casts[obj.reference]
  types = [ types ] unless types.is_a?(::Array)
  # Only promote when a candidate type actually specializes the current class.
  cast_type = types.find { |type| type < obj.class }
  cast_type ? obj.cast_to(cast_type, self) : obj
end | ruby | {
"resource": ""
} |
q16128 | Origami.PDF.signed? | train | def signed?
begin
self.Catalog.AcroForm.is_a?(Dictionary) and
self.Catalog.AcroForm.SigFlags.is_a?(Integer) and
(self.Catalog.AcroForm.SigFlags & InteractiveForm::SigFlags::SIGNATURES_EXIST != 0)
rescue InvalidReferenceError
false
end
end | ruby | {
"resource": ""
} |
q16129 | Origami.PDF.extract_signed_data | train | def extract_signed_data(digsig)
# Computes the boundaries of the Contents field.
start_sig = digsig[:Contents].file_offset
stream = StringScanner.new(self.original_data)
stream.pos = digsig[:Contents].file_offset
Object.typeof(stream).parse(stream)
end_sig = stream.pos
stream.terminate
r1, r2 = digsig.ranges
if r1.begin != 0 or
r2.end != self.original_data.size or
r1.end != start_sig or
r2.begin != end_sig
raise SignatureError, "Invalid signature byte range"
end
self.original_data[r1] + self.original_data[r2]
end | ruby | {
"resource": ""
} |
q16130 | Origami.CompoundObject.update_values | train | def update_values(&b)
return enum_for(__method__) unless block_given?
return self.class.new self.transform_values(&b) if self.respond_to?(:transform_values)
return self.class.new self.map(&b) if self.respond_to?(:map)
raise NotImplementedError, "This object does not implement this method"
end | ruby | {
"resource": ""
} |
q16131 | Origami.CompoundObject.update_values! | train | def update_values!(&b)
return enum_for(__method__) unless block_given?
return self.transform_values!(&b) if self.respond_to?(:transform_values!)
return self.map!(&b) if self.respond_to?(:map!)
raise NotImplementedError, "This object does not implement this method"
end | ruby | {
"resource": ""
} |
q16132 | Origami.PDF.linearized? | train | def linearized?
begin
first_obj = @revisions.first.objects.min_by{|obj| obj.file_offset}
rescue
return false
end
@revisions.size > 1 and first_obj.is_a?(Dictionary) and first_obj.has_key? :Linearized
end | ruby | {
"resource": ""
} |
q16133 | Origami.PDF.delinearize! | train | def delinearize!
raise LinearizationError, 'Not a linearized document' unless self.linearized?
#
# Saves the first trailer.
#
prev_trailer = @revisions.first.trailer
linear_dict = @revisions.first.objects.min_by{|obj| obj.file_offset}
#
# Removes hint streams used by linearization.
#
delete_hint_streams(linear_dict)
#
# Update the trailer.
#
last_trailer = (@revisions.last.trailer ||= Trailer.new)
last_trailer.dictionary ||= Dictionary.new
if prev_trailer.dictionary?
last_trailer.dictionary =
last_trailer.dictionary.merge(prev_trailer.dictionary)
else
xrefstm = @revisions.last.xrefstm
raise LinearizationError,
'Cannot find trailer info while delinearizing document' unless xrefstm.is_a?(XRefStream)
last_trailer.dictionary[:Root] = xrefstm[:Root]
last_trailer.dictionary[:Encrypt] = xrefstm[:Encrypt]
last_trailer.dictionary[:Info] = xrefstm[:Info]
last_trailer.dictionary[:ID] = xrefstm[:ID]
end
#
# Remove all xrefs.
# Fix: Should be merged instead.
#
remove_xrefs
#
# Remove the linearization revision.
#
@revisions.first.body.delete(linear_dict.reference)
@revisions.last.body.merge! @revisions.first.body
remove_revision(0)
self
end | ruby | {
"resource": ""
} |
q16134 | Origami.PDF.delete_hint_streams | train | def delete_hint_streams(linearization_dict)
hints = linearization_dict[:H]
return unless hints.is_a?(Array)
hints.each_slice(2) do |offset, _length|
next unless offset.is_a?(Integer)
stream = get_object_by_offset(offset)
delete_object(stream.reference) if stream.is_a?(Stream)
end
end | ruby | {
"resource": ""
} |
q16135 | Origami.Reference.follow | train | def follow
doc = self.document
if doc.nil?
raise InvalidReferenceError, "Not attached to any document"
end
target = doc.get_object(self)
if target.nil? and not Origami::OPTIONS[:ignore_bad_references]
raise InvalidReferenceError, "Cannot resolve reference : #{self}"
end
target or Null.new
end | ruby | {
"resource": ""
} |
q16136 | Origami.PDF.create_form | train | def create_form(*fields)
acroform = self.Catalog.AcroForm ||= InteractiveForm.new.set_indirect(true)
self.add_fields(*fields)
acroform
end | ruby | {
"resource": ""
} |
q16137 | Origami.PDF.each_field | train | def each_field
return enum_for(__method__) do
if self.form? and self.Catalog.AcroForm.Fields.is_a?(Array)
self.Catalog.AcroForm.Fields.length
else
0
end
end unless block_given?
if self.form? and self.Catalog.AcroForm.Fields.is_a?(Array)
self.Catalog.AcroForm.Fields.each do |field|
yield(field.solve)
end
end
end | ruby | {
"resource": ""
} |
q16138 | Origami.PDF.create_security_handler | train | def create_security_handler(version, revision, params)
# Ensure the document has an ID.
doc_id = (trailer_key(:ID) || generate_id).first
# Create the standard encryption dictionary.
handler = Encryption::Standard::Dictionary.new
handler.Filter = :Standard
handler.V = version
handler.R = revision
handler.Length = params[:key_size]
handler.P = -1 # params[:Permissions]
# Build the crypt filter dictionary.
if revision >= 4
handler.EncryptMetadata = params[:encrypt_metadata]
handler.CF = Dictionary.new
crypt_filter = Encryption::CryptFilterDictionary.new
crypt_filter.AuthEvent = :DocOpen
if revision == 4
crypt_filter.CFM = :AESV2
else
crypt_filter.CFM = :AESV3
end
crypt_filter.Length = params[:key_size] >> 3
handler.CF[:StdCF] = crypt_filter
handler.StmF = handler.StrF = :StdCF
end
user_passwd, owner_passwd = params[:user_passwd], params[:owner_passwd]
# Setup keys.
handler.set_passwords(owner_passwd, user_passwd, doc_id)
encryption_key = handler.compute_user_encryption_key(user_passwd, doc_id)
# Install the encryption dictionary to the document.
self.trailer.Encrypt = self << handler
[ handler, encryption_key ]
end | ruby | {
"resource": ""
} |
q16139 | Origami.PDF.<< | train | def <<(object)
owner = object.document
#
# Does object belongs to another PDF ?
#
if owner and not owner.equal?(self)
import object
else
add_to_revision(object, @revisions.last)
end
end | ruby | {
"resource": ""
} |
q16140 | Origami.PDF.add_to_revision | train | def add_to_revision(object, revision)
object.set_indirect(true)
object.set_document(self)
object.no, object.generation = allocate_new_object_number if object.no == 0
revision.body[object.reference] = object
object.reference
end | ruby | {
"resource": ""
} |
q16141 | Origami.PDF.add_new_revision | train | def add_new_revision
root = @revisions.last.trailer[:Root] unless @revisions.empty?
@revisions << Revision.new(self)
@revisions.last.trailer = Trailer.new
@revisions.last.trailer.Root = root
self
end | ruby | {
"resource": ""
} |
q16142 | Origami.PDF.delete_object | train | def delete_object(no, generation = 0)
case no
when Reference
target = no
when ::Integer
target = Reference.new(no, generation)
else
raise TypeError, "Invalid parameter type : #{no.class}"
end
@revisions.each do |rev|
rev.body.delete(target)
end
end | ruby | {
"resource": ""
} |
q16143 | Origami.PDF.cast_object | train | def cast_object(reference, type) #:nodoc:
@revisions.each do |rev|
if rev.body.include?(reference)
object = rev.body[reference]
return object if object.is_a?(type)
if type < rev.body[reference].class
rev.body[reference] = object.cast_to(type, @parser)
return rev.body[reference]
end
end
end
nil
end | ruby | {
"resource": ""
} |
q16144 | Origami.PDF.search_object | train | def search_object(object, pattern, streams: true, object_streams: true)
result = []
case object
when Stream
result.concat object.dictionary.strings_cache.select{|str| str.match(pattern) }
result.concat object.dictionary.names_cache.select{|name| name.value.match(pattern) }
begin
result.push object if streams and object.data.match(pattern)
rescue Filter::Error
return result # Skip object if a decoding error occured.
end
return result unless object.is_a?(ObjectStream) and object_streams
object.each do |child|
result.concat search_object(child, pattern,
streams: streams, object_streams: object_streams)
end
when Name, String
result.push object if object.value.match(pattern)
when ObjectCache
result.concat object.strings_cache.select{|str| str.match(pattern) }
result.concat object.names_cache.select{|name| name.value.match(pattern) }
end
result
end | ruby | {
"resource": ""
} |
q16145 | Origami.PDF.load_object_at_offset | train | def load_object_at_offset(revision, offset)
return nil if loaded? or @parser.nil?
pos = @parser.pos
begin
object = @parser.parse_object(offset)
return nil if object.nil?
if self.is_a?(Encryption::EncryptedDocument)
make_encrypted_object(object)
end
add_to_revision(object, revision)
ensure
@parser.pos = pos
end
object
end | ruby | {
"resource": ""
} |
q16146 | Origami.PDF.make_encrypted_object | train | def make_encrypted_object(object)
case object
when String
object.extend(Encryption::EncryptedString)
when Stream
object.extend(Encryption::EncryptedStream)
when ObjectCache
object.strings_cache.each do |string|
string.extend(Encryption::EncryptedString)
end
end
end | ruby | {
"resource": ""
} |
q16147 | Origami.PDF.load_all_objects | train | def load_all_objects
return if loaded? or @parser.nil?
@revisions.each do |revision|
if revision.xreftable?
xrefs = revision.xreftable
elsif revision.xrefstm?
xrefs = revision.xrefstm
else
next
end
xrefs.each_with_number do |xref, no|
self.get_object(no) unless xref.free?
end
end
loaded!
end | ruby | {
"resource": ""
} |
q16148 | Origami.PDF.physicalize | train | def physicalize(options = {})
@revisions.each do |revision|
# Do not use each_object here as build_object may modify the iterator.
revision.objects.each do |obj|
build_object(obj, revision, options)
end
end
self
end | ruby | {
"resource": ""
} |
q16149 | Origami.PDF.init | train | def init
catalog = (self.Catalog = (trailer_key(:Root) || Catalog.new))
@revisions.last.trailer.Root = catalog.reference
loaded!
self
end | ruby | {
"resource": ""
} |
q16150 | Origami.PDF.build_xrefs | train | def build_xrefs(objects) #:nodoc:
lastno = 0
brange = 0
xrefs = [ XRef.new(0, XRef::FIRSTFREE, XRef::FREE) ]
xrefsection = XRef::Section.new
objects.sort_by {|object| object.reference}
.each do |object|
if (object.no - lastno).abs > 1
xrefsection << XRef::Subsection.new(brange, xrefs)
brange = object.no
xrefs.clear
end
xrefs << XRef.new(get_object_offset(object.no, object.generation), object.generation, XRef::USED)
lastno = object.no
end
xrefsection << XRef::Subsection.new(brange, xrefs)
xrefsection
end | ruby | {
"resource": ""
} |
q16151 | Origami.Dictionary.transform_values | train | def transform_values(&b)
self.class.new self.map { |k, v|
[ k.to_sym, b.call(v) ]
}.to_h
end | ruby | {
"resource": ""
} |
q16152 | Origami.Dictionary.transform_values! | train | def transform_values!(&b)
self.each_pair do |k, v|
self[k] = b.call(unlink_object(v))
end
end | ruby | {
"resource": ""
} |
q16153 | Origami.PDF.remove_xrefs | train | def remove_xrefs
@revisions.reverse_each do |rev|
if rev.xrefstm?
delete_object(rev.xrefstm.reference)
end
if rev.trailer.XRefStm.is_a?(Integer)
xrefstm = get_object_by_offset(rev.trailer.XRefStm)
delete_object(xrefstm.reference) if xrefstm.is_a?(XRefStream)
end
rev.xrefstm = rev.xreftable = nil
end
end | ruby | {
"resource": ""
} |
q16154 | Origami.XRefStream.each_with_number | train | def each_with_number
return enum_for(__method__) unless block_given?
load! if @xrefs.nil?
ranges = object_ranges
xrefs = @xrefs.to_enum
ranges.each do |range|
range.each do |no|
begin
yield(xrefs.next, no)
rescue StopIteration
raise InvalidXRefStreamObjectError, "Range is bigger than number of entries"
end
end
end
end | ruby | {
"resource": ""
} |
q16155 | Origami.XRefStream.find | train | def find(no)
load! if @xrefs.nil?
ranges = object_ranges
index = 0
ranges.each do |range|
return @xrefs[index + no - range.begin] if range.cover?(no)
index += range.size
end
nil
end | ruby | {
"resource": ""
} |
q16156 | Origami.XRefStream.field_widths | train | def field_widths
widths = self.W
unless widths.is_a?(Array) and widths.length == 3 and widths.all? {|w| w.is_a?(Integer) and w >= 0 }
raise InvalidXRefStreamObjectError, "Invalid W field: #{widths}"
end
widths
end | ruby | {
"resource": ""
} |
q16157 | RailsDb.ApplicationHelper.guess_name | train | def guess_name(sections)
# Attempts to resolve a route-helper-style name by replacing the last
# section with 'rails_db' and eval'ing the joined identifier. On NameError
# it drops the second-to-last section and retries recursively; returns nil
# once fewer than two sections remain.
# NOTE(review): `eval` on a constructed identifier is dangerous if
# `sections` can ever contain user-controlled input -- consider
# `public_send` or an explicit lookup instead. TODO confirm callers.
if sections.size > 1
sections[-1] = 'rails_db'
variable = sections.join("_")
result = eval(variable)
end
rescue NameError
sections.delete_at(-2)
guess_name(sections)
end | ruby | {
"resource": ""
} |
q16158 | CocoaPodsKeys.PreInstaller.setup | train | def setup
require 'key_master'
require 'keyring_liberator'
require 'pod/command/keys/set'
require 'cocoapods/user_interface'
require 'dotenv'
ui = Pod::UserInterface
options = @user_options || {}
current_dir = Pathname.pwd
Dotenv.load
project = options.fetch('project') { CocoaPodsKeys::NameWhisperer.get_project_name }
keyring = KeyringLiberator.get_current_keyring(project, current_dir)
unless keyring
check_for_multiple_keyrings(project, current_dir)
end
existing_keyring = !keyring.nil?
keyring = CocoaPodsKeys::Keyring.new(project, current_dir, []) unless keyring
has_shown_intro = false
keys = options.fetch('keys', [])
# Remove keys from the keyring that no longer exist
original_keyring_keys = keyring.keys.clone
original_keyring_keys.each do |key|
keyring.keychain_has_key?(key)
end
# Add keys to the keyring that have been added,
# and prompt for their value if needed.
keys.each do |key|
unless keyring.keychain_has_key?(key)
if ci?
raise Pod::Informative, "CocoaPods-Keys could not find a key named: #{key}"
end
unless has_shown_intro
ui.puts "\n CocoaPods-Keys has detected a keys mismatch for your setup."
has_shown_intro = true
end
ui.puts ' What is the key for ' + key.green
answer = ''
loop do
ui.print ' > '
answer = ui.gets.strip
break unless answer.empty?
end
ui.puts
args = CLAide::ARGV.new([key, answer, keyring.name])
setter = Pod::Command::Keys::Set.new(args)
setter.run
end
end
existing_keyring || !keys.empty?
end | ruby | {
"resource": ""
} |
q16159 | Formtastic.NamespacedClassFinder.find_with_const_defined | train | def find_with_const_defined(class_name)
@namespaces.find do |namespace|
if namespace.const_defined?(class_name)
break namespace.const_get(class_name)
end
end
end | ruby | {
"resource": ""
} |
q16160 | Grape.Endpoint.swagger_object | train | def swagger_object(target_class, request, options)
object = {
info: info_object(options[:info].merge(version: options[:doc_version])),
swagger: '2.0',
produces: content_types_for(target_class),
authorizations: options[:authorizations],
securityDefinitions: options[:security_definitions],
security: options[:security],
host: GrapeSwagger::DocMethods::OptionalObject.build(:host, options, request),
basePath: GrapeSwagger::DocMethods::OptionalObject.build(:base_path, options, request),
schemes: options[:schemes].is_a?(String) ? [options[:schemes]] : options[:schemes]
}
GrapeSwagger::DocMethods::Extensions.add_extensions_to_root(options, object)
object.delete_if { |_, value| value.blank? }
end | ruby | {
"resource": ""
} |
q16161 | Grape.Endpoint.info_object | train | def info_object(infos)
result = {
title: infos[:title] || 'API title',
description: infos[:description],
termsOfService: infos[:terms_of_service_url],
contact: contact_object(infos),
license: license_object(infos),
version: infos[:version]
}
GrapeSwagger::DocMethods::Extensions.add_extensions_to_info(infos, result)
result.delete_if { |_, value| value.blank? }
end | ruby | {
"resource": ""
} |
q16162 | Grape.Endpoint.license_object | train | def license_object(infos)
{
name: infos.delete(:license),
url: infos.delete(:license_url)
}.delete_if { |_, value| value.blank? }
end | ruby | {
"resource": ""
} |
q16163 | Grape.Endpoint.path_and_definition_objects | train | def path_and_definition_objects(namespace_routes, options)
@paths = {}
@definitions = {}
namespace_routes.each_key do |key|
routes = namespace_routes[key]
path_item(routes, options)
end
add_definitions_from options[:models]
[@paths, @definitions]
end | ruby | {
"resource": ""
} |
q16164 | TTY.Tree.node | train | def node(name, type = Node, &block)
parent = @nodes_stack.empty? ? Node::ROOT : @nodes_stack.last
level = [0, @nodes_stack.size - 1].max
prefix = ':pipe' * level
if parent.class == LeafNode
prefix = ':space' * level
end
node = type.new(name, parent.full_path, prefix, @nodes_stack.size)
@nodes << node
return unless block_given?
@nodes_stack << node
if block.arity.zero?
instance_eval(&block)
else
instance_eval(&(->(*_args) { block[node] }))
end
@nodes_stack.pop
end | ruby | {
"resource": ""
} |
q16165 | Panko.SerializationDescriptor.apply_filters | train | def apply_filters(options)
return unless options.key?(:only) || options.key?(:except)
attributes_only_filters, associations_only_filters = resolve_filters(options, :only)
attributes_except_filters, associations_except_filters = resolve_filters(options, :except)
self.attributes = apply_attribute_filters(
attributes,
attributes_only_filters,
attributes_except_filters
)
self.method_fields = apply_attribute_filters(
method_fields,
attributes_only_filters,
attributes_except_filters
)
unless has_many_associations.empty?
self.has_many_associations = apply_association_filters(
has_many_associations,
{ attributes: attributes_only_filters, associations: associations_only_filters },
attributes: attributes_except_filters, associations: associations_except_filters
)
end
unless has_one_associations.empty?
self.has_one_associations = apply_association_filters(
has_one_associations,
{ attributes: attributes_only_filters, associations: associations_only_filters },
attributes: attributes_except_filters, associations: associations_except_filters
)
end
end | ruby | {
"resource": ""
} |
q16166 | DefaultValueFor.ClassMethods.default_value_for | train | def default_value_for(attribute, options = {}, &block)
# Declares a default value for the given model attribute. The value comes
# either from the block, from options[:value], or from options itself when
# it is not a Hash. options[:allows_nil] (default true) controls whether an
# explicitly assigned nil is kept or overwritten by the default.
value = options
allows_nil = true
if options.is_a?(Hash)
opts = options.stringify_keys
value = opts.fetch('value', options)
allows_nil = opts.fetch('allows_nil', true)
end
# First declaration on this class: install the after_initialize hook and
# the class-level storage for default containers.
if !method_defined?(:set_default_values)
include(InstanceMethods)
after_initialize :set_default_values
class_attribute :_default_attribute_values
class_attribute :_default_attribute_values_not_allowing_nil
extend(DelayedClassMethods)
init_hash = true
else
# Re-initialize storage when this class has not yet shadowed the value
# inherited from its superclass (avoids mutating the parent's defaults).
init_hash = !singleton_methods(false).include?(:_default_attribute_values)
end
if init_hash
self._default_attribute_values = {}
self._default_attribute_values_not_allowing_nil = []
end
# Wrap the default in a container so lazy (block) and eager values share
# one evaluation interface.
if block_given?
container = BlockValueContainer.new(block)
else
container = NormalValueContainer.new(value)
end
_default_attribute_values[attribute.to_s] = container
_default_attribute_values_not_allowing_nil << attribute.to_s unless allows_nil
end | ruby | {
"resource": ""
} |
q16167 | ZendeskAppsSupport.Package.compile | train | def compile(options)
begin
app_id = options.fetch(:app_id)
asset_url_prefix = options.fetch(:assets_dir)
name = options.fetch(:app_name)
rescue KeyError => e
raise ArgumentError, e.message
end
locale = options.fetch(:locale, 'en')
source = manifest.iframe_only? ? nil : app_js
app_class_name = "app-#{app_id}"
# if no_template is an array, we still need the templates
templates = manifest.no_template == true ? {} : compiled_templates(app_id, asset_url_prefix)
SRC_TEMPLATE.result(
name: name,
version: manifest.version,
source: source,
app_class_properties: manifest.app_class_properties,
asset_url_prefix: asset_url_prefix,
logo_asset_hash: generate_logo_hash(manifest.products),
location_icons: location_icons,
app_class_name: app_class_name,
author: manifest.author,
translations: manifest.iframe_only? ? nil : runtime_translations(translations_for(locale)),
framework_version: manifest.framework_version,
templates: templates,
modules: commonjs_modules,
iframe_only: manifest.iframe_only?
)
end | ruby | {
"resource": ""
} |
q16168 | Hirb.Helpers::Table.default_field_lengths | train | def default_field_lengths
field_lengths = @headers ? @headers.inject({}) {|h,(k,v)| h[k] = String.size(v); h} :
@fields.inject({}) {|h,e| h[e] = 1; h }
@rows.each do |row|
@fields.each do |field|
len = String.size(row[field])
field_lengths[field] = len if len > field_lengths[field].to_i
end
end
field_lengths
end | ruby | {
"resource": ""
} |
q16169 | Hirb.Helpers::Table.array_to_indices_hash | train | def array_to_indices_hash(array)
array.inject({}) {|hash,e| hash[hash.size] = e; hash }
end | ruby | {
"resource": ""
} |
q16170 | Hirb.Formatter.format_output | train | def format_output(output, options={}, &block)
output_class = determine_output_class(output)
options = parse_console_options(options) if options.delete(:console)
options = Util.recursive_hash_merge(klass_config(output_class), options)
_format_output(output, options, &block)
end | ruby | {
"resource": ""
} |
q16171 | Hirb.DynamicView.dynamic_options | train | def dynamic_options(obj)
view_methods.each do |meth|
if obj.class.ancestors.map {|e| e.to_s }.include?(method_to_class(meth))
begin
return send(meth, obj)
rescue
raise "View failed to generate for '#{method_to_class(meth)}' "+
"while in '#{meth}' with error:\n#{$!.message}"
end
end
end
nil
end | ruby | {
"resource": ""
} |
q16172 | Hirb.Pager.activated_by? | train | def activated_by?(string_to_page, inspect_mode=false)
inspect_mode ? (String.size(string_to_page) > @height * @width) : (string_to_page.count("\n") > @height)
end | ruby | {
"resource": ""
} |
q16173 | Hirb.Util.recursive_hash_merge | train | def recursive_hash_merge(hash1, hash2)
hash1.merge(hash2) {|k,o,n| (o.is_a?(Hash)) ? recursive_hash_merge(o,n) : n}
end | ruby | {
"resource": ""
} |
q16174 | Hirb.Util.find_home | train | def find_home
['HOME', 'USERPROFILE'].each {|e| return ENV[e] if ENV[e] }
return "#{ENV['HOMEDRIVE']}#{ENV['HOMEPATH']}" if ENV['HOMEDRIVE'] && ENV['HOMEPATH']
File.expand_path("~")
rescue
File::ALT_SEPARATOR ? "C:/" : "/"
end | ruby | {
"resource": ""
} |
q16175 | SendGrid.Client.build_args | train | def build_args(args)
args.each do |arg|
arg.each do |key, value|
case key.to_s
when 'query_params'
@query_params = value
when 'request_headers'
update_headers(value)
when 'request_body'
@request_body = value
end
end
end
end | ruby | {
"resource": ""
} |
q16176 | SendGrid.Client.build_url | train | def build_url(query_params: nil)
url = [add_version(''), *@url_path].join('/')
url = build_query_params(url, query_params) if query_params
URI.parse("#{@host}#{url}")
end | ruby | {
"resource": ""
} |
q16177 | SendGrid.Client.make_request | train | def make_request(http, request)
response = http.request(request)
Response.new(response)
end | ruby | {
"resource": ""
} |
q16178 | SendGrid.Client.build_http | train | def build_http(host, port)
params = [host, port]
params += @proxy_options.values_at(:host, :port, :user, :pass) unless @proxy_options.empty?
add_ssl(Net::HTTP.new(*params))
end | ruby | {
"resource": ""
} |
q16179 | SendGrid.Client.add_ssl | train | def add_ssl(http)
if host.start_with?('https')
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_PEER
end
http
end | ruby | {
"resource": ""
} |
q16180 | Ohm.Collection.fetch | train | def fetch(ids)
data = nil
model.synchronize do
ids.each do |id|
redis.queue("HGETALL", namespace[id])
end
data = redis.commit
end
return [] if data.nil?
[].tap do |result|
data.each_with_index do |atts, idx|
result << model.new(Utils.dict(atts).update(:id => ids[idx]))
end
end
end | ruby | {
"resource": ""
} |
q16181 | Ohm.Set.sort | train | def sort(options = {})
if options.has_key?(:get)
options[:get] = to_key(options[:get])
Stal.solve(redis, ["SORT", key, *Utils.sort_options(options)])
else
fetch(Stal.solve(redis, ["SORT", key, *Utils.sort_options(options)]))
end
end | ruby | {
"resource": ""
} |
q16182 | Ohm.Set.find | train | def find(dict)
Ohm::Set.new(
model, namespace, [:SINTER, key, *model.filters(dict)]
)
end | ruby | {
"resource": ""
} |
q16183 | Ohm.MutableSet.replace | train | def replace(models)
ids = models.map(&:id)
model.synchronize do
redis.queue("MULTI")
redis.queue("DEL", key)
ids.each { |id| redis.queue("SADD", key, id) }
redis.queue("EXEC")
redis.commit
end
end | ruby | {
"resource": ""
} |
q16184 | Ohm.Model.set | train | def set(att, val)
if val.to_s.empty?
key.call("HDEL", att)
else
key.call("HSET", att, val)
end
@attributes[att] = val
end | ruby | {
"resource": ""
} |
q16185 | Ohm.Model.save | train | def save
indices = {}
model.indices.each do |field|
next unless (value = send(field))
indices[field] = Array(value).map(&:to_s)
end
uniques = {}
model.uniques.each do |field|
next unless (value = send(field))
uniques[field] = value.to_s
end
features = {
"name" => model.name
}
if defined?(@id)
features["id"] = @id
end
@id = script(LUA_SAVE, 0,
features.to_json,
_sanitized_attributes.to_json,
indices.to_json,
uniques.to_json
)
return self
end | ruby | {
"resource": ""
} |
q16186 | PictureTag.SourceImage.grab_file | train | def grab_file(source_file)
source_name = File.join(PictureTag.config.source_dir, source_file)
unless File.exist? source_name
raise "Jekyll Picture Tag could not find #{source_name}."
end
source_name
end | ruby | {
"resource": ""
} |
q16187 | Roadie.Document.transform | train | def transform
dom = Nokogiri::HTML.parse html
callback before_transformation, dom
improve dom
inline dom, keep_uninlinable_in: :head
rewrite_urls dom
callback after_transformation, dom
remove_ignore_markers dom
serialize_document dom
end | ruby | {
"resource": ""
} |
q16188 | Roadie.AssetScanner.extract_css | train | def extract_css
stylesheets = @dom.css(STYLE_ELEMENT_QUERY).map { |element|
stylesheet = read_stylesheet(element)
element.remove if stylesheet
stylesheet
}.compact
stylesheets
end | ruby | {
"resource": ""
} |
q16189 | Roadie.UrlGenerator.generate_url | train | def generate_url(path, base = "/")
return root_uri.to_s if path.nil? or path.empty?
return path if path_is_anchor?(path)
return add_scheme(path) if path_is_schemeless?(path)
return path if Utils.path_is_absolute?(path)
combine_segments(root_uri, base, path).to_s
end | ruby | {
"resource": ""
} |
q16190 | Roadie.Inliner.add_uninlinable_styles | train | def add_uninlinable_styles(parent, blocks, merge_media_queries)
return if blocks.empty?
parent_node =
case parent
when :head
find_head
when :root
dom
else
raise ArgumentError, "Parent must be either :head or :root. Was #{parent.inspect}"
end
create_style_element(blocks, parent_node, merge_media_queries)
end | ruby | {
"resource": ""
} |
q16191 | Geminabox.GemVersionCollection.by_name | train | def by_name(&block)
@grouped ||= @gems.group_by(&:name).map{|name, collection|
[name, Geminabox::GemVersionCollection.new(collection)]
}.sort_by{|name, collection|
name.downcase
}
if block_given?
@grouped.each(&block)
else
@grouped
end
end | ruby | {
"resource": ""
} |
q16192 | Azure.Certificate.create_ssl_certificate | train | def create_ssl_certificate(cert_params)
file_path = cert_params[:output_file].sub(/\.(\w+)$/, "")
path = prompt_for_file_path
file_path = File.join(path, file_path) unless path.empty?
cert_params[:domain] = prompt_for_domain
rsa_key = generate_keypair cert_params[:key_length]
cert = generate_certificate(rsa_key, cert_params)
write_certificate_to_file cert, file_path, rsa_key, cert_params
puts "*" * 70
puts "Generated Certificates:"
puts "- #{file_path}.pfx - PKCS12 format keypair. Contains both the public and private keys, usually used on the server."
puts "- #{file_path}.b64 - Base64 encoded PKCS12 keypair. Contains both the public and private keys, for upload to the Azure REST API."
puts "- #{file_path}.pem - Base64 encoded public certificate only. Required by the client to connect to the server."
puts "Certificate Thumbprint: #{@thumbprint.to_s.upcase}"
puts "*" * 70
Chef::Config[:knife][:ca_trust_file] = file_path + ".pem" if Chef::Config[:knife][:ca_trust_file].nil?
cert_data = File.read (file_path + ".b64")
add_certificate cert_data, @winrm_cert_passphrase, "pfx", cert_params[:azure_dns_name]
@thumbprint
end | ruby | {
"resource": ""
} |
q16193 | Azure.Images.get_images | train | def get_images(img_type)
images = Hash.new
if img_type == "OSImage"
response = @connection.query_azure("images")
elsif img_type == "VMImage"
response = @connection.query_azure("vmimages")
end
unless response.to_s.empty?
osimages = response.css(img_type)
osimages.each do |image|
item = Image.new(image)
images[item.name] = item
end
end
images
end | ruby | {
"resource": ""
} |
q16194 | Azure.StorageAccounts.exists_on_cloud? | train | def exists_on_cloud?(name)
ret_val = @connection.query_azure("storageservices/#{name}")
error_code, error_message = error_from_response_xml(ret_val) if ret_val
if ret_val.nil? || error_code.length > 0
Chef::Log.warn "Unable to find storage account:" + error_message + " : " + error_message if ret_val
false
else
true
end
end | ruby | {
"resource": ""
} |
q16195 | Azure::ARM.VnetConfig.subnets_list_for_specific_address_space | train | def subnets_list_for_specific_address_space(address_prefix, subnets_list)
list = []
address_space = IPAddress(address_prefix)
subnets_list.each do |sbn|
subnet_address_prefix = IPAddress(sbn.address_prefix)
## check if the subnet belongs to this address space or not ##
list << sbn if address_space.include? subnet_address_prefix
end
list
end | ruby | {
"resource": ""
} |
q16196 | Azure::ARM.VnetConfig.subnets_list | train | def subnets_list(resource_group_name, vnet_name, address_prefix = nil)
list = network_resource_client.subnets.list(resource_group_name, vnet_name)
!address_prefix.nil? && !list.empty? ? subnets_list_for_specific_address_space(address_prefix, list) : list
end | ruby | {
"resource": ""
} |
q16197 | Azure::ARM.VnetConfig.sort_available_networks | train | def sort_available_networks(available_networks)
available_networks.sort_by { |nwrk| nwrk.network.address.split(".").map(&:to_i) }
end | ruby | {
"resource": ""
} |
q16198 | Azure::ARM.VnetConfig.sort_subnets_by_cidr_prefix | train | def sort_subnets_by_cidr_prefix(subnets)
subnets.sort_by.with_index { |sbn, i| [subnet_address_prefix(sbn).split("/")[1].to_i, i] }
end | ruby | {
"resource": ""
} |
q16199 | Azure::ARM.VnetConfig.sort_used_networks_by_hosts_size | train | def sort_used_networks_by_hosts_size(used_network)
used_network.sort_by.with_index { |nwrk, i| [-nwrk.hosts.size, i] }
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.