_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q17600 | Elastomer::Client::RestApiSpec.ApiSpec.valid_param? | train | def valid_param?(api:, param:)
rest_api = get(api)
return true if rest_api.nil?
rest_api.valid_param?(param)
end | ruby | {
"resource": ""
} |
q17601 | Elastomer::Client::RestApiSpec.ApiSpec.select_parts | train | def select_parts(api:, from:)
rest_api = get(api)
return from if rest_api.nil?
rest_api.select_parts(from: from)
end | ruby | {
"resource": ""
} |
q17602 | Elastomer::Client::RestApiSpec.ApiSpec.valid_part? | train | def valid_part?(api:, part:)
rest_api = get(api)
return true if rest_api.nil?
rest_api.valid_part?(part)
end | ruby | {
"resource": ""
} |
q17603 | Elastomer::Client::RestApiSpec.ApiSpec.select_common_params | train | def select_common_params(from:)
return from if @common_params.empty?
from.select {|k,v| valid_common_param?(k)}
end | ruby | {
"resource": ""
} |
q17604 | Elastomer::Client::RestApiSpec.ApiSpec.validate_params! | train | def validate_params!(api:, params:)
rest_api = get(api)
return params if rest_api.nil?
params.keys.each do |key|
unless rest_api.valid_param?(key) || valid_common_param?(key)
raise ::Elastomer::Client::IllegalArgument, "'#{key}' is not a valid parameter for the '#{api}' API"
end
end
params
end | ruby | {
"resource": ""
} |
q17605 | Elastomer.Client.multi_search | train | def multi_search(body = nil, params = nil)
if block_given?
params, body = (body || {}), nil
yield msearch_obj = MultiSearch.new(self, params)
msearch_obj.call
else
raise "multi_search request body cannot be nil" if body.nil?
params ||= {}
response = self.post "{/index}{/type}/_msearch", params.merge(body: body, action: "msearch", rest_api: "msearch")
response.body
end
end | ruby | {
"resource": ""
} |
q17606 | Elastomer.Client.multi_percolate | train | def multi_percolate(body = nil, params = nil)
if block_given?
params, body = (body || {}), nil
yield mpercolate_obj = MultiPercolate.new(self, params)
mpercolate_obj.call
else
raise "multi_percolate request body cannot be nil" if body.nil?
params ||= {}
response = self.post "{/index}{/type}/_mpercolate", params.merge(body: body, action: "mpercolate", rest_api: "mpercolate")
response.body
end
end | ruby | {
"resource": ""
} |
q17607 | Elastomer.Client.bulk | train | def bulk( body = nil, params = nil )
if block_given?
params, body = (body || {}), nil
yield bulk_obj = Bulk.new(self, params)
bulk_obj.call
else
raise "bulk request body cannot be nil" if body.nil?
params ||= {}
response = self.post "{/index}{/type}/_bulk", params.merge(body: body, action: "bulk", rest_api: "bulk")
response.body
end
end | ruby | {
"resource": ""
} |
q17608 | Elastomer.Client.bulk_stream_responses | train | def bulk_stream_responses(ops, params = {})
bulk_obj = Bulk.new(self, params)
Enumerator.new do |yielder|
ops.each do |action, *args|
response = bulk_obj.send(action, *args)
yielder.yield response unless response.nil?
end
response = bulk_obj.call
yielder.yield response unless response.nil?
end
end | ruby | {
"resource": ""
} |
q17609 | Elastomer.Client.bulk_stream_items | train | def bulk_stream_items(ops, params = {})
stats = {
"took" => 0,
"errors" => false,
"success" => 0,
"failure" => 0
}
bulk_stream_responses(ops, params).each do |response|
stats["took"] += response["took"]
stats["errors"] |= response["errors"]
response["items"].each do |item|
if is_ok?(item)
stats["success"] += 1
else
stats["failure"] += 1
end
yield item
end
end
stats
end | ruby | {
"resource": ""
} |
q17610 | Representors.Representor.identifier | train | def identifier
@identifier ||= begin
uri = @representor_hash.href || self.object_id
protocol = @representor_hash.protocol || (uri == self.object_id ? UNKNOWN_PROTOCOL : DEFAULT_PROTOCOL)
PROTOCOL_TEMPLATE % [protocol, uri]
end
end | ruby | {
"resource": ""
} |
q17611 | RepresentorSupport.Utilities.symbolize_keys | train | def symbolize_keys(hash)
Hash[hash.map{|(k,v)| [k.to_sym,v]}]
end | ruby | {
"resource": ""
} |
q17612 | Representors.HaleDeserializer.deserialize_embedded | train | def deserialize_embedded(builder, media)
make_embedded_resource = ->(x) { self.class.new(x).to_representor_hash.to_h }
(media[EMBEDDED_KEY] || {}).each do |name, value|
resource_hash = map_or_apply(make_embedded_resource, value)
builder = builder.add_embedded(name, resource_hash)
end
builder
end | ruby | {
"resource": ""
} |
q17613 | Representors.RepresentorBuilder.add_attribute | train | def add_attribute(name, value, options={})
new_representor_hash = RepresentorHash.new(deep_dup(@representor_hash.to_h))
new_representor_hash.attributes[name] = options.merge({value: value})
RepresentorBuilder.new(new_representor_hash)
end | ruby | {
"resource": ""
} |
q17614 | Representors.RepresentorBuilder.add_transition | train | def add_transition(rel, href, options={})
new_representor_hash = RepresentorHash.new(deep_dup(@representor_hash.to_h))
options = symbolize_keys(options)
options.delete(:method) if options[:method] == Transition::DEFAULT_METHOD
link_values = options.merge({href: href, rel: rel})
if options[DATA_KEY]
link_values[Transition::DESCRIPTORS_KEY] = link_values.delete(DATA_KEY)
end
new_representor_hash.transitions.push(link_values)
RepresentorBuilder.new(new_representor_hash)
end | ruby | {
"resource": ""
} |
q17615 | Representors.RepresentorBuilder.add_transition_array | train | def add_transition_array(rel, array_of_hashes)
array_of_hashes.reduce(RepresentorBuilder.new(@representor_hash)) do |memo, transition|
transition = symbolize_keys(transition)
href = transition.delete(:href)
memo = memo.add_transition(rel, href, transition)
end
end | ruby | {
"resource": ""
} |
q17616 | ActiveRecord.OracleEnhancedProcedures._create_record | train | def _create_record
# check if class has custom create method
if self.class.custom_create_method
# run before/after callbacks defined in model
run_callbacks(:create) do
# timestamp
if self.record_timestamps
current_time = current_time_from_proper_timezone
all_timestamp_attributes_in_model.each do |column|
if respond_to?(column) && respond_to?("#{column}=") && self.send(column).nil?
write_attribute(column.to_s, current_time)
end
end
end
# run
create_using_custom_method
end
else
super
end
end | ruby | {
"resource": ""
} |
q17617 | ActiveRecord.OracleEnhancedProcedures._update_record | train | def _update_record(attribute_names = @attributes.keys)
# check if class has custom update method
if self.class.custom_update_method
# run before/after callbacks defined in model
run_callbacks(:update) do
# timestamp
if should_record_timestamps?
current_time = current_time_from_proper_timezone
timestamp_attributes_for_update_in_model.each do |column|
column = column.to_s
next if will_save_change_to_attribute?(column)
write_attribute(column, current_time)
end
end
# update just dirty attributes
if partial_writes?
# Serialized attributes should always be written in case they've been
# changed in place.
update_using_custom_method(changed | (attributes.keys & self.class.columns.select { |column| column.is_a?(Type::Serialized) }))
else
update_using_custom_method(attributes.keys)
end
end
else
super
end
end | ruby | {
"resource": ""
} |
q17618 | Recurly.Coupon.redeem | train | def redeem account_or_code, currency = nil, extra_opts={}
return false unless link? :redeem
account_code = if account_or_code.is_a? Account
account_or_code.account_code
else
account_or_code
end
redemption_options = {
:account_code => account_code,
:currency => currency || Recurly.default_currency
}.merge(extra_opts)
redemption = Redemption.new(redemption_options)
Redemption.from_response follow_link(:redeem,
:body => redemption.to_xml
)
rescue API::UnprocessableEntity => e
redemption.apply_errors e
redemption
end | ruby | {
"resource": ""
} |
q17619 | Recurly.Coupon.generate | train | def generate(amount)
builder = XML.new("<coupon/>")
builder.add_element 'number_of_unique_codes', amount
resp = follow_link(:generate,
:body => builder.to_s
)
Pager.new(Recurly::Coupon, uri: resp['location'], parent: self, etag: resp['ETag'])
end | ruby | {
"resource": ""
} |
q17620 | Recurly.Coupon.redeem! | train | def redeem!(account_code, currency = nil)
redemption = redeem(account_code, currency)
raise Invalid.new(self) unless redemption && redemption.persisted?
redemption
end | ruby | {
"resource": ""
} |
q17621 | Recurly.Resource.read_attribute | train | def read_attribute(key)
key = key.to_s
if attributes.key? key
value = attributes[key]
elsif links.key?(key) && self.class.reflect_on_association(key)
value = attributes[key] = follow_link key
end
value
end | ruby | {
"resource": ""
} |
q17622 | Recurly.Resource.write_attribute | train | def write_attribute(key, value)
if changed_attributes.key?(key = key.to_s)
changed_attributes.delete key if changed_attributes[key] == value
elsif self[key] != value
changed_attributes[key] = self[key]
end
association = self.class.find_association(key)
if association
value = fetch_associated(key, value)
# FIXME: More explicit; less magic.
elsif value && key.end_with?('_in_cents') && !respond_to?(:currency)
value = Money.new(value, self, key) unless value.is_a?(Money)
end
attributes[key] = value
end | ruby | {
"resource": ""
} |
q17623 | Recurly.Resource.attributes= | train | def attributes=(attributes = {})
attributes.each_pair { |k, v|
respond_to?(name = "#{k}=") and send(name, v) or self[k] = v
}
end | ruby | {
"resource": ""
} |
q17624 | Recurly.Resource.follow_link | train | def follow_link(key, options = {})
if link = links[key = key.to_s]
response = API.send link[:method], link[:href], options[:body], options
if resource_class = link[:resource_class]
response = resource_class.from_response response
response.attributes[self.class.member_name] = self
end
response
end
rescue Recurly::API::NotFound
raise unless resource_class
end | ruby | {
"resource": ""
} |
q17625 | Recurly.Resource.to_xml | train | def to_xml(options = {})
builder = options[:builder] || XML.new("<#{self.class.member_name}/>")
xml_keys.each { |key|
value = respond_to?(key) ? send(key) : self[key]
node = builder.add_element key
# Duck-typing here is problematic because of ActiveSupport's #to_xml.
case value
when Resource, Subscription::AddOns
value.to_xml options.merge(:builder => node)
when Array
value.each do |e|
if e.is_a? Recurly::Resource
# create a node to hold this resource
e_node = node.add_element Helper.singularize(key)
# serialize the resource into this node
e.to_xml(options.merge(builder: e_node))
else
# it's just a primitive value
node.add_element(Helper.singularize(key), e)
end
end
when Hash, Recurly::Money
value.each_pair { |k, v| node.add_element k.to_s, v }
else
node.text = value
end
}
builder.to_s
end | ruby | {
"resource": ""
} |
q17626 | Recurly.Resource.save | train | def save
if new_record? || changed?
clear_errors
@response = API.send(
persisted? ? :put : :post, path, to_xml
)
reload response
persist! true
end
true
rescue API::UnprocessableEntity => e
apply_errors e
Transaction::Error.validate! e, (self if is_a?(Transaction))
false
end | ruby | {
"resource": ""
} |
q17627 | Recurly.Resource.destroy | train | def destroy
return false unless persisted?
@response = API.delete uri
@destroyed = true
rescue API::NotFound => e
raise NotFound, e.description
end | ruby | {
"resource": ""
} |
q17628 | Recurly.Account.invoice! | train | def invoice!(attrs={})
InvoiceCollection.from_response API.post(invoices.uri, attrs.empty? ? nil : Invoice.to_xml(attrs))
rescue Recurly::API::UnprocessableEntity => e
raise Invalid, e.message
end | ruby | {
"resource": ""
} |
q17629 | Recurly.Account.build_invoice | train | def build_invoice
InvoiceCollection.from_response API.post("#{invoices.uri}/preview")
rescue Recurly::API::UnprocessableEntity => e
raise Invalid, e.message
end | ruby | {
"resource": ""
} |
q17630 | Recurly.Account.verify_cvv! | train | def verify_cvv!(verification_value)
bi = BillingInfo.new(verification_value: verification_value)
bi.uri = "#{path}/billing_info/verify_cvv"
bi.save!
bi
end | ruby | {
"resource": ""
} |
q17631 | Recurly.Invoice.enter_offline_payment | train | def enter_offline_payment(attrs={})
Transaction.from_response API.post("#{uri}/transactions", attrs.empty? ? nil : Transaction.to_xml(attrs))
rescue Recurly::API::UnprocessableEntity => e
raise Invalid, e.message
end | ruby | {
"resource": ""
} |
q17632 | Recurly.GiftCard.preview | train | def preview
clear_errors
@response = API.send(:post, "#{path}/preview", to_xml)
reload response
rescue API::UnprocessableEntity => e
apply_errors e
end | ruby | {
"resource": ""
} |
q17633 | Recurly.Subscription.postpone | train | def postpone next_renewal_date, bulk=false
return false unless link? :postpone
reload follow_link(:postpone,
:params => { :next_renewal_date => next_renewal_date, :bulk => bulk }
)
true
end | ruby | {
"resource": ""
} |
q17634 | Recurly.Subscription.update_notes | train | def update_notes(notes)
return false unless link? :notes
self.attributes = notes
reload follow_link(:notes, body: to_xml)
true
end | ruby | {
"resource": ""
} |
q17635 | Recurly.Subscription.pause | train | def pause(remaining_pause_cycles)
builder = XML.new("<subscription/>")
builder.add_element('remaining_pause_cycles', remaining_pause_cycles)
reload API.put("#{uri}/pause", builder.to_s)
true
end | ruby | {
"resource": ""
} |
q17636 | Recurly.XML.add_element | train | def add_element name, value = nil
value = value.respond_to?(:xmlschema) ? value.xmlschema : value.to_s
XML.new super(name, value)
end | ruby | {
"resource": ""
} |
q17637 | RSpec::Puppet.Coverage.add_from_catalog | train | def add_from_catalog(catalog, test_module)
coverable_resources = catalog.to_a.reject { |resource| !test_module.nil? && filter_resource?(resource, test_module) }
coverable_resources.each do |resource|
add(resource)
end
end | ruby | {
"resource": ""
} |
q17638 | RSpec::Puppet.Coverage.filter_resource? | train | def filter_resource?(resource, test_module)
if @filters.include?(resource.to_s)
return true
end
if resource.type == 'Class'
module_name = resource.title.split('::').first.downcase
if module_name != test_module
return true
end
end
if resource.file
paths = module_paths(test_module)
unless paths.any? { |path| resource.file.include?(path) }
return true
end
end
return false
end | ruby | {
"resource": ""
} |
q17639 | RSpec::Puppet.Coverage.module_paths | train | def module_paths(test_module)
adapter = RSpec.configuration.adapter
paths = adapter.modulepath.map do |dir|
File.join(dir, test_module, 'manifests')
end
paths << adapter.manifest if adapter.manifest
paths
end | ruby | {
"resource": ""
} |
q17640 | RSpec::Puppet.FunctionExampleGroup.build_compiler | train | def build_compiler
node_name = nodename(:function)
fact_values = facts_hash(node_name)
trusted_values = trusted_facts_hash(node_name)
# Allow different Hiera configurations:
HieraPuppet.instance_variable_set('@hiera', nil) if defined? HieraPuppet
# if we specify a pre_condition, we should ensure that we compile that
# code into a catalog that is accessible from the scope where the
# function is called
Puppet[:code] = pre_cond
node_facts = Puppet::Node::Facts.new(node_name, fact_values.dup)
node_options = {
:parameters => fact_values,
:facts => node_facts
}
stub_facts! fact_values
node = build_node(node_name, node_options)
if Puppet::Util::Package.versioncmp(Puppet.version, '4.3.0') >= 0
Puppet.push_context(
{
:trusted_information => Puppet::Context::TrustedInformation.new('remote', node_name, trusted_values)
},
"Context for spec trusted hash"
)
end
compiler = Puppet::Parser::Compiler.new(node)
compiler.compile
if Puppet::Util::Package.versioncmp(Puppet.version, '4.0.0') >= 0
loaders = Puppet::Pops::Loaders.new(adapter.current_environment)
Puppet.push_context(
{
:loaders => loaders,
:global_scope => compiler.context_overrides[:global_scope]
},
"set globals")
end
compiler
end | ruby | {
"resource": ""
} |
q17641 | Lvm2Thin.SuperBlock.device_to_data | train | def device_to_data(device_id, pos, len)
dev_blk = device_block(pos)
dev_off = device_block_offset(pos)
data_map = data_mapping.map_for(device_id)
total_len = 0
data_blks = []
num_data_blks = (len / data_block_size).to_i + 1
0.upto(num_data_blks - 1) do |i|
current_blk = dev_blk + i
blk_len = 0
if data_map.block?(current_blk)
data_blk = data_map.data_block(current_blk)
blk_start = data_blk * data_block_size
if i.zero?
blk_start += dev_off
blk_len = data_block_size - dev_off - 1
elsif i == num_data_blks - 1
blk_len = len - total_len
else
blk_len = data_block_size
end
data_blks << [current_blk, data_blk, blk_start, blk_len]
# Missing block may be caused by trying to read beyond end of
# LVM device (too large pos or len):
else
remaining = (len - total_len)
blk_len = remaining > data_block_size ? data_block_size : remaining
data_blks << [current_blk, nil, nil, blk_len]
end
total_len += blk_len
end
data_blks
end | ruby | {
"resource": ""
} |
q17642 | NTFS.AttributeList.loadAttributes | train | def loadAttributes(attribType)
result = []
# ad is an attribute descriptor.
@list.each do |ad|
next unless ad['attrib_type'] == attribType
# Load referenced attribute and add it to parent.
result += @boot_sector.mftEntry(ad['mft']).loadAttributes(attribType)
end
result
end | ruby | {
"resource": ""
} |
q17643 | NTFS.AttribData.read | train | def read(bytes = @length)
return nil if @pos >= @length
bytes = @length - @pos if bytes.nil?
bytes = @length - @pos if @pos + bytes > @length
out = @data[@pos, bytes] if @data.kind_of?(String)
out = @data.read(bytes) if @data.kind_of?(NTFS::DataRun)
@pos += out.size
out
end | ruby | {
"resource": ""
} |
q17644 | Fat32.BootSect.getNextCluster | train | def getNextCluster(clus)
nxt = getFatEntry(clus)
return nil if nxt > CC_END_OF_CHAIN
raise "Damaged cluster in cluster chain" if nxt == CC_DAMAGED
[nxt, getCluster(nxt)]
end | ruby | {
"resource": ""
} |
q17645 | Fat32.BootSect.countContigClusters | train | def countContigClusters(clus)
cur = clus; nxt = 0
loop do
nxt = getFatEntry(cur)
break if nxt != cur + 1
cur = nxt; redo
end
raise "Damaged cluster in cluster chain" if nxt == CC_DAMAGED
cur - clus + 1
end | ruby | {
"resource": ""
} |
q17646 | Fat32.BootSect.wipeChain | train | def wipeChain(clus)
loop do
nxt = getFatEntry(clus)
putFatEntry(clus, 0)
break if nxt == 0 # A 0 entry means FAT is inconsistent. Chkdsk may report lost clusters.
break if nxt == CC_DAMAGED # This should never happen but if it does allow clusters to become lost.
break if nxt > CC_END_OF_CHAIN
clus = nxt
end
end | ruby | {
"resource": ""
} |
q17647 | Fat32.BootSect.writeClusters | train | def writeClusters(start, buf, len = buf.length)
clus = start; num, leftover = len.divmod(@bytesPerCluster); num += 1 if leftover > 0
0.upto(num - 1) do |offset|
local = buf[offset * @bytesPerCluster, @bytesPerCluster]
if local.length < @bytesPerCluster then local += ("\0" * (@bytesPerCluster - local.length)) end
@stream.seek(clusToByte(clus), IO::SEEK_SET)
@stream.write(local, @bytesPerCluster)
break if offset == num - 1 # ugly hack to prevent allocating more than needed.
nxt = getFatEntry(clus)
nxt = allocClusters(clus) if nxt > CC_END_OF_CHAIN
clus = nxt
end
end | ruby | {
"resource": ""
} |
q17648 | Fat32.BootSect.putFatEntry | train | def putFatEntry(clus, value)
raise "DONT TOUCH THIS CLUSTER: #{clus}" if clus < 3
@stream.seek(@fatBase + FAT_ENTRY_SIZE * clus)
@stream.write([value].pack('L'), FAT_ENTRY_SIZE)
end | ruby | {
"resource": ""
} |
q17649 | NTFS.BootSect.clusterInfo | train | def clusterInfo
return @clusterInfo unless @clusterInfo.nil?
# MFT Entry 6 ==> BITMAP Information
ad = mftEntry(6).attributeData
data = ad.read(ad.length)
ad.rewind
c = data.unpack("b#{data.length * 8}")[0]
nclusters = c.length
on = c.count("1")
uclusters = on
fclusters = c.length - on
@clusterInfo = {"total" => nclusters, "free" => fclusters, "used" => uclusters}
end | ruby | {
"resource": ""
} |
q17650 | NTFS.BootSect.mftRecToBytePos | train | def mftRecToBytePos(recno)
# Return start of mft if rec 0 (no point in the rest of this).
return mftLoc if recno == 0
# Find which fragment contains the target mft record.
start = fragTable[0]; last_clusters = 0; target_cluster = recno * @bytesPerFileRec / @bytesPerCluster
if (recno > @bytesPerCluster / @bytesPerFileRec) && (fragTable.size > 2)
total_clusters = 0
fragTable.each_slice(2) do |vcn, len|
start = vcn # These are now absolute clusters, not offsets.
total_clusters += len
break if total_clusters > target_cluster
last_clusters += len
end
# Toss if we haven't found the fragment.
raise "MIQ(NTFS::BootSect.mftRecToBytePos) Can't find MFT record #{recno} in data run.\ntarget = #{target_cluster}\ntbl = #{fragTable.inspect}" if total_clusters < target_cluster
end
# Calculate offset in target cluster & final byte position.
offset = (recno - (last_clusters * @bytesPerCluster / @bytesPerFileRec)) * @bytesPerFileRec
start * @bytesPerCluster + offset
end | ruby | {
"resource": ""
} |
q17651 | NTFS.DirectoryIndexNode.dump | train | def dump
out = "\#<#{self.class}:0x#{'%08x' % object_id}>\n"
out << " Mft Ref : seq #{@refMft[0]}, entry #{@refMft[1]}\n"
out << " Length : #{@length}\n"
out << " Content : #{@contentLen}\n"
out << " Flags : 0x#{'%08x' % @flags}\n"
out << @afn.dump if @contentLen > 0
out << " Child ref: #{@child}\n" if NTFS::Utils.gotBit?(@flags, IN_HAS_CHILD)
out << "---\n"
end | ruby | {
"resource": ""
} |
q17652 | Ext4.Directory.globEntriesByHashTree | train | def globEntriesByHashTree
ents_by_name = {}
offset = 0
# Chomp fake '.' and '..' directories first
2.times do
de = DirectoryEntry.new(@data[offset..-1], @sb.isNewDirEnt?)
ents_by_name[de.name] ||= []
ents_by_name[de.name] << de
offset += 12
end
$log.info("Ext4::Directory.globEntriesByHashTree (inode=#{@inodeNum}) >>\n#{@data[0, 256].hex_dump}")
header = HashTreeHeader.new(@data[offset..-1])
$log.info("Ext4::Directory.globEntriesByHashTree --\n#{header.dump}")
$log.info("Ext4::Directory.globEntriesByHashTree (inode=#{@inodeNum}) <<#{ents_by_name.inspect}")
offset += header.length
root = HashTreeEntry.new(@data[offset..-1], true)
$log.info("Ext4::Directory.globEntriesByHashTree --\n#{root.dump}")
ents_by_name
end | ruby | {
"resource": ""
} |
q17653 | MiqBerkeleyDB.MiqBdbPage.dump | train | def dump
out = ""
out << "Page #{current}\n"
out << " type: #{MiqBdbPage.type2string(ptype)}\n"
out << " prev: #{prev}\n"
out << " next: #{@header['next_pgno']}\n"
out << " log seq num: file=#{@header['lsn_file']} offset=#{@header['lsn_offset']}\n"
out << " level: #{level}\n"
if @header['p_type'] == P_OVERFLOW
out << " ref cnt: #{nentries}\n"
out << " len: #{offset}\n"
else
out << " entries: #{nentries}\n"
out << " offset: #{offset}\n"
end
out << " data size: #{@data.size}\n"
out << " data: "
@data.bytes.take(20).each do |c|
out << sprintf("%.2x ", c)
end
out << "..." if @data.size > 20
out << "\n\n"
out
end | ruby | {
"resource": ""
} |
q17654 | NTFS.IndexRoot.find | train | def find(name)
log_prefix = "MIQ(NTFS::IndexRoot.find)"
name = name.downcase
$log.debug "#{log_prefix} Searching for [#{name}]" if DEBUG_TRACE_FIND
if @foundEntries.key?(name)
$log.debug "#{log_prefix} Found [#{name}] (cached)" if DEBUG_TRACE_FIND
return @foundEntries[name]
end
found = findInEntries(name, @indexEntries)
if found.nil?
# Fallback to full directory search if not found
$log.debug "#{log_prefix} [#{name}] not found. Performing full directory scan." if $log
found = findBackup(name)
$log.send(found.nil? ? :debug : :warn, "#{log_prefix} [#{name}] #{found.nil? ? "not " : ""}found in full directory scan.") if $log
end
found
end | ruby | {
"resource": ""
} |
q17655 | NTFS.IndexRoot.globNames | train | def globNames
@globNames = globEntries.collect { |e| e.namespace == NTFS::FileName::NS_DOS ? nil : e.name.downcase }.compact if @globNames.nil?
@globNames
end | ruby | {
"resource": ""
} |
q17656 | ReiserFS.Block.getKey | train | def getKey(k)
return nil if k > @nitems || k <= 0
pos = SIZEOF_BLOCK_HEADER + (SIZEOF_KEY * (k - 1))
keydata = @data[pos, SIZEOF_KEY]
data2key(keydata)
end | ruby | {
"resource": ""
} |
q17657 | ReiserFS.Block.getPointer | train | def getPointer(p)
# puts "getPointer >> p=#{p}"
return nil if p > @nitems || p < 0
pos = SIZEOF_BLOCK_HEADER + (SIZEOF_KEY * @nitems) + (SIZEOF_POINTER * p)
ptrdata = @data[pos, SIZEOF_POINTER]
POINTER.decode(ptrdata)
end | ruby | {
"resource": ""
} |
q17658 | Iso9660.BootSector.dump | train | def dump
out = "\n"
out += "Type : #{@bs['desc_type']}\n"
out += "Record ID : #{@bs['id']}\n"
out += "Version : #{@bs['version']}\n"
out += "System ID : #{@bs['system_id'].strip}\n"
out += "Volume ID : #{@volName}\n"
out += "Vol space size : #{@bs["vol_space_size#{@suff}"]} (sectors)\n"
out += "Vol set size : #{@bs["vol_set_size#{@suff}"]}\n"
out += "Vol sequence num: #{@bs["vol_seq_number#{@suff}"]}\n"
out += "Logical blk size: #{@bs["log_block_size#{@suff}"]} (sector size)\n"
out += "Path table size : #{@bs["path_table_size#{@suff}"]}\n"
out += "Type 1 path tbl : #{@bs["type_1_path_table#{@suff}"]}\n"
out += "Opt type 1 pth : #{@bs["opt_type_1_path_table#{@suff}"]}\n"
out += "Type M path tbl : #{@bs["type_m_path_table#{@suff}"]}\n"
out += "Opt type M pth : #{@bs["opt_type_m_path_table#{@suff}"]}\n"
out += "Vol set ID : #{@bs['vol_set_id'].strip}\n"
out += "Publisher ID : #{@bs['publisher_id'].strip}\n"
out += "Preparer ID : #{@bs['preparer_id'].strip}\n"
out += "Application ID : #{@bs['application_id'].strip}\n"
out += "Copyright : #{@bs['copyright_file_id'].strip}\n"
out += "Abstract : #{@bs['abstract_file_id'].strip}\n"
out += "Biblographic : #{@bs['biblographic_file_id'].strip}\n"
out += "Creation date : #{@bs['creation_date'].strip} (#{@cTime}, tz = #{Util.GetTimezone(@bs['creation_date'])})\n"
out += "Mod date : #{@bs['modification_date'].strip} (#{@mTime}, tz = #{Util.GetTimezone(@bs['modification_date'])})\n"
out += "Expiration date : #{@bs['experation_date'].strip} (#{@expirationDate}, tz = #{Util.GetTimezone(@bs['experation_date'])})\n"
out += "Effective date : #{@bs['effective_date'].strip} (#{@effectiveDate}, tz = #{Util.GetTimezone(@bs['effective_date'])})\n"
out += "File strct ver : #{@bs['file_structure_version']}\n"
out += "Application data: #{@bs['application_data'].strip}\n"
end | ruby | {
"resource": ""
} |
q17659 | XFS.Inode.bmap_btree_record_to_block_pointers | train | def bmap_btree_record_to_block_pointers(record, block_pointers_length)
block_pointers = []
# Fill in the missing blocks with 0-blocks
block_pointers << 0 while (block_pointers_length + block_pointers.length) < record.start_offset
1.upto(record.block_count) { |i| block_pointers << record.start_block + i - 1 }
@block_offset += record.block_count
block_pointers
end | ruby | {
"resource": ""
} |
q17660 | MiqLinux.Packages.procRPM | train | def procRPM(dbDir)
$log.debug "Processing RPM package database"
rpmp = MiqRpmPackages.new(@fs, File.join(dbDir, "Packages"))
rpmp.each { |p| @packages << p }
rpmp.close
end | ruby | {
"resource": ""
} |
q17661 | MiqLinux.Packages.procConary | train | def procConary(dbFile)
$log.debug "Processing Conary package database"
rpmp = MiqConaryPackages.new(@fs, dbFile)
rpmp.each { |p| @packages << p }
rpmp.close
end | ruby | {
"resource": ""
} |
q17662 | MiqWin32.System.os_product_suite | train | def os_product_suite(hash)
eid = hash.delete(:edition_id)
ps = hash.delete(:product_suite)
# If edition_id is populated then the edition will already be part of the product_name string
if eid.nil? && !hash[:product_name].nil?
ps = ps.to_s.split("\n")
if ps.length > 1 && !hash[:product_name].include?(ps.first)
hash[:product_name] = "#{hash[:product_name].strip} #{ps.first} Edition"
end
end
end | ruby | {
"resource": ""
} |
q17663 | Wice.GridOutputBuffer.add_filter | train | def add_filter(detach_with_id, filter_code)
raise WiceGridException.new("Detached ID #{detach_with_id} is already used!") if @filters.key? detach_with_id
@filters[detach_with_id] = filter_code
end | ruby | {
"resource": ""
} |
q17664 | Wice.GridOutputBuffer.filter_for | train | def filter_for(detach_with_id)
unless @filters.key? detach_with_id
if @return_empty_strings_for_nonexistent_filters
return ''
else
raise WiceGridException.new("No filter with Detached ID '#{detach_with_id}'!")
end
end
unless @filters[detach_with_id]
raise WiceGridException.new("Filter with Detached ID '#{detach_with_id}' has already been requested once! There cannot be two instances of the same filter on one page")
end
res = @filters[detach_with_id]
@filters[detach_with_id] = false
res
end | ruby | {
"resource": ""
} |
q17665 | Wice.Controller.export_grid_if_requested | train | def export_grid_if_requested(opts = {})
grid = self.wice_grid_instances.detect(&:output_csv?)
if grid
template_name = opts[grid.name] || opts[grid.name.intern]
template_name ||= grid.name + '_grid'
temp_filename = render_to_string(partial: template_name)
temp_filename = temp_filename.strip
filename = (grid.csv_file_name || grid.name) + '.csv'
grid.csv_tempfile.close
send_file_rails2 temp_filename, filename: filename, type: "text/csv; charset=#{get_output_encoding grid.csv_encoding}"
grid.csv_tempfile = nil
true
else
yield if block_given?
false
end
end | ruby | {
"resource": ""
} |
q17666 | Wice.Controller.wice_grid_custom_filter_params | train | def wice_grid_custom_filter_params(opts = {})
options = {
grid_name: 'grid',
attribute: nil,
model: nil,
value: nil
}
options.merge!(opts)
[:attribute, :value].each do |key|
raise ::Wice::WiceGridArgumentError.new("wice_grid_custom_filter_params: :#{key} is a mandatory argument") unless options[key]
end
attr_name = if options[:model]
unless options[:model].nil?
options[:model] = options[:model].constantize if options[:model].is_a? String
raise Wice::WiceGridArgumentError.new('Option :model can be either a class or a string instance') unless options[:model].is_a? Class
end
options[:model].table_name + '.' + options[:attribute]
else
options[:attribute]
end
{ "#{options[:grid_name]}[f][#{attr_name}][]" => options[:value] }
end | ruby | {
"resource": ""
} |
q17667 | Wice.GridViewHelper.dump_filter_parameters_as_hidden_fields | train | def dump_filter_parameters_as_hidden_fields(grid)
unless grid.is_a? WiceGrid
raise WiceGridArgumentError.new('dump_filter_parameters_as_hidden_fields: the parameter must be a WiceGrid instance.')
end
grid.get_state_as_parameter_value_pairs(true).collect do|param_name, value|
hidden_field_tag(param_name, value, id: "hidden-#{param_name.gsub(/[\[\]]/, '-')}")
end.join("\n").html_safe
end | ruby | {
"resource": ""
} |
q17668 | Wice.GridViewHelper.filter_and_order_state_as_hash | train | def filter_and_order_state_as_hash(grid)
{
grid.name => {
'f' => grid.status[:f],
'order' => grid.status[:order],
'order_direction' => grid.status[:order_direction]
}
}
end | ruby | {
"resource": ""
} |
q17669 | Wice.GridViewHelper.scaffolded_grid | train | def scaffolded_grid(grid_obj, opts = {}) #:nodoc:
unless grid_obj.is_a? WiceGrid
raise WiceGridArgumentError.new('scaffolded_grid: the parameter must be a WiceGrid instance.')
end
# debug grid.klass.column_names
columns = grid_obj.klass.column_names
if opts[:reject_attributes].is_a? Proc
columns = columns.reject { |c| opts[:reject_attributes].call(c) }
opts.delete(:reject_attributes)
else
columns = columns.reject { |c| c =~ opts[:reject_attributes] }
opts.delete(:reject_attributes)
end
grid(grid_obj, opts) do |g|
columns.each do |column_name|
g.column name: column_name.humanize, attribute: column_name do |ar|
ar.send(column_name)
end
end
end
end | ruby | {
"resource": ""
} |
# Declares the grid's single "action" (checkbox) column. Only one action
# column is allowed per grid; a second call raises WiceGridException.
# The optional block customizes the rendered cell.
def action_column(opts = {}, &block)
  if @action_column_present
    raise Wice::WiceGridException.new('There can be only one action column in a WiceGrid')
  end
  defaults = {
    param_name: :selected,
    html: {},
    select_all_buttons: true,
    object_property: :id,
    html_check_box: true
  }
  # Reject unknown option keys before merging the caller's overrides.
  opts.assert_valid_keys(defaults.keys)
  settings = defaults.merge(opts)
  @action_column_present = true
  processor_class = Columns.get_view_column_processor(:action)
  @columns << processor_class.new(
    @grid,
    settings[:html],
    settings[:param_name],
    settings[:select_all_buttons],
    settings[:object_property],
    settings[:html_check_box],
    @view,
    block
  )
end
"resource": ""
} |
# Collects the distinct non-blank values of +column+ from its model, sorted
# ascending, as [value, value] pairs suitable for a dropdown filter.
def distinct_values_for_column(column) #:nodoc:
  records = column.model.select("distinct #{column.name}").order("#{column.name} asc")
  values = records.map { |ar| ar[column.name] }
  values.reject(&:blank?).map { |value| [value, value] }
end
"resource": ""
} |
# Defines and immediately renders a grid. The mandatory block configures the
# grid's columns (for detached filters use define_grid / grid_filter /
# render_grid separately, as the error message explains).
def grid(grid, opts = {}, &block)
  if block.nil?
    raise WiceGridArgumentError.new('Missing block for the grid helper.' \
      ' For detached filters use first define_grid with the same API as grid, ' \
      'then grid_filter to add filters, and then render_grid to actually show the grid')
  end
  define_grid(grid, opts, &block)
  render_grid(grid)
end
"resource": ""
} |
# Renders the container markup for the smart listing registered under +name+
# (looked up in @smart_listings, populated elsewhere). The mandatory block
# receives a Builder and produces the listing's inner markup.
#
# Options (trailing hash):
# * :bare - when truthy, emit only the block's output, without the
#   surrounding main/loading/content divs.
# * :data - extra data attributes merged into the container div.
#
# Raises ArgumentError when no block is supplied.
def smart_listing_for name, *args, &block
  raise ArgumentError, "Missing block" unless block_given?
  name = name.to_sym
  # Any trailing hash in *args becomes the options.
  options = args.extract_options!
  bare = options.delete(:bare)
  builder = Builder.new(name, @smart_listings[name], self, options, block)
  output = ""
  # Data attributes read by the client-side SmartListing JavaScript.
  data = {}
  data[smart_listing_config.data_attributes(:max_count)] = @smart_listings[name].max_count if @smart_listings[name].max_count && @smart_listings[name].max_count > 0
  data[smart_listing_config.data_attributes(:item_count)] = @smart_listings[name].count
  data[smart_listing_config.data_attributes(:href)] = @smart_listings[name].href if @smart_listings[name].href
  data[smart_listing_config.data_attributes(:callback_href)] = @smart_listings[name].callback_href if @smart_listings[name].callback_href
  # Caller-supplied data attributes win over the generated ones.
  data.merge!(options[:data]) if options[:data]
  if bare
    output = capture(builder, &block)
  else
    # Wrapper structure: main div > (loading placeholder div, content div).
    output = content_tag(:div, :class => smart_listing_config.classes(:main), :id => name, :data => data) do
      concat(content_tag(:div, "", :class => smart_listing_config.classes(:loading)))
      concat(content_tag(:div, :class => smart_listing_config.classes(:content)) do
        concat(capture(builder, &block))
      end)
    end
  end
  output
end
"resource": ""
} |
# Folds a sequence of middleware items into a single linked chain: the last
# item becomes the innermost middleware (successor nil) and each earlier item
# wraps the one built after it. Returns the outermost middleware, or nil for
# an empty sequence.
def build_request_chain(sequence, context)
  successor = nil
  sequence.reverse_each do |item|
    successor = item.build_middleware(context, successor)
  end
  successor
end
"resource": ""
} |
# Removes duplicate middleware items, keeping the first occurrence of each
# (class, middleware, config) triple.
def dedup_sequence(sequence)
  seen = {}
  sequence.select do |item|
    key = [item.class, item.middleware, item.config]
    seen.key?(key) ? false : seen[key] = true
  end
end
"resource": ""
} |
# Expands a list of default action names — optionally ending with a Hash of
# explicit traits — into a traits hash. Each known action contributes its
# ACTION_TRAITS entry; an unrecognised action raises
# Errors::RouterUnknownDefaultAction. Action entries override explicit
# traits of the same key. The caller's array and hash are not mutated.
def action_traits(list_of_actions)
  actions = list_of_actions.dup
  explicit_traits = actions.last.is_a?(Hash) ? actions.pop : {}
  defaults = actions.map do |action|
    trait = ACTION_TRAITS.fetch(action) do
      raise Errors::RouterUnknownDefaultAction, action
    end
    [action, trait]
  end
  explicit_traits.merge(defaults.to_h)
end
"resource": ""
} |
# Publishes values into the shared request context. Every key must have been
# declared via the middleware class's provides list; an undeclared key
# raises NameError.
def provide(args)
  args.each do |name, value|
    raise NameError, "#{self.class} does not provide #{name}" unless self.class.provides?(name)
    @_context[name] = value
  end
end
"resource": ""
} |
# Wraps #call in ActiveSupport::Notifications instrumentation and returns the
# whole thing as a proc, so the caller decides when the middleware runs.
def instrument
  proc do
    publish_start
    # Subscribers to the legacy "coach.middleware.finish" event get the old,
    # deprecated instrumentation path; otherwise use the modern event name.
    legacy_listener = ActiveSupport::Notifications.notifier.listening?("coach.middleware.finish")
    if legacy_listener
      instrument_deprecated { call }
    else
      ActiveSupport::Notifications.instrument("finish_middleware.coach", middleware_event) { call }
    end
  end
end
"resource": ""
} |
# Verifies that every name this middleware requires is provided earlier in
# the chain; raises MiddlewareDependencyNotMet otherwise. Returns the full
# set of names available after this middleware runs.
def validated_provides!
  missing = missing_requirements
  unless missing.empty?
    raise Coach::Errors::MiddlewareDependencyNotMet.new(
      @middleware, @previous_middlewares, missing
    )
  end
  @middleware.provided + provided_by_chain
end
"resource": ""
} |
# Summarises the benchmarked request: endpoint name, start time, total
# duration (formatted as ms) and the per-middleware timings, in the order
# produced by #sorted_chain.
def stats
  chain_stats = sorted_chain.map do |event|
    { name: event[:name], duration: format_ms(event[:duration]) }
  end
  {
    endpoint_name: @endpoint_name,
    started_at: @start,
    duration: format_ms(@duration),
    chain: chain_stats,
  }
end
"resource": ""
} |
# Assembles the final notification payload for a completed request and
# publishes it. The payload is the serialized request merged with the
# benchmark stats and the event's :response / :metadata entries.
#
# event     - instrumentation payload; must contain :request, may contain
#             :response and :metadata.
# benchmark - object responding to #stats (e.g. RequestBenchmark).
def broadcast(event, benchmark)
  serialized = RequestSerializer.new(event[:request]).serialize.
    merge(benchmark.stats).
    merge(event.slice(:response, :metadata))
  # Publish on the legacy "coach.request" name only when someone is actually
  # subscribed, warning them that the event was renamed to "request.coach".
  if ActiveSupport::Notifications.notifier.listening?("coach.request")
    ActiveSupport::Deprecation.warn("The 'coach.request' event has been renamed " \
      "to 'request.coach' and the old name will be removed in a future version.")
    ActiveSupport::Notifications.publish("coach.request", serialized)
  end
  # The modern event is always published, even if the legacy one fired too.
  ActiveSupport::Notifications.publish("request.coach", serialized)
end
"resource": ""
} |
# Fetches targeting criteria for this line item. With an +id+, loads that
# single record; without one, returns all criteria for the line item.
def targeting_criteria(id = nil, opts = {})
  if id
    TargetingCriteria.load(account, id, opts)
  else
    TargetingCriteria.all(account, @id, opts)
  end
end
"resource": ""
} |
# Iterates over every element in the cursored collection, transparently
# fetching further pages until the API reports the cursor exhausted.
# Without a block, returns an Enumerator starting at +offset+. Returns self.
#
# Fix: the original forwarded the block recursively via `&Proc.new`, which is
# deprecated since Ruby 2.7 and raises ArgumentError in Ruby 3 when no
# literal block is present; capturing the block parameter explicitly fixes
# that while remaining backward-compatible for all callers.
def each(offset = 0, &block)
  return to_enum(:each, offset) unless block_given?
  @collection[offset..-1].each { |element| yield(element) }
  unless exhausted?
    # Never re-yield elements already seen: resume at the larger offset.
    offset = [@collection.size, offset].max
    fetch_next
    each(offset, &block)
  end
  self
end
"resource": ""
} |
# Maps each given line item id to the list of metric names applicable to it.
# The full metric set is narrowed by the line item's objective, placements
# and product type; lifetime-value metrics are dropped unless the account is
# MACT-enabled.
#
# line_items - line item ids (looked up with with_deleted: true).
#
# Returns a Hash of line_item_id => Array of metric names.
def filter(*line_items)
  result = {}
  params = { line_item_ids: line_items.join(','), with_deleted: true }
  @account.line_items(nil, params).each do |line_item|
    # filter by objective
    objective = line_item.objective.downcase.to_sym
    metrics = OBJECTIVES[objective].map { |family| METRIC_FAMILIES[family] }.flatten
    # filter by placements
    # NOTE(review): filter_placements / filter_product appear to narrow
    # `metrics` in place (their return values are unused) — confirm.
    placements = line_item.placements.map { |p| p.downcase.to_sym }
    filter_placements(metrics, placements)
    # filter by product
    product = line_item.product_type.downcase.to_sym
    filter_product(metrics, product)
    # filter LTV metrics unless account has a MACT partner setup
    metrics.reject! { |m| m.include?('mobile_lifetime_value_') } unless @mact_enabled
    result[line_item.id] = metrics
  end
  result
end
"resource": ""
} |
# Retrieves the feature keys enabled for this ad account. Requires the
# account to be loaded (validate_loaded raises otherwise).
def features
  validate_loaded
  resource = FEATURES % { id: @id }
  request = Request.new(client, :get, resource)
  request.perform.body[:data]
end
"resource": ""
} |
# Fetches the scoped timeline for one or more user ids on this account.
# +ids+ may be a single id or an Array (joined into a comma-separated list);
# +opts+ is merged into the request params.
def scoped_timeline(ids, opts = {})
  user_ids = ids.is_a?(Array) ? ids.join(',') : ids
  params = { user_ids: user_ids }.merge!(opts)
  resource = SCOPED_TIMELINE % { id: @id }
  response = Request.new(client, :get, resource, params: params).perform
  response.body[:data]
end
"resource": ""
} |
# Persists the resource: PUT to the member resource when it already has an
# id, POST to the collection otherwise. Attributes are refreshed from the
# API response in both cases.
def save
  if @id
    resource = self.class::RESOURCE % { account_id: account.id, id: id }
    method = :put
  else
    resource = self.class::RESOURCE_COLLECTION % { account_id: account.id }
    method = :post
  end
  response = Request.new(account.client, method, resource, params: to_params).perform
  from_response(response.body[:data])
end
"resource": ""
} |
# Deletes the resource on the API and refreshes this instance from the
# representation returned by the server.
def delete!
  member_path = self.class::RESOURCE % { account_id: account.id, id: id }
  request = Request.new(account.client, :delete, member_path)
  from_response(request.perform.body[:data])
end
"resource": ""
} |
# Uploads the file to TON, choosing between a single-shot upload for small
# files and a resumable chunked upload otherwise. Returns the stored
# object's location (query string stripped in the chunked case).
def perform
  if @file_size < SINGLE_UPLOAD_MAX
    resource = "#{DEFAULT_RESOURCE}#{@bucket}"
    response = upload(resource, File.read(@file_path))
    # Single upload: the object location comes back in a response header.
    response.headers['location'][0]
  else
    response = init_chunked_upload
    # The server dictates the minimum chunk granularity for this session.
    bytes_per_chunk_size = response.headers['x-ton-min-chunk-size'][0].to_i
    location = response.headers['location'][0]
    bytes_read = 0
    chunk_bytes = bytes_per_chunk_size * DEFAULT_CHUNK_SIZE
    File.open(@file_path) do |file|
      # Read and PUT one chunk at a time; chunk_bytes may shrink or grow
      # between iterations based on the server's measured response time.
      while bytes = file.read(chunk_bytes)
        bytes_start = bytes_read
        bytes_read += bytes.size
        upload_chunk(location, bytes, bytes_start, bytes_read) do |res|
          # Determines the chunk bytes based on response times
          response_time = res.headers['x-response-time'][0].to_f
          response_based_chunk_size =
            (DEFAULT_CHUNK_SIZE * (RESPONSE_TIME_MAX / response_time)).to_i
          # Clamp the adaptive multiplier to [1, DEFAULT_CHUNK_SIZE].
          next_chunk_size = [DEFAULT_CHUNK_SIZE, [1, response_based_chunk_size].max].min
          chunk_bytes = bytes_per_chunk_size * next_chunk_size
        end
      end
    end
    # Drop any query string from the session location to get the object URL.
    location.split('?')[0]
  end
end
"resource": ""
} |
# POSTs the full file contents to TON in a single request and returns the
# raw response (whose 'location' header points at the stored object).
def upload(resource, bytes)
  request_headers = {
    'x-ton-expires' => DEFAULT_EXPIRE,
    'content-length' => @file_size,
    'content-type' => content_type
  }
  request = TwitterAds::Request.new(
    @client, :post, resource,
    domain: DEFAULT_DOMAIN, headers: request_headers, body: bytes
  )
  request.perform
end
"resource": ""
} |
# Opens a resumable TON upload session. The response carries the session
# 'location' and the minimum chunk size the server will accept.
def init_chunked_upload
  session_headers = {
    'x-ton-content-type' => content_type,
    'x-ton-content-length' => @file_size,
    'x-ton-expires' => DEFAULT_EXPIRE,
    'content-length' => 0,
    'content-type' => content_type
  }
  resource = "#{DEFAULT_RESOURCE}#{@bucket}?resumable=true"
  TwitterAds::Request.new(
    @client, :post, resource, domain: DEFAULT_DOMAIN, headers: session_headers
  ).perform
end
"resource": ""
} |
# PUTs one byte range of the file to the resumable upload session, yields
# the response (so the caller can adapt the next chunk size) and returns it.
def upload_chunk(resource, bytes, bytes_start, bytes_read)
  range_headers = {
    'content-type' => content_type,
    'content-length' => bytes.size,
    'content-range' => "bytes #{bytes_start}-#{bytes_read - 1}/#{@file_size}"
  }
  response = TwitterAds::Request.new(
    @client, :put, resource,
    domain: DEFAULT_DOMAIN, headers: range_headers, body: bytes
  ).perform
  yield(response)
  response
end
"resource": ""
} |
# Uploads a membership list file via TON and applies it to this audience
# (operation defaults to 'ADD'), then reloads the audience from the API.
def update(file_path, list_type, operation = 'ADD')
  location = TwitterAds::TONUpload.new(account.client, file_path).perform
  update_audience(self, location, list_type, operation)
  reload!
end
"resource": ""
} |
# Fetches the processing status of changes made to this audience. Returns
# nil for an unsaved audience; otherwise only the change records whose
# tailored_audience_id matches this audience's id.
def status
  return nil unless id
  resource = RESOURCE_UPDATE % { account_id: account.id }
  request = Request.new(account.client, :get, resource, params: to_params)
  changes = Cursor.new(nil, request).to_a
  changes.select { |change| change[:tailored_audience_id] == id }
end
"resource": ""
} |
# Posts a user membership change to the audience's JSON users endpoint.
# Returns [success_count, total_count] as reported by the API.
def users(params)
  resource = RESOURCE_USERS % { account_id: account.id, id: id }
  headers = { 'Content-Type' => 'application/json' }
  response = TwitterAds::Request.new(
    account.client, :post, resource,
    headers: headers, body: params.to_json
  ).perform
  data = response.body[:data]
  [data[:success_count], data[:total_count]]
end
"resource": ""
} |
# Loads a single ads account when +id+ is given, otherwise enumerates all
# accounts accessible to the current credentials.
def accounts(id = nil, opts = {})
  return Account.load(self, id) if id
  Account.all(self, opts)
end
"resource": ""
} |
# Applies this search method to +relation+ by sending it the formatted
# parameter — splatted into multiple arguments when the method is declared
# with a splat param, passed as a single argument otherwise.
def evaluate(relation, param)
  formatted = format_param(param)
  if splat_param?
    relation.send(name, *formatted)
  else
    relation.send(name, formatted)
  end
end
"resource": ""
} |
# Sets the value of a text-like form field (text input, textarea or password
# input) found by +field_locator+; raises if the located field is disabled.
# The new value comes from options[:with].
def fill_in(field_locator, options = {})
  text_like_types = [TextField, TextareaField, PasswordField]
  field = locate_field(field_locator, *text_like_types)
  field.raise_error_if_disabled
  field.set(options[:with])
end
"resource": ""
} |
# Asserts that the current page contains +content+, failing with the
# HasContent matcher's message when it does not.
def assert_contain(content)
  matcher = HasContent.new(content)
  assert matcher.matches?(current_dom), matcher.failure_message
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.