| _id (string, 2–6 chars) | title (string, 9–130 chars) | partition (string, 3 classes) | text (string, 66–10.5k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q7500
|
Fbuser.Api::V1::UsersController.index_authorize
|
train
|
def index_authorize
if !::Authorization::Fbuser::V1::User.index?(current_user)
render :json => {errors: "User is not authorized for this action"}, status: :forbidden
end
end
|
ruby
|
{
"resource": ""
}
|
q7501
|
ThermalData.DataForANS_5_1_1979.thePi
|
train
|
def thePi
hash = HashWithThermalFission.new
hash.thermal_fission[:U235] = 0.98
hash.thermal_fission[:Pu239] = 0.01
hash.thermal_fission[:U238] = 0.01
hash.thermal_fission
end
|
ruby
|
{
"resource": ""
}
|
q7502
|
ThermalData.DataForANS_5_1_1979.theU235_alpha
|
train
|
def theU235_alpha
array = Array.new(23)
array[0] = 6.5057E-01
array[1] = 5.1264E-01
array[2] = 2.4384E-01
array[3] = 1.3850E-01
array[4] = 5.544E-02
array[5] = 2.2225E-02
array[6] = 3.3088E-03
array[7] = 9.3015E-04
array[8] = 8.0943E-04
array[9] = 1.9567E-04
array[10] = 3.2535E-05
array[11] = 7.5595E-06
array[12] = 2.5232E-06
array[13] = 4.9948E-07
array[14] = 1.8531E-07
array[15] = 2.6608E-08
array[16] = 2.2398E-09
array[17] = 8.1641E-12
array[18] = 8.7797E-11
array[19] = 2.5131E-14
array[20] = 3.2176E-16
array[21] = 4.5038E-17
array[22] = 7.4791E-17
array
end
|
ruby
|
{
"resource": ""
}
|
q7503
|
ThermalData.DataForANS_5_1_1979.theU235_lamda
|
train
|
def theU235_lamda
array = Array.new(23)
array[0] = 2.2138E+01
array[1] = 5.1587E-01
array[2] = 1.9594E-01
array[3] = 1.0314E-01
array[4] = 3.3656E-02
array[5] = 1.1681E-02
array[6] = 3.5870E-03
array[7] = 1.3930E-03
array[8] = 6.2630E-04
array[9] = 1.8906E-04
array[10] = 5.4988E-05
array[11] = 2.0958E-05
array[12] = 1.0010E-05
array[13] = 2.5438E-06
array[14] = 6.6361E-07
array[15] = 1.2290E-07
array[16] = 2.7213E-08
array[17] = 4.3714E-09
array[18] = 7.5780E-10
array[19] = 2.4786E-10
array[20] = 2.2384E-13
array[21] = 2.4600E-14
array[22] = 1.5699E-14
array
end
|
ruby
|
{
"resource": ""
}
|
q7504
|
ThermalData.DataForANS_5_1_1979.thePu239_alpha
|
train
|
def thePu239_alpha
array = Array.new(23)
array[0] = 2.083E-01
array[1] = 3.853E-01
array[2] = 2.213E-01
array[3] = 9.460E-02
array[4] = 3.531E-02
array[5] = 2.292E-02
array[6] = 3.946E-03
array[7] = 1.317E-03
array[8] = 7.052E-04
array[9] = 1.432E-04
array[10] = 1.765E-05
array[11] = 7.347E-06
array[12] = 1.747E-06
array[13] = 5.481E-07
array[14] = 1.671E-07
array[15] = 2.112E-08
array[16] = 2.996E-09
array[17] = 5.107E-11
array[18] = 5.730E-11
array[19] = 4.138E-14
array[20] = 1.088E-15
array[21] = 2.454E-17
array[22] = 7.557E-17
array
end
|
ruby
|
{
"resource": ""
}
|
q7505
|
ThermalData.DataForANS_5_1_1979.thePu239_lamda
|
train
|
def thePu239_lamda
array = Array.new(23)
array[0] = 1.002E+01
array[1] = 6.433E-01
array[2] = 2.186E-01
array[3] = 1.004E-01
array[4] = 3.728E-02
array[5] = 1.435E-02
array[6] = 4.549E-03
array[7] = 1.328E-03
array[8] = 5.356E-04
array[9] = 1.730E-04
array[10] = 4.881E-05
array[11] = 2.006E-05
array[12] = 8.319E-06
array[13] = 2.358E-06
array[14] = 6.450E-07
array[15] = 1.278E-07
array[16] = 2.466E-08
array[17] = 9.378E-09
array[18] = 7.450E-10
array[19] = 2.426E-10
array[20] = 2.210E-13
array[21] = 2.640E-14
array[22] = 1.380E-14
array
end
|
ruby
|
{
"resource": ""
}
|
q7506
|
ThermalData.DataForANS_5_1_1979.theU238_alpha
|
train
|
def theU238_alpha
array = Array.new(23)
array[0] = 1.2311E+0
array[1] = 1.1486E+0
array[2] = 7.0701E-01
array[3] = 2.5209E-01
array[4] = 7.187E-02
array[5] = 2.8291E-02
array[6] = 6.8382E-03
array[7] = 1.2322E-03
array[8] = 6.8409E-04
array[9] = 1.6975E-04
array[10] = 2.4182E-05
array[11] = 6.6356E-06
array[12] = 1.0075E-06
array[13] = 4.9894E-07
array[14] = 1.6352E-07
array[15] = 2.3355E-08
array[16] = 2.8094E-09
array[17] = 3.6236E-11
array[18] = 6.4577E-11
array[19] = 4.4963E-14
array[20] = 3.6654E-16
array[21] = 5.6293E-17
array[22] = 7.1602E-17
array
end
|
ruby
|
{
"resource": ""
}
|
q7507
|
ThermalData.DataForANS_5_1_1979.theU238_lamda
|
train
|
def theU238_lamda
array = Array.new(23)
array[0] = 3.2881E+0
array[1] = 9.3805E-01
array[2] = 3.7073E-01
array[3] = 1.1118E-01
array[4] = 3.6143E-02
array[5] = 1.3272E-02
array[6] = 5.0133E-03
array[7] = 1.3655E-03
array[8] = 5.5158E-04
array[9] = 1.7873E-04
array[10] = 4.9032E-05
array[11] = 1.7058E-05
array[12] = 7.0465E-06
array[13] = 2.3190E-06
array[14] = 6.4480E-07
array[15] = 1.2649E-07
array[16] = 2.5548E-08
array[17] = 8.4782E-09
array[18] = 7.5130E-10
array[19] = 2.4188E-10
array[20] = 2.2739E-13
array[21] = 9.0536E-14
array[22] = 5.6098E-15
array
end
|
ruby
|
{
"resource": ""
}
|
q7508
|
Rockit.Application.run
|
train
|
def run
rockit_file = CONFIG_FILES.select { |f| File.exists?(f) }.first
raise ArgumentError "No Rockitfile found (looking for: #{CONFIG_FILES.join(',')})" unless rockit_file
Dsl.new(self).instance_eval(File.read(rockit_file), rockit_file)
end
|
ruby
|
{
"resource": ""
}
|
q7509
|
Rockit.Application.if_string_digest_changed
|
train
|
def if_string_digest_changed(key, input, &block)
if_string_changed(key, Digest::SHA256.new.update(input.to_s).hexdigest.to_s, &block)
end
|
ruby
|
{
"resource": ""
}
|
q7510
|
Rockit.Application.if_file_changed
|
train
|
def if_file_changed(file, &block)
if_string_changed(file, Digest::SHA256.file(file).hexdigest.to_s, &block)
end
|
ruby
|
{
"resource": ""
}
|
q7511
|
Rockit.Application.if_string_changed
|
train
|
def if_string_changed(key, new_value, &block)
if new_value != @hash_store[key]
old_value = @hash_store[key]
@hash_store[key] = new_value
block.call(key, new_value, old_value) if block_given?
end
end
|
ruby
|
{
"resource": ""
}
|
q7512
|
Rockit.Application.system_exit_on_error
|
train
|
def system_exit_on_error(command, options={})
options = {'print_command' => true}.merge(string_keys(options))
output command if options['print_command'] || @debug
command_output = system_command(command)
output command_output if @debug
unless last_process.success?
result = options['on_failure'].call(command, options) if options['on_failure'].is_a?(Proc)
return true if result
output options['failure_message'] || command_output
return exit(last_process.exitstatus)
end
options['on_success'].call(command, options) if options['on_success'].is_a?(Proc)
true
end
|
ruby
|
{
"resource": ""
}
|
q7513
|
Bade.Renderer._find_file!
|
train
|
def _find_file!(name, reference_path)
sub_path = File.expand_path(name, File.dirname(reference_path))
if File.exist?(sub_path)
return if sub_path.end_with?('.rb') # handled in Generator
sub_path
else
bade_path = "#{sub_path}.bade"
rb_path = "#{sub_path}.rb"
bade_exist = File.exist?(bade_path)
rb_exist = File.exist?(rb_path)
relative = Pathname.new(reference_path).relative_path_from(Pathname.new(File.dirname(file_path))).to_s
if bade_exist && rb_exist
message = "Found both .bade and .rb files for `#{name}` in file #{relative}, "\
'change the import path so it references a unique file.'
raise LoadError.new(name, reference_path, message)
elsif bade_exist
return bade_path
elsif rb_exist
return # handled in Generator
else
message = "Can't find file matching name `#{name}` referenced from file #{relative}"
raise LoadError.new(name, reference_path, message)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7514
|
Myreplicator.ExportMetadata.equals
|
train
|
def equals object
if table == object.table && database == object.database
return true
end
return false
end
|
ruby
|
{
"resource": ""
}
|
q7515
|
Myreplicator.ExportMetadata.store!
|
train
|
def store!
Kernel.p "$$$$$$$$$$$$$$$$$$ @ssh CHECK $$$$$$$$$$$$$$$$$$"
cmd = "echo \"#{self.to_json.gsub("\"","\\\\\"")}\" > #{@filepath}.json"
puts cmd
result = @ssh.exec!(cmd)
puts result
end
|
ruby
|
{
"resource": ""
}
|
q7516
|
Spreadsheet.Row.formatted
|
train
|
def formatted
copy = dup
@formats.rcompact!
if copy.length < @formats.size
copy.concat Array.new(@formats.size - copy.length)
end
copy
end
|
ruby
|
{
"resource": ""
}
|
q7517
|
Dbox.Utils.relative_to_local_path
|
train
|
def relative_to_local_path(path)
if path && path.length > 0
case_insensitive_join(local_path, path)
else
case_insensitive_resolve(local_path)
end
end
|
ruby
|
{
"resource": ""
}
|
q7518
|
Dbox.Utils.relative_to_remote_path
|
train
|
def relative_to_remote_path(path)
if path && path.length > 0
File.join(remote_path, path)
else
remote_path
end
end
|
ruby
|
{
"resource": ""
}
|
q7519
|
Nake.AbstractTask.config
|
train
|
def config
@config ||= begin
Hash.new do |hash, key|
raise ConfigurationError, "Configuration key #{key} in task #{name} doesn't exist"
end.tap do |hash|
hash.define_singleton_method(:declare) do |*keys|
keys.each { |key| self[key] = nil unless self.has_key?(key) }
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7520
|
Apostle.Mail.deliver!
|
train
|
def deliver!
return true unless Apostle.deliver
unless template_id && template_id != ''
raise DeliveryError,
'No email template_id provided'
end
queue = Apostle::Queue.new
queue.add self
queue.deliver!
# Return true or false depending on successful delivery
if queue.results[:valid].include?(self)
return true
else
raise _exception
end
end
|
ruby
|
{
"resource": ""
}
|
q7521
|
RabbitHutch.Worker.start
|
train
|
def start
@exchange = @channel.topic(@exchange_name, :durable => true, :auto_delete => false, :internal => true)
@queue = @channel.queue(@queue_name, :durable => true, :auto_delete => false)
@queue.bind(@exchange, :routing_key => 'publish.#')
@queue.subscribe(&@consumer.method(:handle_message))
end
|
ruby
|
{
"resource": ""
}
|
q7522
|
CurrencyConvertible.Proxy.cached_rate
|
train
|
def cached_rate(original, target)
if defined?(Rails)
unless rate = Rails.cache.read("#{original}_#{target}_#{stringified_exchange_date}")
rate = (1.0 / Rails.cache.read("#{target}_#{original}_#{stringified_exchange_date}")) rescue nil
end
rate
end
end
|
ruby
|
{
"resource": ""
}
|
q7523
|
Ominous.Warning.pass_method_to_warning_closer
|
train
|
def pass_method_to_warning_closer(symbol, closer)
raise "A closer is needed to identify the warning_closer" unless closer.kind_of? Closer
warning_closer = warning_closers.where(:closer_id => closer.id).first
warning_closer.send(symbol) if warning_closer
end
|
ruby
|
{
"resource": ""
}
|
q7524
|
TreeRb.TreeNode.find
|
train
|
def find(content = nil, &block)
if content and block_given?
raise "TreeNode::find - passed content AND block"
end
if content
if content.class == Regexp
block = proc { |l| l.content =~ content }
else
block = proc { |l| l.content == content }
end
end
return self if block.call(self)
leaf = @leaves.find { |l| block.call(l) }
return leaf if leaf
@children.each do |child|
node = child.find &block
return node if node
end
nil
end
|
ruby
|
{
"resource": ""
}
|
q7525
|
Rack::AcceptHeaders.Charset.matches
|
train
|
def matches(charset)
values.select {|v|
v == charset || v == '*'
}.sort {|a, b|
# "*" gets least precedence, any others should be equal.
a == '*' ? 1 : (b == '*' ? -1 : 0)
}
end
|
ruby
|
{
"resource": ""
}
|
q7526
|
NyplRepo.Client.get_capture_items
|
train
|
def get_capture_items(c_uuid)
url = "#{@server_url}/items/#{c_uuid}.json?per_page=500"
json = self.get_json(url)
captures = []
capture = json["nyplAPI"]["response"]["capture"]
captures << capture
totalPages = json["nyplAPI"]["request"]["totalPages"].to_i
if totalPages >= 2
puts "total pages " + totalPages.to_s if @debug
(2..totalPages).each do | page |
puts "page: "+page.to_s if @debug
newurl = url + "&page=#{page}"
json = self.get_json(newurl)
newcapture = json["nyplAPI"]["response"]["capture"]
captures << newcapture
end
end
captures.flatten!
captures
end
|
ruby
|
{
"resource": ""
}
|
q7527
|
NyplRepo.Client.get_mods_item
|
train
|
def get_mods_item(mods_uuid)
url = "#{@server_url}/items/mods/#{mods_uuid}.json"
json = self.get_json(url)
item = nil
if json["nyplAPI"]["response"]["mods"]
item = json["nyplAPI"]["response"]["mods"]
end
return item
end
|
ruby
|
{
"resource": ""
}
|
q7528
|
NyplRepo.Client.get_bibl_uuid
|
train
|
def get_bibl_uuid(image_id)
url = "#{@server_url}/items/local_image_id/#{image_id}.json"
json = self.get_json(url)
bibl_uuid = nil
if json["nyplAPI"]["response"]["numResults"].to_i > 0
bibl_uuid = json["nyplAPI"]["response"]["uuid"]
end
return bibl_uuid
end
|
ruby
|
{
"resource": ""
}
|
q7529
|
NyplRepo.Client.get_highreslink
|
train
|
def get_highreslink(bibl_uuid, image_id)
url = "#{@server_url}/items/#{bibl_uuid}.json?per_page=500"
json = self.get_json(url)
highreslink = nil
json["nyplAPI"]["response"]["capture"].each do | capture|
if capture["imageID"] == image_id
highreslink = capture["highResLink"]
break
end #if
end if json["nyplAPI"]["response"]["numResults"].to_i > 0
return highreslink
end
|
ruby
|
{
"resource": ""
}
|
q7530
|
Kalimba.AttributeAssignment.assign_attributes
|
train
|
def assign_attributes(new_attributes = {}, options = {})
return if new_attributes.blank?
attributes = new_attributes.stringify_keys
multi_parameter_attributes = []
nested_parameter_attributes = []
attributes.each do |k, v|
if k.include?("(")
multi_parameter_attributes << [ k, v ]
elsif respond_to?("#{k}=")
if v.is_a?(Hash)
nested_parameter_attributes << [ k, v ]
else
send("#{k}=", v)
end
else
raise UnknownAttributeError, "unknown attribute: #{k}"
end
end
# assign any deferred nested attributes after the base attributes have been set
nested_parameter_attributes.each do |k,v|
send("#{k}=", v)
end
assign_multiparameter_attributes(multi_parameter_attributes)
end
|
ruby
|
{
"resource": ""
}
|
q7531
|
Twinfield.Customer.find_by_code
|
train
|
def find_by_code(code)
Twinfield::Process.new(@session).
request(:process_xml_document, get_dimension_xml(@company, 'DEB', { code: code })).
body[:process_xml_document_response][:process_xml_document_result][:dimension]
end
|
ruby
|
{
"resource": ""
}
|
q7532
|
Twinfield.Customer.find_by_name
|
train
|
def find_by_name(name)
Twinfield::Finder.new(@session).
search(Twinfield::FinderSearch.new('DIM', name, 0, 1, 0, { office: @company, dimtype: 'DEB'} )).
body[:search_response][:data]
end
|
ruby
|
{
"resource": ""
}
|
q7533
|
Twinfield.Customer.get_dimension_xml
|
train
|
def get_dimension_xml(office, dimtype, opts = {})
xml = Builder::XmlMarkup.new
xml = xml.read do
xml.type('dimensions')
xml.office(office)
xml.dimtype(dimtype)
xml.code(opts.fetch(:code){})
end
end
|
ruby
|
{
"resource": ""
}
|
q7534
|
Emotions.Emotional.express!
|
train
|
def express!(emotion, emotive)
emotion = _emotions_about(emotive).where(emotion: emotion).first_or_initialize
begin
emotion.tap(&:save!)
rescue ActiveRecord::RecordInvalid => e
raise InvalidEmotion.new(e.record)
end
end
|
ruby
|
{
"resource": ""
}
|
q7535
|
Emotions.Emotional.no_longer_express!
|
train
|
def no_longer_express!(emotion, emotive)
_emotions_about(emotive).where(emotion: emotion).first.tap { |e| e.try(:destroy) }
end
|
ruby
|
{
"resource": ""
}
|
q7536
|
TreeRb.DirCatVisitor.add_entry
|
train
|
def add_entry(e)
@entries.push(e)
if @md5_to_entries.has_key?(e.md5)
@md5_to_entries[e.md5].push(e)
else
@md5_to_entries[e.md5] = [e]
end
end
|
ruby
|
{
"resource": ""
}
|
q7537
|
Stars.Favstar.parse_title
|
train
|
def parse_title(title)
strip = title.split(':').first
title = title.gsub(strip,'')
title = title[2..-1] if title[0..1] == ": "
title
end
|
ruby
|
{
"resource": ""
}
|
q7538
|
Bisques.Client.create_queue
|
train
|
def create_queue(name, attributes = {})
response = action("CreateQueue", {"QueueName" => Queue.sanitize_name("#{queue_prefix}#{name}")}.merge(attributes))
if response.success?
Queue.new(self, response.doc.xpath("//QueueUrl").text)
else
raise "Could not create queue #{name}"
end
rescue AwsActionError => error
if error.code == "AWS.SimpleQueueService.QueueDeletedRecently"
raise QueueDeletedRecentlyError, error.message
else
raise error
end
end
|
ruby
|
{
"resource": ""
}
|
q7539
|
Bisques.Client.get_queue
|
train
|
def get_queue(name, options = {})
response = action("GetQueueUrl", {"QueueName" => Queue.sanitize_name("#{queue_prefix}#{name}")}.merge(options))
if response.success?
Queue.new(self, response.doc.xpath("//QueueUrl").text)
end
rescue Bisques::AwsActionError => e
raise unless e.code == "AWS.SimpleQueueService.NonExistentQueue"
end
|
ruby
|
{
"resource": ""
}
|
q7540
|
Bisques.Client.list_queues
|
train
|
def list_queues(prefix = "")
response = action("ListQueues", "QueueNamePrefix" => "#{queue_prefix}#{prefix}")
response.doc.xpath("//ListQueuesResult/QueueUrl").map(&:text).map do |url|
Queue.new(self, url)
end
end
|
ruby
|
{
"resource": ""
}
|
q7541
|
Bisques.Client.send_message
|
train
|
def send_message(queue_url, message_body, delay_seconds=nil)
options = {"MessageBody" => message_body}
options["DelaySeconds"] = delay_seconds if delay_seconds
tries = 0
md5 = Digest::MD5.hexdigest(message_body)
begin
tries += 1
response = action("SendMessage", queue_url, options)
returned_md5 = response.doc.xpath("//MD5OfMessageBody").text
raise MessageHasWrongMd5Error.new(message_body, md5, returned_md5) unless md5 == returned_md5
rescue MessageHasWrongMd5Error
if tries < 2
retry
else
raise
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7542
|
UPnP.UPnP.externalIP
|
train
|
def externalIP()
joinThread()
external_ip = getCString()
r = MiniUPnP.UPNP_GetExternalIPAddress(@urls.controlURL,
@data.servicetype,external_ip)
if r != 0 then
raise UPnPException.new, "Error while retriving the external ip address. #{code2error(r)}."
end
return external_ip.rstrip()
end
|
ruby
|
{
"resource": ""
}
|
q7543
|
UPnP.UPnP.status
|
train
|
def status()
joinThread()
lastconnerror = getCString()
status = getCString()
uptime = 0
begin
uptime_uint = MiniUPnP.new_uintp()
r = MiniUPnP.UPNP_GetStatusInfo(@urls.controlURL,
@data.servicetype, status, uptime_uint,
lastconnerror)
if r != 0 then
raise UPnPException.new, "Error while retriving status info. #{code2error(r)}."
end
uptime = MiniUPnP.uintp_value(uptime_uint)
rescue
raise
ensure
MiniUPnP.delete_uintp(uptime_uint)
end
return status.rstrip,lastconnerror.rstrip,uptime
end
|
ruby
|
{
"resource": ""
}
|
q7544
|
UPnP.UPnP.connectionType
|
train
|
def connectionType()
joinThread()
type = getCString()
if MiniUPnP.UPNP_GetConnectionTypeInfo(@urls.controlURL,
@data.servicetype,type) != 0 then
raise UPnPException.new, "Error while retriving connection info."
end
type.rstrip
end
|
ruby
|
{
"resource": ""
}
|
q7545
|
UPnP.UPnP.totalBytesSent
|
train
|
def totalBytesSent()
joinThread()
v = MiniUPnP.UPNP_GetTotalBytesSent(@urls.controlURL_CIF,
@data.servicetype_CIF)
if v < 0 then
raise UPnPException.new, "Error while retriving total bytes sent."
end
return v
end
|
ruby
|
{
"resource": ""
}
|
q7546
|
UPnP.UPnP.totalBytesReceived
|
train
|
def totalBytesReceived()
joinThread()
v = MiniUPnP.UPNP_GetTotalBytesReceived(@urls.controlURL_CIF,
@data.servicetype_CIF)
if v < 0 then
raise UPnPException.new, "Error while retriving total bytes received."
end
return v
end
|
ruby
|
{
"resource": ""
}
|
q7547
|
UPnP.UPnP.totalPacketsSent
|
train
|
def totalPacketsSent()
joinThread()
v = MiniUPnP.UPNP_GetTotalPacketsSent(@urls.controlURL_CIF,
@data.servicetype_CIF);
if v < 0 then
raise UPnPException.new, "Error while retriving total packets sent."
end
return v
end
|
ruby
|
{
"resource": ""
}
|
q7548
|
UPnP.UPnP.portMappings
|
train
|
def portMappings()
joinThread()
i, r = 0, 0
mappings = Array.new
while r == 0
rhost = getCString()
enabled = getCString()
duration = getCString()
description = getCString()
nport = getCString()
lport = getCString()
duration = getCString()
client = getCString()
protocol = getCString()
r = MiniUPnP.UPNP_GetGenericPortMappingEntry(@urls.controlURL,
@data.servicetype,i.to_s,nport,client,lport,
protocol,description,enabled,rhost,duration)
if r != 0 then
break;
end
i = i+1
mappings << PortMapping.new(client.rstrip,lport.rstrip.to_i,
nport.rstrip.to_i,protocol.rstrip,
description.rstrip,enabled.rstrip,
rhost.rstrip,duration.rstrip)
end
return mappings
end
|
ruby
|
{
"resource": ""
}
|
q7549
|
UPnP.UPnP.portMapping
|
train
|
def portMapping(nport,proto)
checkProto(proto)
checkPort(nport)
if nport.to_i == 0 then
raise ArgumentError, "Port must be an int value and greater then 0."
end
joinThread()
client = getCString()
lport = getCString()
if MiniUPnP.UPNP_GetSpecificPortMappingEntry(@urls.controlURL,
@data.servicetype, nport.to_s,proto,
client,lport) != 0 then
raise UPnPException.new, "Error while retriving the port mapping."
end
return client.rstrip, lport.rstrip.to_i
end
|
ruby
|
{
"resource": ""
}
|
q7550
|
UPnP.UPnP.deletePortMapping
|
train
|
def deletePortMapping(nport,proto)
checkProto(proto)
checkPort(nport)
joinThread()
r = MiniUPnP.UPNP_DeletePortMapping(@urls.controlURL,@data.servicetype,
nport.to_s,proto)
if r != 0 then
raise UPnPException.new , "Failed delete mapping: #{code2error(r)}."
end
end
|
ruby
|
{
"resource": ""
}
|
q7551
|
UPnP.UPnP.checkProto
|
train
|
def checkProto(proto)
if proto != Protocol::UDP && proto != Protocol::TCP then
raise ArgumentError, "Unknown protocol #{proto}, only Protocol::TCP and Protocol::UDP are valid."
end
end
|
ruby
|
{
"resource": ""
}
|
q7552
|
Akasha.CommandRouter.register
|
train
|
def register(command, aggregate_class = nil, &block)
raise ArgumentError, 'Pass either aggregate class or block' if aggregate_class && block
handler = aggregate_class || block
@routes[command] = handler
end
|
ruby
|
{
"resource": ""
}
|
q7553
|
Akasha.CommandRouter.route!
|
train
|
def route!(command, aggregate_id, options = {}, **data)
handler = @routes[command]
case handler
when Class
transactor = options.fetch(:transactor, default_transactor)
transactor.call(handler, command, aggregate_id, options, **data)
when handler.respond_to?(:call)
handler.call(command, aggregate_id, options, **data)
when Proc
handler.call(command, aggregate_id, options, **data)
when nil
raise HandlerNotFoundError, "Handler for command #{command.inspect} not found"
else
raise UnsupportedHandlerError, "Unsupported command handler #{handler.inspect}"
end
end
|
ruby
|
{
"resource": ""
}
|
q7554
|
Ork::Model.ClassMethods.attribute
|
train
|
def attribute(name, options = {})
attributes << name unless attributes.include?(name)
defaults[name] = options[:default] if options.has_key?(:default)
if options.has_key?(:accessors)
to_define = Array(options[:accessors]) & accessor_options
else # Default methods
to_define = [:reader, :writer]
end
to_define.each{|m| send("#{m}_for", name) }
end
|
ruby
|
{
"resource": ""
}
|
q7555
|
Ork::Model.ClassMethods.index
|
train
|
def index(name)
indices[name] = Index.new(name) unless indices.include?(name)
end
|
ruby
|
{
"resource": ""
}
|
q7556
|
StatefulLink.ActionAnyOf.action_any_of?
|
train
|
def action_any_of?(*actions)
actions.any? do |sub_ca|
if sub_ca.present?
sub_controller, sub_action = extract_controller_and_action(sub_ca)
((self.controller_path == sub_controller) || (sub_controller.blank?)) && (self.action_name == sub_action || (sub_action == '' || sub_action == '*'))
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7557
|
StatefulLink.ActionAnyOf.extract_controller_and_action
|
train
|
def extract_controller_and_action(ca)
raise ArgumentError, "Pass the string" if ca.nil?
slash_pos = ca.rindex('#')
raise ArgumentError, "Invalid action: #{ca}" if slash_pos.nil?
controller = ca[0, slash_pos]
action = ca[slash_pos+1..-1] || ""
raise ArgumentError, "Invalid action or controller" if action.nil?
[controller, action]
end
|
ruby
|
{
"resource": ""
}
|
q7558
|
ValidationRage.FnordMetricNotifier.call
|
train
|
def call(event_name, payload)
return unless data_present?(payload)
# global validation error event
self.fnord.event({
:_type => event_name,
:payload => payload
})
# class level validation error event
self.fnord.event({
:_type => "validation_rage_error.#{payload.keys.first.to_s.downcase}",
:payload => payload.values.first.keys
})
# two events are enough for now
## attribute level validation error event
#payload.values.first.each do |attribute, error_messages|
# self.fnord.event({
# :_type => "validation_rage_error.#{payload.keys.first.to_s.downcase}.#{attribute}",
# :payload => error_messages
# })
#end
end
|
ruby
|
{
"resource": ""
}
|
q7559
|
Vidibus.Tempfile.make_tmpname
|
train
|
def make_tmpname(basename, n)
extension = File.extname(basename)
sprintf("%s,%d,%d%s", File.basename(basename, extension), $$, n.to_i, extension)
end
|
ruby
|
{
"resource": ""
}
|
q7560
|
LitmosClient.API.get
|
train
|
def get(path, params={})
dont_parse_response = params.delete(:dont_parse_response)
options = {
:content_type => :json,
:accept => :json,
:params => params.merge(:apikey => @api_key, :source => @source)
}
RestClient.get("#{@litmosURL}/#{path}", options) do |response, request, result|
case response.code
when 200, 201
# 200 Success. User/Course etc updated, deleted or retrieved
# 201 Success. User/Course etc created
if response.blank?
true
else
if dont_parse_response
response
else
parse_response(response)
end
end
when 404 # 404 Not Found. The User/Course etc that you requested does not exist
raise NotFound.new(response)
else
# 400 Bad Request. Check that your URI and request body are well formed
# 403 Forbidden. Check your API key, HTTPS setting, Source parameter etc
# 409 Conflict. Often occurs when trying to create an item that already exists
raise ApiError.new(response)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7561
|
DCMetro.Information.station_time
|
train
|
def station_time(station)
# If a station has multiple stations codes we join the codes together
@station_code = station['Code']
if !station['StationTogether1'].empty?
@station_code += ",#{station['StationTogether1']}"
end
if !station['StationTogether2'].empty?
@station_code += ",#{station['StationTogether2']}"
end
# The call to the api is made and the prediction times are returned
@metro_time = RestClient.get "#{BASE_URL}/StationPrediction.svc/json/GetPrediction/#{@station_code}", :params => {
"api_key" => API_KEY,
"subscription-key" => API_KEY
}
@metro_time
end
|
ruby
|
{
"resource": ""
}
|
q7562
|
Campaigning.Template.update!
|
train
|
def update!(params)
response = @@soap.updateTemplate(
:apiKey => params[:apiKey] || CAMPAIGN_MONITOR_API_KEY,
:templateID => @templateID,
:templateName => params[:templateName],
:hTMLPageURL => params[:htmlPageURL],
:zipFileURL => params[:zipFileURL],
:screenshotURL => params[:screenshotURL]
)
handle_response response.template_UpdateResult
end
|
ruby
|
{
"resource": ""
}
|
q7563
|
Jellyfish.NormalizedParams.force_encoding
|
train
|
def force_encoding(data, encoding=Encoding.default_external)
return data if data.respond_to?(:rewind) # e.g. Tempfile, File, etc
if data.respond_to?(:force_encoding)
data.force_encoding(encoding).encode!
elsif data.respond_to?(:each_value)
data.each_value{ |v| force_encoding(v, encoding) }
elsif data.respond_to?(:each)
data.each{ |v| force_encoding(v, encoding) }
end
data
end
|
ruby
|
{
"resource": ""
}
|
q7564
|
Codeqa.Installer.install_codeqa_git_hook
|
train
|
def install_codeqa_git_hook
git_root = app_path.join('.git')
pre_commit_path = git_root.join 'hooks', 'pre-commit'
return false unless File.exist?(git_root)
return false if File.exist?(pre_commit_path)
# an alternative would be to backup the old hook
# FileUtils.mv(pre_commit_path,
# git_root.join('hooks', 'pre-commit.bkp'),
# :force => true)
pre_commit_path.make_symlink('../../.codeqa/git_hook.rb') # relative path!
true
end
|
ruby
|
{
"resource": ""
}
|
q7565
|
KnifePlugins.Ec2AmisUbuntu.list_amis
|
train
|
def list_amis(distro)
amis = Hash.new
Ubuntu.release(distro).amis.each do |ami|
amis[build_type(ami.region, ami.arch, ami.root_store, ami.virtualization_type)] = ami.name
end
amis
end
|
ruby
|
{
"resource": ""
}
|
q7566
|
TreeRb.TreeNodeVisitor.exit_node
|
train
|
def exit_node(tree_node)
parent = @stack.last
if @delegate
@delegate.exit_node(tree_node) if @delegate.respond_to? :exit_node
else
@on_exit_tree_node_blocks.each do |b|
if b.arity == 1
b.call(tree_node)
elsif b.arity == 2
b.call(tree_node, parent)
end
end
end
@stack.pop
end
|
ruby
|
{
"resource": ""
}
|
q7567
|
TreeRb.TreeNodeVisitor.visit_leaf
|
train
|
def visit_leaf(leaf_node)
parent = @stack.last
if @delegate
@delegate.visit_leaf(leaf_node) if @delegate.respond_to? :visit_leaf
else
@on_visit_leaf_node_blocks.each do |b|
if b.arity == 1
b.call(leaf_node)
elsif b.arity == 2
b.call(leaf_node, parent)
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7568
|
Rdoc2md.Document.to_md
|
train
|
def to_md
# Usually ruby is extremely readable, but I think "-1" means "give me all the
# trailing blank lines" is surprisingly opaque. That's what the -1 does...
lines = @text.split("\n", -1)
lines.collect do |line|
result = line
# Leave lines that start with 4 spaces alone. These are code blocks and
# should pass through unchanged.
unless result =~ /^\s{4,}/
# Convert headers
result.sub!(/^(=){1,6}/) { |s| "#" * s.length} unless result =~ /^={7,}/
# Convert strong to have two stars
# The matching pair of stars should start with a single star that is either at
# the beginning of the line or not following a backslash, have at least one
# non-star and non-backslash in between, then end in one star
result.gsub!(/(\A|[^\\\*])\*([^\*]*[^\*\\])\*/, '\1**\2**')
# Convert inline code spans to use backticks
result.gsub!(/(\A|[^\\])\+([^\+]+)\+/, '\1`\2`')
# Convert bare http:, mailto: and ftp: links
result.gsub!(/(\A|\s)(http:|https:|mailto:|ftp:)(\S*)/, '\1[\2\3](\2\3)')
# Convert bare www to an http: link
result.gsub!(/(\A|\s)www\.(\S*)/, '\1[www.\2](http://www.\2)')
# Convert link: links to refer to local files
result.gsub!(/(\A|\s)link:(\S*)/, '\1[\2](\2)')
# Convert multi word labels surrounded by {} with a url
result.gsub!(/\{([^\}]*)\}\[(\S*)\]/, '[\1](\2)')
# Convert one word labels with a url
result.gsub!(/(\A|\s)([^\{\s]\S*)\[(\S*)\]/, '\1[\2](\3)')
end
result
end.join("\n")
end
|
ruby
|
{
"resource": ""
}
|
q7569
|
Campaigning.Client.templates
|
train
|
def templates
response = @@soap.getClientTemplates(:apiKey => @apiKey, :clientID => @clientID)
templates = handle_response response.client_GetTemplatesResult
templates.collect {|template| Template.new(template.templateID, template.name, template.previewURL, template.screenshotURL, :apiKey=> @apiKey)}
end
|
ruby
|
{
"resource": ""
}
|
q7570
|
Campaigning.Client.lists
|
train
|
def lists
response = @@soap.getClientLists(:apiKey => @apiKey, :clientID => @clientID)
lists = handle_response response.client_GetListsResult
lists.collect {|list| List.new(list.listID, list.name, :apiKey=> @apiKey)}
end
|
ruby
|
{
"resource": ""
}
|
q7571
|
Campaigning.Client.campaigns
|
train
|
def campaigns
response = @@soap.getClientCampaigns(:apiKey => @apiKey, :clientID => @clientID )
campaign_list = handle_response response.client_GetCampaignsResult
campaign_list.collect do |campaign|
Campaign.new(campaign.campaignID, campaign.subject, campaign.name, campaign.sentDate, campaign.totalRecipients, :apiKey=> @apiKey)
end
end
|
ruby
|
{
"resource": ""
}
|
q7572
|
Campaigning.Client.update_access_and_billing!
|
train
|
def update_access_and_billing!(params)
response = @@soap.updateClientAccessAndBilling(
:apiKey => @apiKey,
:clientID => @clientID,
:accessLevel => params[:accessLevel],
:username => params.fetch(:username, ""),
:password => params.fetch(:password, ""),
:billingType => params.fetch(:billingType, ""),
:currency => params.fetch(:currency, ""),
:deliveryFee => params.fetch(:deliveryFee, ""),
:costPerRecipient => params.fetch(:costPerRecipient, ""),
:designAndSpamTestFee => params.fetch(:designAndSpamTestFee, "")
)
handle_response response.client_UpdateAccessAndBillingResult
end
|
ruby
|
{
"resource": ""
}
|
q7573
|
Campaigning.Client.update_basics!
|
train
|
def update_basics!(params)
response = @@soap.updateClientBasics(
:apiKey => @apiKey,
:clientID => @clientID,
:companyName => params[:companyName],
:contactName => params[:contactName],
:emailAddress => params[:emailAddress],
:country => params[:country],
:timezone => params[:timezone]
)
handle_response response.client_UpdateBasicsResult
end
|
ruby
|
{
"resource": ""
}
|
q7574
|
Akasha.Repository.save_aggregate
|
train
|
def save_aggregate(aggregate, concurrency: :none)
changeset = aggregate.changeset
events = changeset.events.map { |event| event.with_metadata(namespace: @namespace) }
revision = aggregate.revision if concurrency == :optimistic
stream(aggregate.class, changeset.aggregate_id).write_events(events, revision: revision)
notify_subscribers(changeset.aggregate_id, events)
end
|
ruby
|
{
"resource": ""
}
|
q7575
|
GcmHelper.Sender.update_status
|
train
|
def update_status(unsent_reg_ids, all_results, multicast_result)
results = multicast_result.results
raise RuntimeError, "Internal error: sizes do not match. currentResults: #{results}; unsentRegIds: #{unsent_reg_ids}" unless results.size==unsent_reg_ids.size
new_unsent_reg_ids = []
unsent_reg_ids.each_with_index {|reg_id, index|
result = results[index]
all_results[reg_id]= result
new_unsent_reg_ids << reg_id unless (result.error_code.nil? || result.error_code.eql?(ERROR_UNAVAILABLE))
}
new_unsent_reg_ids
end
|
ruby
|
{
"resource": ""
}
|
q7576
|
Selections.FormBuilderExtensions.selections
|
train
|
def selections(field, options = {}, html_options = {})
SelectionTag.new(self, object, field, options, html_options).select_tag
end
|
ruby
|
{
"resource": ""
}
|
q7577
|
Selections.FormBuilderExtensions.radios
|
train
|
def radios(field, options = {})
html_options = options.clone
html_options.delete_if {|key, value| key == :system_code}
SelectionTag.new(self, object, field, options, html_options).radio_tag
end
|
ruby
|
{
"resource": ""
}
|
q7578
|
Rattler::Util.Node.method_missing
|
train
|
def method_missing(symbol, *args)
(args.empty? and attrs.has_key?(symbol)) ? attrs[symbol] : super
end
|
ruby
|
{
"resource": ""
}
|
q7579
|
DirtyAssociations.ClassMethods.monitor_association_changes
|
train
|
def monitor_association_changes(association)
define_method "#{association}=" do |value|
attribute_will_change!(association.to_s) if _association_will_change?(association, value)
super(value)
end
ids = "#{association.to_s.singularize}_ids"
define_method "#{ids}=" do |value|
attribute_will_change!(association.to_s) if _ids_will_change?(ids, value)
super(value)
end
define_method "#{association}_attributes=" do |value|
attribute_will_change!(association.to_s) if _nested_attributes_will_change?(value)
super(value)
end
[association, ids].each do |name|
define_method "#{name}_change" do
changes[name]
end
define_method "#{name}_changed?" do
changes.has_key?(association.to_s)
end
define_method "#{name}_previously_changed?" do
previous_changes.has_key?(association.to_s)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7580
|
BorrowDirect.Encryption.encode_with_ts
|
train
|
def encode_with_ts(value)
# Not sure if this object is thread-safe, so we re-create
# each time.
public_key = OpenSSL::PKey::RSA.new(self.public_key_str)
payload = "#{value}|#{self.now_timestamp}"
return Base64.encode64(public_key.public_encrypt(payload))
end
|
ruby
|
{
"resource": ""
}
|
q7581
|
Weather.Actions.moon_phase
|
train
|
def moon_phase(location)
response = get('astronomy', location)
{
age: response['moon_phase']['ageOfMoon'].to_i,
illumination: response['moon_phase']['percentIlluminated'].to_i
}
end
|
ruby
|
{
"resource": ""
}
|
q7582
|
Weather.Actions.sun_info
|
train
|
def sun_info(location)
response = get('astronomy', location)
{
rise: {
hour: response['moon_phase']['sunrise']['hour'].to_i,
minute: response['moon_phase']['sunrise']['minute'].to_i
},
set: {
hour: response['moon_phase']['sunset']['hour'].to_i,
minute: response['moon_phase']['sunset']['minute'].to_i
}
}
end
|
ruby
|
{
"resource": ""
}
|
q7583
|
Weather.Actions.parse_simple_forecast
|
train
|
def parse_simple_forecast(response)
ret = {}
response['forecast']['txt_forecast']['forecastday'].each do |f|
ret[f['period']] = {
weekday_name: f['title'],
text: f['fcttext'],
text_metric: f['fcttext_metric'],
image_url: f['icon_url']
}
end
ret
end
|
ruby
|
{
"resource": ""
}
|
q7584
|
Weather.Actions.parse_complex_forecast
|
train
|
def parse_complex_forecast(response)
ret = {}
response['forecast']['simpleforecast']['forecastday'].each do |f|
date = f['date']
ret[f['period'] - 1] = {
date: DateTime.new(date['year'], date['month'], date['day'], date['hour'], date['min'].to_i, date['sec'], date['tz_short']),
weekday_name: date['weekday'],
high_f: f['high']['fahrenheit'].to_i,
high_c: f['high']['celsius'].to_i,
low_f: f['low']['fahrenheit'].to_i,
low_c: f['low']['celsius'].to_i,
conditions: f['conditions'].to_i,
image_url: f['icon_url'],
snow: {
snow_total_in: f['snow_allday']['in'],
snow_total_cm: f['snow_allday']['cm'],
snow_night_in: f['snow_night']['in'],
snow_night_cm: f['snow_night']['cm'],
snow_day_in: f['snow_day']['in'],
snow_day_cm: f['snow_day']['cm']
},
quantative_precipitation: {
qpf_total_in: f['qpf_allday']['in'],
qpf_total_cm: f['qpf_allday']['cm'],
qpf_night_in: f['qpf_night']['in'],
qpf_night_cm: f['qpf_night']['cm'],
qpf_day_in: f['qpf_day']['in'],
qpf_day_cm: f['qpf_day']['cm']
},
wind: {
average_mph: f['avewind']['mph'],
average_kph: f['avewind']['kph'],
average_dir: f['avewind']['dir'],
average_temp: f['avewind']['degrees'],
max_mph: f['maxwind']['mph'],
max_kph: f['maxwind']['kph'],
max_dir: f['maxwind']['dir'],
max_temp: f['maxwind']['degrees']
}
}
end
ret
end
|
ruby
|
{
"resource": ""
}
|
q7585
|
Revenant.Manager.reopen_io
|
train
|
def reopen_io(io, path, mode = nil)
begin
if mode
io.reopen(path, mode)
else
io.reopen(path)
end
io.binmode
rescue ::Exception
end
end
|
ruby
|
{
"resource": ""
}
|
q7586
|
Gxapi.ControllerMethods.gxapi_get_variant
|
train
|
def gxapi_get_variant(identifier, ivar_name = :variant)
# handle override
if params[ivar_name]
val = Gxapi::Ostruct.new(
value: {
index: -1,
experiment_id: nil,
name: params[ivar_name]
}
)
else
val = self.gxapi_base.get_variant(identifier)
end
return instance_variable_set("@#{ivar_name}", val)
end
|
ruby
|
{
"resource": ""
}
|
q7587
|
Rack.WebProfiler.process
|
train
|
def process(request, body, status, headers, exception = nil)
request.env[ENV_RUNTIME] = Time.now.to_f - request.env[ENV_RUNTIME_START]
request.env[ENV_EXCEPTION] = nil
if !exception.nil?
request.env[ENV_EXCEPTION] = exception
WebProfiler::Engine.process_exception(request).finish
else
WebProfiler::Engine.process(request, body, status, headers).finish
end
end
|
ruby
|
{
"resource": ""
}
|
q7588
|
Pirata.Search.search
|
train
|
def search(page = 0)
#build URL ex: http://thepiratebay.se/search/cats/0/99/0
url = Pirata.config[:base_url] + "/search/#{URI.escape(@query)}" + "/#{page.to_s}" + "/#{@sort_type}" + "/#{@category}"
html = Pirata::Search.parse_html(url)
Pirata::Search::parse_search_page(html, self)
end
|
ruby
|
{
"resource": ""
}
|
q7589
|
Pirata.Search.search_page
|
train
|
def search_page(page)
raise "Search must be multipage to search pages" if !multipage?
raise "Page must be a valid, positive integer" if page.class != Fixnum || page < 0
raise "Invalid page range" if page > @pages
self.search(page)
end
|
ruby
|
{
"resource": ""
}
|
q7590
|
Pirata.Search.parse_html
|
train
|
def parse_html(url)
response = open(url, :allow_redirections => Pirata.config[:redirect])
Nokogiri::HTML(response)
end
|
ruby
|
{
"resource": ""
}
|
q7591
|
ParcelApi.ShippingOptions.get_international
|
train
|
def get_international(parcel_params)
response = @connection.get INTERNATIONAL_URL, params: parcel_params
options = response.parsed.tap do |so|
so.delete('success')
so.delete('message_id')
end
RecursiveOpenStruct.new(options, recurse_over_arrays: true)
end
|
ruby
|
{
"resource": ""
}
|
q7592
|
TreeRb.DirTreeWalker.run
|
train
|
def run(dirname = nil, tree_node_visitor = nil, &block)
#
# args detection
#
if dirname and dirname.respond_to?(:enter_node)
tree_node_visitor = dirname
dirname = nil
end
#
# check dirname
#
if @dirname.nil? and dirname.nil?
raise 'missing starting directory'
end
@dirname = dirname if dirname
#
# check visitor
#
if tree_node_visitor and block
raise 'cannot use block and parameter together'
end
if tree_node_visitor
@visitor = tree_node_visitor
end
if block
@visitor = TreeNodeVisitor.new(&block)
end
unless @visitor
raise 'missing visitor'
end
#
# finally starts to process
#
process_directory(File.expand_path(@dirname))
@visitor
end
|
ruby
|
{
"resource": ""
}
|
q7593
|
TreeRb.DirTreeWalker.process_directory
|
train
|
def process_directory(dirname, level=1)
begin
entries = Dir.entries(dirname).sort
rescue Errno::EACCES => e
$stderr.puts e
@visitor.cannot_enter_node(dirname, e)
return
rescue Errno::EPERM => e
$stderr.puts e
@visitor.cannot_enter_node(dirname, e)
return
rescue Errno::ENOENT => e
$stderr.puts e
@visitor.cannot_enter_node(dirname, e)
return
end
@visitor.enter_node(dirname)
entries.each do |basename|
begin
next if basename == '.' or basename == '..' # ignore always "." and ".."
pathname = File.join(dirname, basename)
if File.directory?(pathname)
if not ignore_dir?(basename) and (@max_level.nil? or @max_level > level)
process_directory(pathname, level+1)
end
else
if !!@visit_file && match?(basename) && !ignore_file?(basename)
@visitor.visit_leaf(pathname)
end
end
rescue Errno::EACCES => e
$stderr.puts e
rescue Errno::EPERM => e
$stderr.puts e
rescue Errno::ENOENT => e
$stderr.puts e
end
end
@visitor.exit_node(dirname)
end
|
ruby
|
{
"resource": ""
}
|
q7594
|
Cxxproject.PluginContext.cxx_plugin
|
train
|
def cxx_plugin(&blk)
if blk.arity != @expected_arity
return
end
case blk.arity
when 0
blk.call()
when 3
blk.call(@cxxproject2rake, @building_blocks, @log)
end
end
|
ruby
|
{
"resource": ""
}
|
q7595
|
Podbay.Consul.node_healthy?
|
train
|
def node_healthy?(hostname, services = [], iterations = 60)
print "Waiting for #{hostname} to become healthy"
services = services.reject { |s| get_service_check(s).empty? }.freeze
iterations.times do
print '.'
checks = hostname_health_checks(hostname)
has_services = (services - checks.map { |c| c['ServiceName'] }).empty?
passing_checks = checks.all? { |c| c['Status'] == 'passing' }
if !checks.empty? && has_services && passing_checks
unless services.empty?
print " Services: #{services.join(', ').inspect} Healthy and".green
end
puts ' Node Healthy!'.green
return true
end
sleep(6)
end
false
end
|
ruby
|
{
"resource": ""
}
|
q7596
|
Podbay.Consul.available_services
|
train
|
def available_services(index = nil)
loop do
begin
resp, nindex = _service.get_all(index: index)
return [resp.keys - ['consul'], nindex] if nindex != index
rescue Diplomat::Timeout
# Continue waiting
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7597
|
Podbay.Consul.service_addresses
|
train
|
def service_addresses(service, index = nil)
loop do
addresses, nindex = service_addresses!(service, index)
return [addresses, nindex] if addresses && nindex
end
end
|
ruby
|
{
"resource": ""
}
|
q7598
|
Podbay.Consul.service_addresses!
|
train
|
def service_addresses!(service, index = nil)
meta = {}
resp = _service.get(service, :all, {index: index, wait: '2s'}, meta)
if (nindex = meta[:index]) != index
addresses = resp.map do |address|
{
id: address.ServiceID,
ip: address.ServiceAddress,
node: address.Node,
port: address.ServicePort
}
end
[addresses, nindex]
end
end
|
ruby
|
{
"resource": ""
}
|
q7599
|
Rack::AcceptHeaders.MediaType.matches
|
train
|
def matches(media_type)
type, subtype, params = parse_media_type(media_type)
values.select {|v|
if v == media_type || v == '*/*'
true
else
t, s, p = parse_media_type(v)
t == type && (s == '*' || s == subtype) && (p == '' || params_match?(params, p))
end
}.sort_by {|v|
# Most specific gets precedence.
v.length
}.reverse
end
|
ruby
|
{
"resource": ""
}
|