_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q7300
|
PMP.CollectionDocument.request
|
train
|
# Issues an HTTP request against the PMP API and wraps the raw result.
#
# method - HTTP verb symbol or string (:get, :post, :put, ...).
# url    - absolute URL to request.
# body   - optional payload; serialized via to_persist_json for :post/:put.
#
# An OAuth token is set up first unless the URL path is the root ('/' or '').
# A Faraday ResourceNotFound on a GET is converted into a synthetic
# 404/nil-body response instead of raising; on any other verb it re-raises.
#
# Returns a PMP::Response that also records how the request was made.
def request(method, url, body=nil) # :nodoc:
  # root document requests don't need auth; everything else does
  unless ['/', ''].include?(URI::parse(url).path)
    setup_oauth_token
  end
  begin
    raw = connection(current_options.merge({url: url})).send(method) do |req|
      # only write bodies for verbs that carry one, and only when non-blank
      if [:post, :put].include?(method.to_sym) && !body.blank?
        req.body = PMP::CollectionDocument.to_persist_json(body)
      end
    end
  rescue Faraday::Error::ResourceNotFound => not_found_ex
    if (method.to_sym == :get)
      # a missing resource on GET is an expected outcome, not an error
      raw = OpenStruct.new(body: nil, status: 404)
    else
      raise not_found_ex
    end
  end
  # may not need this, but remember how we made this response
  PMP::Response.new(raw, {method: method, url: url, body: body})
end
|
ruby
|
{
"resource": ""
}
|
q7301
|
Mongoid.Liker.like
|
train
|
# Records +self+ as a liker of +likeable+ and persists the change.
# Refuses (returns false) when self already dislikes the object or when
# its model is not registered as likeable.
def like(likeable)
  return false if self.disliked?(likeable)
  return false unless @@likeable_model_names.include?(likeable.class.to_s.downcase)
  likeable.likers << self
  self.save
end
|
ruby
|
{
"resource": ""
}
|
q7302
|
Mongoid.Liker.unlike
|
train
|
# Removes +self+ from the likers of +likeable+ and persists the change.
# Returns false when the object's model is not registered as likeable.
def unlike(likeable)
  return false unless @@likeable_model_names.include?(likeable.class.to_s.downcase)
  likeable.likers.delete self
  self.save
end
|
ruby
|
{
"resource": ""
}
|
q7303
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.birth
|
train
|
# Returns the selected birth event if any assertion is marked selected,
# otherwise falls back to the first recorded birth (nil when none).
def birth
  births.find { |b| !b.selected.nil? } || births[0]
end
|
ruby
|
{
"resource": ""
}
|
q7304
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.death
|
train
|
# Returns the selected death event if any assertion is marked selected,
# otherwise falls back to the first recorded death (nil when none).
def death
  deaths.find { |d| !d.selected.nil? } || deaths[0]
end
|
ruby
|
{
"resource": ""
}
|
q7305
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.select_mother_summary
|
train
|
# Marks the person identified by +person_id+ as the selected mother on the
# first parents reference, creating that reference when absent.
def select_mother_summary(person_id)
  add_parents!
  couple = parents.first
  couple ||= ParentsReference.new
  couple.select_parent(person_id, 'Female')
  parents[0] = couple
end
|
ruby
|
{
"resource": ""
}
|
q7306
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.select_father_summary
|
train
|
# Marks the person identified by +person_id+ as the selected father on the
# first parents reference, creating that reference when absent.
def select_father_summary(person_id)
  add_parents!
  couple = parents.first
  couple ||= ParentsReference.new
  couple.select_parent(person_id, 'Male')
  parents[0] = couple
end
|
ruby
|
{
"resource": ""
}
|
q7307
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.select_spouse_summary
|
train
|
# Appends a new family reference with +person_id+ selected as the spouse.
def select_spouse_summary(person_id)
  add_families!
  reference = FamilyReference.new
  reference.select_spouse(person_id)
  families << reference
end
|
ruby
|
{
"resource": ""
}
|
q7308
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.add_sealing_to_parents
|
train
|
# Adds a sealing-to-parents ordinance assertion.
# Both :mother and :father must be present in +options+; raises
# ArgumentError otherwise. The :type key is forced to Sealing_to_Parents.
def add_sealing_to_parents(options)
  [:mother, :father].each do |key|
    raise ArgumentError, ":#{key} option is required" if options[key].nil?
  end
  add_assertions!
  options[:type] = OrdinanceType::Sealing_to_Parents
  assertions.add_ordinance(options)
end
|
ruby
|
{
"resource": ""
}
|
q7309
|
Org::Familysearch::Ws::Familytree::V2::Schema.Person.select_relationship_ordinances
|
train
|
# Looks up the ordinances asserted on the spouse relationship whose id
# matches options[:id].
#
# Returns the ordinances collection when found, [] when the relationship
# is missing or carries no ordinance assertions, and nil when this person
# has no relationships at all (the outer +if+ has no else branch).
# Raises ArgumentError when :id is not supplied.
def select_relationship_ordinances(options)
  raise ArgumentError, ":id required" if options[:id].nil?
  if self.relationships
    spouse_relationship = self.relationships.spouses.find{|s|s.id == options[:id]}
    if spouse_relationship && spouse_relationship.assertions && spouse_relationship.assertions.ordinances
      spouse_relationship.assertions.ordinances
    else
      []
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7310
|
EM.File.read
|
train
|
# Asynchronously reads from the underlying file in @rw_len-sized chunks,
# yielding control back to the EventMachine reactor between chunks.
#
# length - maximum number of bytes to read (nil reads to EOF). A Proc may
#          be passed here instead, in which case it is treated as +filter+.
# filter - optional Proc applied to each chunk before buffering.
# block  - receives the accumulated buffer when the read completes.
#
# On Errno::EBADF for a real File the handle is reopened and the position
# restored before retrying.
def read(length = nil, filter = nil, &block)
  buffer = ""
  pos = 0
  # Argument shuffle: read(proc) means "no length limit, proc is the filter".
  if length.kind_of? Proc
    filter = length
    length = nil # BUG FIX: was left as the Proc, breaking `length - buffer.length`
                 # and `buffer.length == length` below
  end
  worker = Proc::new do
    # Sets length for read
    if not length.nil?
      rlen = length - buffer.length
      if rlen > @rw_len
        rlen = @rw_len
      end
    else
      rlen = @rw_len
    end
    # Reads
    begin
      chunk = @native.read(rlen)
      if not filter.nil?
        chunk = filter.call(chunk)
      end
      buffer << chunk
    rescue Errno::EBADF
      if @native.kind_of? ::File
        # stale descriptor: reopen and seek back to where we were
        self.reopen!
        @native.seek(pos)
        redo
      else
        raise
      end
    end
    pos = @native.pos
    # Returns or continues work
    if @native.eof? or (buffer.length == length)
      if not block.nil?
        yield buffer # returns result
      end
    else
      EM::next_tick { worker.call() } # continues work
    end
  end
  worker.call()
end
|
ruby
|
{
"resource": ""
}
|
q7311
|
EM.File.write
|
train
|
# Asynchronously writes +data+ (an IO, or a String wrapped in StringIO) to
# the underlying file in @rw_len-sized chunks, yielding back to the
# EventMachine reactor between chunks via EM::next_tick.
#
# filter - optional Proc applied to each chunk before writing.
# block  - receives the final file position once all data is written.
#
# On Errno::EBADF for a real File the handle is reopened and the write
# position restored before retrying.
# NOTE(review): if +data+ is empty, io.read returns nil on the first pass
# and nil reaches filter/@native.write — confirm callers never pass empty
# input.
def write(data, filter = nil, &block)
  pos = 0
  if data.kind_of? IO
    io = data
  else
    io = StringIO::new(data)
  end
  worker = Proc::new do
    # Writes
    begin
      chunk = io.read(@rw_len)
      if not filter.nil?
        chunk = filter.call(chunk)
      end
      @native.write(chunk)
    rescue Errno::EBADF
      if @native.kind_of? File
        # stale descriptor: reopen and seek back to where we were
        self.reopen!
        @native.seek(pos)
        redo
      else
        raise
      end
    end
    pos = @native.pos
    # Returns or continues work
    if io.eof?
      if not block.nil?
        yield pos # returns result
      end
    else
      EM::next_tick { worker.call() } # continues work
    end
  end
  worker.call()
end
|
ruby
|
{
"resource": ""
}
|
q7312
|
SimpleMock.Tracer.verify
|
train
|
# Verifies that every expected call was actually made.
# Returns true when nothing is missing; raises MockExpectationError naming
# the first unmet expectation otherwise.
def verify
  missing = expected_calls.keys - actual_calls.keys
  return true if missing.empty?
  raise MockExpectationError, "expected #{missing.first.inspect}"
end
|
ruby
|
{
"resource": ""
}
|
q7313
|
BBLib.TaskTimer.time
|
train
|
# Reports a timing statistic for +task+. Returns nil for unknown tasks.
#
# type selects the statistic over the task's recorded history times:
#   :current - seconds elapsed on the running timer (nil when stopped)
#   :min, :max, :first, :last - that element of the history
#   :avg     - mean of recorded times (nil when history is empty)
#   :sum     - total of recorded times (nil when history is empty,
#              because inject without an initial value returns nil)
#   :all     - the raw array of recorded times
#   :count   - number of recorded runs
def time(task = :default, type = :current)
  return nil unless tasks.keys.include?(task)
  numbers = tasks[task][:history].map { |v| v[:time] }
  case type
  when :current
    return nil unless tasks[task][:current]
    Time.now.to_f - tasks[task][:current]
  when :min, :max, :first, :last
    numbers.send(type)
  when :avg
    numbers.size.zero? ? nil : numbers.inject { |sum, n| sum + n }.to_f / numbers.size
  when :sum
    numbers.inject { |sum, n| sum + n }
  when :all
    numbers
  when :count
    numbers.size
  end
end
|
ruby
|
{
"resource": ""
}
|
q7314
|
BBLib.TaskTimer.clear
|
train
|
# Stops +task+ if running and wipes its recorded history.
# Returns nil for unknown tasks.
def clear(task = :default)
  return nil unless tasks.key?(task)
  stop(task)
  tasks[task][:history].clear
end
|
ruby
|
{
"resource": ""
}
|
q7315
|
BBLib.TaskTimer.start
|
train
|
# Starts (or restarts) the timer for +task+, registering the task on first
# use and stopping any in-flight run first. Returns 0.
def start(task = :default)
  unless tasks.key?(task)
    tasks[task] = { history: [], current: nil }
  end
  stop(task) if tasks[task][:current]
  tasks[task][:current] = Time.now.to_f
  0
end
|
ruby
|
{
"resource": ""
}
|
q7316
|
BBLib.TaskTimer.stop
|
train
|
# Stops the running timer for +task+, appends a history entry and returns
# the elapsed seconds. Returns nil when the task is unknown or not active.
# History is trimmed to +retention+ entries when that limit is set.
def stop(task = :default)
  return nil unless tasks.key?(task) && active?(task)
  started = tasks[task][:current]
  time_taken = Time.now.to_f - started.to_f
  tasks[task][:history] << { start: started, stop: Time.now.to_f, time: time_taken }
  tasks[task][:current] = nil
  tasks[task][:history].shift if retention && tasks[task][:history].size > retention
  time_taken
end
|
ruby
|
{
"resource": ""
}
|
q7317
|
RailsSetup.Environment.create_file
|
train
|
# Creates +file+ by copying its '<file>.example' template.
#
# file          - target path; '<file>.example' must exist alongside it.
# name          - NOTE(review): unused in this method body — confirm whether
#                 callers rely on it or it can be dropped.
# requires_edit - when true, prompts the user to edit the new file (opening
#                 $EDITOR if set) and exits the process.
def create_file(file, name, requires_edit=false)
  FileUtils.cp(file + '.example', file)
  if requires_edit
    puts "Update #{file} and run `bundle exec rake setup` to continue".color(:red)
    # hand the file to the user's editor when one is configured
    system(ENV['EDITOR'], file) unless ENV['EDITOR'].blank?
    exit
  end
end
|
ruby
|
{
"resource": ""
}
|
q7318
|
RailsSetup.Environment.find_or_create_file
|
train
|
# Returns +file+ when it already exists; otherwise delegates to
# create_file to copy it from its '.example' template.
#
# file          - path to look up.
# name          - label forwarded to create_file.
# requires_edit - forwarded to create_file.
def find_or_create_file(file, name, requires_edit=false)
  # File.exists? was removed in Ruby 3.2; File.exist? is the supported form.
  if File.exist?(file)
    file
  else
    create_file(file, name, requires_edit)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7319
|
AvoDeploy.Config.inherit_strategy
|
train
|
# Loads the named deployment strategy file from this gem's strategy/
# directory (e.g. :rsync -> strategy/rsync.rb).
#
# strategy - symbol or string naming the strategy.
# Raises RuntimeError when no matching strategy file exists.
def inherit_strategy(strategy)
  AvoDeploy::Deployment.instance.log.debug "Loading deployment strategy #{strategy.to_s}..."
  # strategies live next to this file under strategy/<name>.rb
  strategy_file_path = File.dirname(__FILE__) + "/strategy/#{strategy.to_s}.rb"
  if File.exist?(strategy_file_path)
    require strategy_file_path
  else
    raise RuntimeError, "The requested strategy '#{strategy.to_s}' does not exist"
  end
end
|
ruby
|
{
"resource": ""
}
|
q7320
|
AvoDeploy.Config.task
|
train
|
# DSL entry point: registers a named deployment task with the global task
# manager, forwarding +options+ and the task body +block+.
def task(name, options = {}, &block)
  AvoDeploy::Deployment.instance.log.debug "registering task #{name}..."
  AvoDeploy::Deployment.instance.task_manager.add_task(name, options, &block)
end
|
ruby
|
{
"resource": ""
}
|
q7321
|
AvoDeploy.Config.setup_stage
|
train
|
# DSL entry point: declares a deployment stage. The stage's description
# (options[:desc], '' when absent) is recorded, and the stage body is
# evaluated only when +name+ matches the currently configured :stage.
def setup_stage(name, options = {}, &block)
  stages[name] = options.has_key?(:desc) ? options[:desc] : ''
  return unless name.to_s == get(:stage).to_s
  @loaded_stage = name
  instance_eval(&block)
end
|
ruby
|
{
"resource": ""
}
|
q7322
|
Aims.Vectorize.dot
|
train
|
# Computes the dot (inner) product of two equal-length vectors.
#
# a, b - objects responding to #size and #to_a (Array, Vector, ...).
# Returns the scalar product (0 for empty vectors — the original inject
# with no initial value returned nil in that case).
# Raises RuntimeError when the sizes differ.
def dot(a, b)
  unless a.size == b.size
    raise "Vectors must be the same length"
  end
  # pair elements up and total the products; sum starts at 0, so empty
  # input is handled correctly
  a.to_a.zip(b.to_a).sum { |x, y| x * y }
end
|
ruby
|
{
"resource": ""
}
|
q7323
|
Aims.Vectorize.cross
|
train
|
# Computes the cross product of two 3-element vectors and returns it as a
# Vector. Raises RuntimeError when either argument is not length 3.
def cross(b, c)
  unless b.size == 3 && c.size == 3
    raise "Vectors must be of length 3"
  end
  x = b[1] * c[2] - b[2] * c[1]
  y = b[2] * c[0] - b[0] * c[2]
  z = b[0] * c[1] - b[1] * c[0]
  Vector[x, y, z]
end
|
ruby
|
{
"resource": ""
}
|
q7324
|
Poster.Encoding.xml_encode
|
train
|
# Encodes +string+ for XML output character by character: characters with
# a CONVERTIBLES mapping are replaced by that mapping, characters listed
# in XML_ENTITIES become numeric character references ("&#N;"), and
# everything else passes through unchanged.
#
# Fixes: removed a leftover debug `puts` of the character count, and
# replaced reduce(:+) with join so the empty string returns "" (reduce
# returned nil for empty input).
def xml_encode string
  string.each_char.map do |c|
    if CONVERTIBLES[c]
      CONVERTIBLES[c]
    elsif XML_ENTITIES[c]
      "&##{XML_ENTITIES[c]};"
    else
      c
    end
  end.join
end
|
ruby
|
{
"resource": ""
}
|
q7325
|
ActionCommand.Result.debug
|
train
|
# Logs a debug-kind entry when a logger is configured. The message may be
# passed directly or supplied lazily via a block (evaluated only when
# +msg+ is nil). No-op without a logger.
def debug(msg = nil)
  return unless @logger
  entry = build_log(msg || yield, ActionCommand::LOG_KIND_DEBUG)
  @logger.info(format_log(entry))
end
|
ruby
|
{
"resource": ""
}
|
q7326
|
ActionCommand.Result.info
|
train
|
# Logs an info-kind entry when a logger is configured. The message may be
# passed directly or supplied lazily via a block (evaluated only when
# +msg+ is nil). No-op without a logger.
def info(msg = nil)
  return unless @logger
  entry = build_log(msg || yield, ActionCommand::LOG_KIND_INFO)
  @logger.info(format_log(entry))
end
|
ruby
|
{
"resource": ""
}
|
q7327
|
ActionCommand.Result.error
|
train
|
# Logs an error-kind entry when a logger is configured; no-op otherwise.
def error(msg)
  return unless @logger
  entry = build_log(msg, ActionCommand::LOG_KIND_ERROR)
  @logger.error(format_log(entry))
end
|
ruby
|
{
"resource": ""
}
|
q7328
|
ActionCommand.Result.push
|
train
|
# Pushes a nested result scope for a subcommand.
#
# key - hash key under which the subcommand's values are stored; a nil key
#       is a no-op.
# cmd - the subcommand, recorded on the logging stack only when a logger
#       is present.
#
# Reuses the existing sub-hash when the current scope already has +key+,
# otherwise installs a fresh hash both as the new scope and as
# current[key].
def push(key, cmd)
  return unless key
  old_cur = current
  if old_cur.key?(key)
    @values << old_cur[key]
  else
    @values << {}
    # link the new scope into its parent so results surface in the output
    old_cur[key] = @values.last
  end
  @stack << { key: key, cmd: cmd } if @logger
end
|
ruby
|
{
"resource": ""
}
|
q7329
|
ActionCommand.Result.log_input
|
train
|
# Logs the command's input parameters, omitting internal bookkeeping keys.
# No-op without a logger.
def log_input(params)
  return unless @logger
  visible = params.reject { |key, _value| internal_key?(key) }
  log_info_hash(visible, ActionCommand::LOG_KIND_COMMAND_INPUT)
end
|
ruby
|
{
"resource": ""
}
|
q7330
|
ActionCommand.Result.log_output
|
train
|
# Logs the command's top-level output values. Nested hashes are skipped
# because subcommands log their own output; internal keys are skipped too.
# No-op without a logger.
def log_output
  return unless @logger
  top_level = current.reject do |key, value|
    value.is_a?(Hash) || internal_key?(key)
  end
  log_info_hash(top_level, ActionCommand::LOG_KIND_COMMAND_OUTPUT)
end
|
ruby
|
{
"resource": ""
}
|
q7331
|
Triglav::Agent.Status.merge!
|
train
|
# Merges a value into the status storage file. The last argument is the
# value; everything before it forms the key path, prefixed by this
# status's parent keys.
def merge!(*args)
  value = args.pop
  key_path = [*@parents, *args.flatten]
  StorageFile.merge!(path, key_path, value)
end
|
ruby
|
{
"resource": ""
}
|
q7332
|
Mycroft.Helpers.parse_message
|
train
|
# Parses a raw Mycroft protocol message.
#
# Accepted forms: "TYPE {json payload}" or a bare "TYPE" (uppercase and
# underscores only). Returns {type:, data:} where data is {} when no
# payload is present. Raises when the message matches neither form.
def parse_message(msg)
  msg = msg.to_s
  if (m = /([A-Z_]+) ({.*})$/.match(msg))
    { type: m[1], data: JSON.parse(m[2]) }
  elsif (m = /^([A-Z_]+)$/.match(msg))
    { type: m[1], data: {} }
  else
    raise "Error: Malformed Message"
  end
end
|
ruby
|
{
"resource": ""
}
|
q7333
|
Mycroft.Helpers.send_message
|
train
|
# Serializes and frames a message for the Mycroft connection.
#
# type    - message type string (e.g. "APP_UP").
# message - optional payload; serialized with #to_json when present.
#
# Wire format: "<byte length>\n<TYPE> <json>" (no trailing space when
# there is no payload, thanks to strip).
#
# Fix: the original ternary contained a redundant nested assignment
# (`message = message.nil? ? message = '' : ...`) — the inner `message =`
# was dead and misleading.
def send_message(type, message = nil)
  payload = message.nil? ? '' : message.to_json
  body = "#{type} #{payload}".strip
  @client.write("#{body.bytesize}\n#{body}")
end
|
ruby
|
{
"resource": ""
}
|
q7334
|
Triglav::Agent.ApiClient.list_aggregated_resources
|
train
|
# Fetches the aggregated resources under +uri_prefix+ from the Triglav
# API, routing the call through handle_error for uniform error handling.
def list_aggregated_resources(uri_prefix)
  $logger.debug { "ApiClient#list_aggregated_resources(#{uri_prefix.inspect})" }
  resources_api = TriglavClient::ResourcesApi.new(@api_client)
  handle_error { resources_api.list_aggregated_resources(uri_prefix) }
end
|
ruby
|
{
"resource": ""
}
|
q7335
|
ExactTargetSDK.APIObject.render_properties!
|
train
|
# Renders every explicitly-set property of this object into +xml+.
# Properties whose @_set_<name> flag is falsy were never assigned and are
# skipped, so only caller-provided values are serialized.
def render_properties!(xml)
  self.class.properties.each do |property, options|
    # skip properties that were never explicitly set on this instance
    next unless instance_variable_get("@_set_#{property}")
    property_value = self.send(property)
    render_property!(property, property_value, xml, options)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7336
|
IRCSupport.Parser.compose
|
train
|
# Composes a raw IRC protocol line from a line object (prefix, command,
# args).
#
# Only the final argument may contain spaces; when it does it is prefixed
# with ':' per the IRC protocol. Raises ArgumentError when the command is
# missing or a non-final argument contains spaces.
def compose(line)
  raise ArgumentError, "You must specify a command" if !line.command
  raw_line = ''
  raw_line << ":#{line.prefix} " if line.prefix
  raw_line << line.command
  if line.args
    line.args.each_with_index do |arg, idx|
      raw_line << ' '
      # @@space is the class-level whitespace-matching regex
      if idx != line.args.count-1 and arg.match(@@space)
        raise ArgumentError, "Only the last argument may contain spaces"
      end
      if idx == line.args.count-1
        # IRC marks a trailing argument containing spaces with a ':' prefix
        raw_line << ':' if arg.match(@@space)
      end
      raw_line << arg
    end
  end
  return raw_line
end
|
ruby
|
{
"resource": ""
}
|
q7337
|
IRCSupport.Parser.parse
|
train
|
# Parses a raw IRC line into a typed IRCSupport::Message object and
# updates parser state (ISUPPORT, enabled capabilities) as a side effect.
#
# Dispatch rules, in order:
#   * PRIVMSG/NOTICE whose second arg contains \x01 -> CTCP handling
#   * three-digit commands -> Numeric<NNN> (falling back to Numeric)
#   * MODE -> channel vs user mode change, decided by CHANTYPES
#   * NOTICE without a user prefix (no '!') -> ServerNotice
#   * CAP LS/LIST/ACK -> the matching CAP subclass
#   * anything else -> Message::<Command> (falling back to Message)
def parse(raw_line)
  line = decompose(raw_line)
  # CTCP messages are embedded in PRIVMSG/NOTICE delimited by \x01
  if line.command =~ /^(PRIVMSG|NOTICE)$/ && line.args[1] =~ /\x01/
    return handle_ctcp_message(line)
  end
  msg_class = case
  when line.command =~ /^\d{3}$/
    begin
      constantize("IRCSupport::Message::Numeric#{line.command}")
    rescue
      # no specialized class for this numeric; use the generic one
      constantize("IRCSupport::Message::Numeric")
    end
  when line.command == "MODE"
    # first char of the target decides channel vs user mode
    if @isupport['CHANTYPES'].include?(line.args[0][0])
      constantize("IRCSupport::Message::ChannelModeChange")
    else
      constantize("IRCSupport::Message::UserModeChange")
    end
  when line.command == "NOTICE" && (!line.prefix || line.prefix !~ /!/)
    constantize("IRCSupport::Message::ServerNotice")
  when line.command == "CAP" && %w{LS LIST ACK}.include?(line.args[0])
    constantize("IRCSupport::Message::CAP::#{line.args[0]}")
  else
    begin
      constantize("IRCSupport::Message::#{line.command.capitalize}")
    rescue
      constantize("IRCSupport::Message")
    end
  end
  message = msg_class.new(line, @isupport, @capabilities)
  # state updates driven by the parsed message
  case message.type
  when :'005'
    # RPL_ISUPPORT: merge advertised server capabilities
    @isupport.merge! message.isupport
  when :cap_ack
    # track capability enable/disable acknowledgements
    message.capabilities.each do |capability, options|
      if options.include?(:disable)
        @capabilities = @capabilities - [capability]
      elsif options.include?(:enable)
        @capabilities = @capabilities + [capability]
      end
    end
  end
  return message
end
|
ruby
|
{
"resource": ""
}
|
q7338
|
Sumac.Handshake.send_initialization_message
|
train
|
# Sends the handshake initialization message, carrying this side's entry
# object converted to wire properties.
def send_initialization_message
  properties = @connection.objects.convert_object_to_properties(@connection.local_entry)
  message = Messages::Initialization.build(entry: properties)
  @connection.messenger.send(message)
end
|
ruby
|
{
"resource": ""
}
|
q7339
|
Sumac.Handshake.process_initialization_message
|
train
|
# Handles the peer's initialization message: reconstructs the remote entry
# object from its wire properties and records it on the connection.
def process_initialization_message(message)
  remote = @connection.objects.convert_properties_to_object(message.entry)
  @connection.remote_entry.set(remote)
end
|
ruby
|
{
"resource": ""
}
|
q7340
|
StripeWebhooks.Callback.run_once
|
train
|
# Runs this callback for +event+ exactly once: skips when a
# PerformedCallback record already exists for this event/label pair, and
# records one after a successful run.
def run_once(event)
  return if StripeWebhooks::PerformedCallback.exists?(stripe_event_id: event.id, label: label)
  run(event)
  StripeWebhooks::PerformedCallback.create(stripe_event_id: event.id, label: label)
end
|
ruby
|
{
"resource": ""
}
|
q7341
|
Jinx.Migrator.migrate
|
train
|
# Migrates the input records, yielding each migrated target and its source
# row to +block+. Without a block, a pass-through block is used.
#
# When an extract is configured, the method recurses with the extract
# opened as an IO (CsvIO when headers are configured, plain File
# otherwise) and wraps the caller's block so each target is written to the
# extract after the block runs. The recursion terminates because @extract
# is cleared (or becomes an IO) before each recursive call.
def migrate(&block)
  unless block_given? then
    # default block: pass the target through unchanged
    return migrate { |tgt, row| tgt }
  end
  # If there is an extract, then wrap the migration in an extract
  # writer block.
  if @extract then
    if String === @extract then
      # the extract is still a file name: open it and recurse with the IO
      logger.debug { "Opening migration extract #{@extract}..." }
      FileUtils::mkdir_p(File.dirname(@extract))
      if @extract_hdrs then
        logger.debug { "Migration extract headers: #{@extract_hdrs.join(', ')}." }
        CsvIO.open(@extract, :mode => 'w', :headers => @extract_hdrs) do |io|
          @extract = io
          return migrate(&block)
        end
      else
        File.open(@extract, 'w') do |io|
          @extract = io
          return migrate(&block)
        end
      end
    end
    # Copy the extract into a local variable and clear the extract i.v.
    # prior to a recursive call with an extract writer block.
    io, @extract = @extract, nil
    return migrate do |tgt, row|
      res = yield(tgt, row)
      tgt.extract(io)
      res
    end
  end
  begin
    migrate_rows(&block)
  ensure
    # always release the rejects file and undo the per-migration methods
    @rejects.close if @rejects
    remove_migration_methods
  end
end
|
ruby
|
{
"resource": ""
}
|
q7342
|
Jinx.Migrator.remove_migration_methods
|
train
|
# Tears down the methods that were dynamically defined for this migration
# run so repeated migrations start clean.
#
# The while loops walk up the ancestor chain: a method can be defined on
# several modules/classes, and removal must repeat until no definition
# remains (or, for #migrate, until only the Migratable base definition is
# left).
def remove_migration_methods
  # remove the migrate_<attribute> methods
  @mgt_mths.each do | klass, hash|
    hash.each_value do |sym|
      while klass.method_defined?(sym)
        # remove from whichever module actually owns the definition
        klass.instance_method(sym).owner.module_eval { remove_method(sym) }
      end
    end
  end
  # remove the migrate method
  @creatable_classes.each do |klass|
    # strip overrides below the Migratable base implementation
    while (k = klass.instance_method(:migrate).owner) < Migratable
      k.module_eval { remove_method(:migrate) }
    end
  end
  # remove the target extract method
  remove_extract_method(@target) if @extract
end
|
ruby
|
{
"resource": ""
}
|
q7343
|
Jinx.Migrator.load_shims
|
train
|
# Loads each migration shim file with Kernel#load (not require, so edited
# shims are re-read on subsequent runs), logging each file as it loads.
def load_shims(files)
  logger.debug { "Loading the migration shims with load path #{$:.pp_s}..." }
  files.enumerate do |file|
    load file
    logger.info { "The migrator loaded the shim file #{file}." }
  end
end
|
ruby
|
{
"resource": ""
}
|
q7344
|
Jinx.Migrator.migrate_rows
|
train
|
# Drives the row-by-row migration: reads each input row, migrates it to a
# target object and yields (target, row) to the caller's block.
#
# Row windowing: rows before @from are skipped, and the run ends when @to
# is reached. Failed or empty migrations are written to the rejects file
# when one is configured (otherwise an error is raised/propagated).
def migrate_rows
  # open an CSV output for rejects if the bad option is set
  if @bad_file then
    @rejects = open_rejects(@bad_file)
    logger.info("Unmigrated records will be written to #{File.expand_path(@bad_file)}.")
  end
  @rec_cnt = mgt_cnt = 0
  logger.info { "Migrating #{@input}..." }
  puts "Migrating #{@input}..." if @verbose
  @reader.each do |row|
    # the one-based current record number
    rec_no = @rec_cnt + 1
    # skip if the row precedes the from option
    if rec_no == @from and @rec_cnt > 0 then
      logger.info("Skipped the initial #{@rec_cnt} records.")
    elsif rec_no == @to then
      logger.info("Ending the migration after processing record #{@rec_cnt}.")
      return
    elsif rec_no < @from then
      @rec_cnt += 1
      next
    end
    begin
      # migrate the row
      logger.debug { "Migrating record #{rec_no}..." }
      tgt = migrate_row(row)
      # call the block on the migrated target
      if tgt then
        logger.debug { "The migrator built #{tgt} with the following content:\n#{tgt.dump}" }
        yield(tgt, row)
      end
    rescue Exception => e
      logger.error("Migration error on record #{rec_no} - #{e.message}:\n#{e.backtrace.pp_s}")
      # If there is a reject file, then don't propagate the error.
      raise unless @rejects
      # try to clear the migration state
      clear(tgt) rescue nil
      # clear the target
      tgt = nil
    end
    if tgt then
      # replace the log message below with the commented alternative to detect a memory leak
      logger.info { "Migrated record #{rec_no}." }
      #memory_usage = `ps -o rss= -p #{Process.pid}`.to_f / 1024 # in megabytes
      #logger.debug { "Migrated rec #{@rec_cnt}; memory usage: #{sprintf("%.1f", memory_usage)} MB." }
      mgt_cnt += 1
      if @verbose then print_progress(mgt_cnt) end
      # clear the migration state
      clear(tgt)
    elsif @rejects then
      # If there is a rejects file then warn, write the reject and continue.
      logger.warn("Migration not performed on record #{rec_no}.")
      @rejects << row
      @rejects.flush
      logger.debug("Invalid record #{rec_no} was written to the rejects file #{@bad_file}.")
    else
      raise MigrationError.new("Migration not performed on record #{rec_no}")
    end
    # Bump the record count.
    @rec_cnt += 1
  end
  logger.info("Migrated #{mgt_cnt} of #{@rec_cnt} records.")
  if @verbose then
    puts
    puts "Migrated #{mgt_cnt} of #{@rec_cnt} records."
  end
end
|
ruby
|
{
"resource": ""
}
|
q7345
|
Jinx.Migrator.open_rejects
|
train
|
# Opens the rejects CSV for writing, creating the parent directory first.
# Headers are written and symbolized so reject rows mirror the input.
# NOTE(review): FasterCSV is the pre-1.9 CSV gem; on modern Ruby the
# stdlib CSV class supersedes it — confirm the project's target Ruby.
def open_rejects(file)
  # Make the parent directory.
  FileUtils.mkdir_p(File.dirname(file))
  # Open the file.
  FasterCSV.open(file, 'w', :headers => true, :header_converters => :symbol, :write_headers => true)
end
|
ruby
|
{
"resource": ""
}
|
q7346
|
Jinx.Migrator.migrate_row
|
train
|
# Migrates a single input row into the target domain object.
#
# Creates one instance per creatable class, migrates each from the row,
# filters to the valid migrated objects, and returns the single object of
# @target_class (nil when none). Raises MigrationError when the row
# yields more than one candidate target.
def migrate_row(row)
  # create an instance for each creatable class
  created = Set.new
  # the migrated objects
  migrated = @creatable_classes.map { |klass| create_instance(klass, row, created) }
  # migrate each object from the input row
  migrated.each do |obj|
    # First uniquify the object if necessary.
    if @unique and Unique === obj then
      logger.debug { "The migrator is making #{obj} unique..." }
      obj.uniquify
    end
    obj.migrate(row, migrated)
  end
  # the valid migrated objects
  @migrated = migrate_valid_references(row, migrated)
  # the candidate target objects
  tgts = @migrated.select { |obj| @target_class === obj }
  if tgts.size > 1 then
    raise MigrationError.new("Ambiguous #{@target_class} targets #{tgts.to_series}")
  end
  # `|| return` exits with nil when the row produced no target
  target = tgts.first || return
  logger.debug { "Migrated target #{target}." }
  target
end
|
ruby
|
{
"resource": ""
}
|
q7347
|
Jinx.Migrator.migrate_valid_references
|
train
|
# Filters the migrated objects down to the valid ones, wiring up their
# references, and returns that valid subset.
#
# Three passes:
#  1. partition into valid/invalid in reverse dependency order (a
#     dependent's invalidation can invalidate its owner), migrating
#     references on the valid ones;
#  2. invalidate valid dependents whose owner turned out invalid;
#  3. invalidate owners that were created solely to hold dependents that
#     were all subsequently invalidated.
def migrate_valid_references(row, migrated)
  # Split the valid and invalid objects. The iteration is in reverse dependency order,
  # since invalidating a dependent can invalidate the owner.
  ordered = migrated.transitive_closure(:dependents)
  ordered.keep_if { |obj| migrated.include?(obj) }.reverse!
  valid, invalid = ordered.partition do |obj|
    if migration_valid?(obj) then
      obj.migrate_references(row, migrated, @target_class, @attr_flt_hash[obj.class])
      true
    else
      # detach the invalid object from its owners
      obj.class.owner_attributes.each { |pa| obj.clear_attribute(pa) }
      false
    end
  end
  # Go back through the valid objects in dependency order to invalidate dependents
  # whose owner is invalid.
  valid.reverse.each do |obj|
    unless owner_valid?(obj, valid, invalid) then
      invalid << valid.delete(obj)
      logger.debug { "The migrator invalidated #{obj} since it does not have a valid owner." }
    end
  end
  # Go back through the valid objects in reverse dependency order to invalidate owners
  # created only to hold a dependent which was subsequently invalidated.
  valid.reject do |obj|
    if @owners.include?(obj.class) and obj.dependents.all? { |dep| invalid.include?(dep) } then
      # clear all references from the invalidated owner
      obj.class.domain_attributes.each { |pa| obj.clear_attribute(pa) }
      invalid << obj
      logger.debug { "The migrator invalidated #{obj.qp} since it was created solely to hold subsequently invalidated dependents." }
      true
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7348
|
Jinx.Migrator.create_instance
|
train
|
# Builds a new +klass+ instance for the given input row: registers it in
# +created+, migrates its mapped properties from the row and applies
# configured defaults. Returns the new object.
def create_instance(klass, row, created)
  logger.debug { "The migrator is building #{klass.qp}..." }
  obj = klass.new
  created << obj
  migrate_properties(obj, row, created)
  add_defaults(obj, row, created)
  logger.debug { "The migrator built #{obj}." }
  obj
end
|
ruby
|
{
"resource": ""
}
|
q7349
|
Jinx.Migrator.migrate_properties
|
train
|
# Sets each migratable attribute of +obj+ from the input row.
#
# For every attribute metadata path mapped for obj's class, reads the row
# value under the mapped header, skips nil values, materializes the
# intermediate reference objects along the path, and assigns the value to
# the final attribute.
def migrate_properties(obj, row, created)
  # for each input header which maps to a migratable target attribute metadata path,
  # set the target attribute, creating intermediate objects as needed.
  @cls_paths_hash[obj.class].each do |path|
    header = @header_map[path][obj.class]
    # the input value
    value = row[header]
    # normalize string input; strip! mutates in place
    value.strip! if String === value
    next if value.nil?
    # fill the reference path
    ref = fill_path(obj, path[0...-1], row, created)
    # set the attribute
    migrate_property(ref, path.last, value, row)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7350
|
Jinx.Migrator.fill_path
|
train
|
# Walks the property +path+ from +obj+, returning the object at the end.
# Each missing intermediate reference is created on demand (via
# create_reference); an empty path returns +obj+ itself.
def fill_path(obj, path, row, created)
  # create the intermediate objects as needed (or return obj if path is empty)
  path.inject(obj) do |parent, prop|
    # the referenced object: reuse the existing reference, else build one
    parent.send(prop.reader) or create_reference(parent, prop, row, created)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7351
|
Jinx.Migrator.create_reference
|
train
|
# Creates a new instance of +property+'s declared type, migrates it from
# the row, assigns it to +obj+ through the property writer and registers
# it in +created+. Returns the new reference.
# Raises MigrationError when the property type is abstract (cannot be
# instantiated).
def create_reference(obj, property, row, created)
  if property.type.abstract? then
    raise MigrationError.new("Cannot create #{obj.qp} #{property} with abstract type #{property.type}")
  end
  ref = property.type.new
  ref.migrate(row, Array::EMPTY_ARRAY)
  obj.send(property.writer, ref)
  created << ref
  logger.debug { "The migrator created #{obj.qp} #{property} #{ref}." }
  ref
end
|
ruby
|
{
"resource": ""
}
|
q7352
|
Jinx.Migrator.load_defaults_files
|
train
|
# Aggregates the class => path => value default entries from every
# defaults file into a single lazily-populated hash and returns it.
def load_defaults_files(files)
  hash = LazyHash.new { Hash.new }
  files.enumerate do |file|
    load_defaults_file(file, hash)
  end
  hash
end
|
ruby
|
{
"resource": ""
}
|
q7353
|
Jinx.Migrator.load_defaults_file
|
train
|
# Loads one YAML defaults file into +hash+ as class => path => value
# entries, skipping nil/empty values.
# Raises MigrationError when the file cannot be read.
def load_defaults_file(file, hash)
  begin
    config = YAML::load_file(file)
  rescue
    # Fix: `"..." + $!` raised TypeError (String#+ cannot coerce an
    # Exception); interpolation formats the error correctly.
    raise MigrationError.new("Could not read defaults file #{file}: #{$!}")
  end
  # collect the class => path => value entries
  config.each do |path_s, value|
    next if value.nil_or_empty?
    klass, path = create_attribute_path(path_s)
    hash[klass][path] = value
  end
end
|
ruby
|
{
"resource": ""
}
|
q7354
|
Jinx.Migrator.load_filter_files
|
train
|
# Aggregates the class => attribute => filter entries from every filter
# file into a single hash, logs the result and returns it.
def load_filter_files(files)
  hash = {}
  files.enumerate do |file|
    load_filter_file(file, hash)
  end
  logger.debug { "The migrator loaded the filters #{hash.qp}." }
  hash
end
|
ruby
|
{
"resource": ""
}
|
q7355
|
Jinx.Migrator.load_filter_file
|
train
|
# Loads one YAML filter file into +hash+ as class => attribute => filter
# entries. Each configuration path must name exactly one property.
# Raises MigrationError when the file cannot be read or a path names zero
# or multiple properties.
def load_filter_file(file, hash)
  # collect the class => attribute => filter entries
  logger.debug { "Loading the migration filter configuration #{file}..." }
  begin
    config = YAML::load_file(file)
  rescue
    # Fix: `"..." + $!` raised TypeError (String#+ cannot coerce an
    # Exception); interpolation formats the error correctly.
    raise MigrationError.new("Could not read filter file #{file}: #{$!}")
  end
  config.each do |path_s, flt|
    next if flt.nil_or_empty?
    klass, path = create_attribute_path(path_s)
    if path.empty? then
      raise MigrationError.new("Migration filter configuration path does not include a property: #{path_s}")
    elsif path.size > 1 then
      raise MigrationError.new("Migration filter configuration path with more than one property is not supported: #{path_s}")
    end
    pa = klass.standard_attribute(path.first.to_sym)
    flt_hash = hash[klass] ||= {}
    flt_hash[pa] = flt
  end
end
|
ruby
|
{
"resource": ""
}
|
q7356
|
Cha.API.create_room
|
train
|
# Creates a chat room via POST /rooms.
# Admin/member/readonly id lists are flattened to comma-separated strings
# before sending; optional member lists are converted in place in +params+.
def create_room(name, members_admin_ids, params = {})
  [:members_member_ids, :members_readonly_ids].each do |key|
    params[key] = array_to_string(params[key]) if params[key]
  end
  payload = params.merge(name: name, members_admin_ids: array_to_string(members_admin_ids))
  post('rooms', payload)
end
|
ruby
|
{
"resource": ""
}
|
q7357
|
Cha.API.update_room_members
|
train
|
# Replaces a room's member lists via PUT /rooms/:id/members.
# Admin/member/readonly id lists are flattened to comma-separated strings
# before sending; optional member lists are converted in place in +params+.
def update_room_members(room_id, members_admin_ids, params = {})
  [:members_member_ids, :members_readonly_ids].each do |key|
    params[key] = array_to_string(params[key]) if params[key]
  end
  payload = params.merge(members_admin_ids: array_to_string(members_admin_ids))
  put("rooms/#{room_id}/members", payload)
end
|
ruby
|
{
"resource": ""
}
|
q7358
|
Cha.API.create_room_task
|
train
|
# Creates a task in a room via POST /rooms/:id/tasks.
# to_ids is flattened to a comma-separated string; an optional :limit
# deadline is converted in place to an integer timestamp.
def create_room_task(room_id, body, to_ids, params = {})
  params[:limit] = time_to_integer(params[:limit]) if params[:limit]
  payload = params.merge(body: body, to_ids: array_to_string(to_ids))
  post("rooms/#{room_id}/tasks", payload)
end
|
ruby
|
{
"resource": ""
}
|
q7359
|
Cha.API.room_file
|
train
|
# Fetches a file's metadata via GET /rooms/:room_id/files/:file_id.
# The optional :create_download_url flag is converted in place from a
# boolean to the 0/1 integer the API expects (nil means "not requested").
def room_file(room_id, file_id, params = {})
  flag = params[:create_download_url]
  params[:create_download_url] = boolean_to_integer(flag) unless flag.nil?
  get("rooms/#{room_id}/files/#{file_id}", params)
end
|
ruby
|
{
"resource": ""
}
|
q7360
|
UkWorkingDays.Easter.easter
|
train
|
# Computes the date of Easter Sunday for +year+ (a computus algorithm).
#
# Years up to 1752 use the Julian calendar rules (the British/UK
# changeover year); later years use the Gregorian rules with the solar
# and lunar corrections. Returns a new date instance (via +new+) for
# March or April of the given year.
def easter(year)
  # position of the year in the 19-year Metonic lunar cycle
  golden_number = (year % 19) + 1
  if year <= 1752 then
    # Julian calendar
    dominical_number = (year + (year / 4) + 5) % 7
    paschal_full_moon = (3 - (11 * golden_number) - 7) % 30
  else
    # Gregorian calendar
    dominical_number = (year + (year / 4) - (year / 100) + (year / 400)) % 7
    solar_correction = (year - 1600) / 100 - (year - 1600) / 400
    lunar_correction = (((year - 1400) / 100) * 8) / 25
    paschal_full_moon = (3 - 11 * golden_number + solar_correction - lunar_correction) % 30
  end
  # normalize both values into positive range
  dominical_number += 7 until dominical_number > 0
  paschal_full_moon += 30 until paschal_full_moon > 0
  # ecclesiastical adjustment for late paschal full moons
  paschal_full_moon -= 1 if paschal_full_moon == 29 or (paschal_full_moon == 28 and golden_number > 11)
  # days from the paschal full moon to the following Sunday
  difference = (4 - paschal_full_moon - dominical_number) % 7
  difference += 7 if difference < 0
  day_easter = paschal_full_moon + difference + 1
  # map the day offset onto March (22-31) or April (1-25)
  day_easter < 11 ? new(year, 3, day_easter + 21) : new(year, 4, day_easter - 10)
end
|
ruby
|
{
"resource": ""
}
|
q7361
|
Konfig.Evaluator.names_by_value
|
train
|
# Builds a value => name lookup from +a+, which is either a mapping (or
# collection of [name, value] pairs) or a String/Symbol key naming one in
# @data.
def names_by_value(a)
  a = @data[a] if a.is_a?(String) || a.is_a?(Symbol)
  pairs = a.map { |entry| entry.reverse }
  Hash[pairs]
end
|
ruby
|
{
"resource": ""
}
|
q7362
|
Halffare.Stats.read
|
train
|
# Reads orders from +filename+ into @orders.
#
# filename - path to the UTF-8 order export.
# months   - when given, only lines whose leading ISO date (chars 0..9)
#            falls within the last N months are kept.
# Orders carrying the file-created note are always skipped.
#
# Fixes: the summary log message contained a broken interpolation that
# printed the literal text "#(unknown)" instead of the file name, and the
# File.open return value was pointlessly captured in an unused local.
def read(filename, months=nil)
  @orders = []
  # cut-off date in ISO form, comparable lexically against line[0,10]
  start = months ? (Date.today << months.to_i).strftime('%Y-%m-%d') : nil
  File.open(filename, "r:UTF-8") do |f|
    while line = f.gets
      order = Halffare::Model::Order.new(line)
      if (start.nil? || line[0,10] >= start) && (order.note != Fetch::ORDER_NOTE_FILE_CREATED)
        @orders.push(order)
      end
    end
  end
  log_info "read #{@orders.length} orders from #{filename}"
  if @orders.length == 0
    if start.nil?
      log_notice "no orders found"
    else
      log_notice "no orders found after #{start}, maybe tweak the --months param"
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7363
|
Halffare.Stats.calculate
|
train
|
# Totals half-fare and full prices over all loaded orders using the named
# pricing strategy.
#
# strategy - strategy name handed to price_factory (e.g. 'guess').
# halffare - whether recorded order prices are assumed to be half-fare.
#
# Side effects: sets @halfprice/@fullprice accumulators and the
# @date_min/@date_max travel-date range; logs per-order details when
# Halffare.debug is enabled.
def calculate(strategy, halffare)
  @halfprice = 0
  @fullprice = 0
  if halffare
    log_info "assuming order prices as half-fare"
  else
    log_info "assuming order prices as full"
  end
  log_notice "please note that you are using a strategy that involves guessing the real price" if ['guess', 'sbbguess'].include? strategy
  # resolve the strategy name into a strategy object
  strategy = price_factory(strategy)
  strategy.halffare = halffare
  log_info "using price strategy: #{strategy.class}"
  price = Price.new(strategy)
  log_info "calculating prices..."
  @date_min = false
  @date_max = false
  @orders.each do |order|
    if Halffare.debug
      log_order(order)
    end
    halfprice, fullprice = price.get(order)
    if Halffare.debug
      if halfprice != 0 && fullprice != 0
        log_result "FOUND: #{order.description} (#{order.price}): half=#{currency(halfprice)}, full=#{currency(fullprice)}"
        if halffare
          log_emphasize "You would pay (full price): #{currency(fullprice)}, you save #{currency(fullprice - order.price)}"
        else
          log_emphasize "You would pay (half-fare): #{currency(halfprice)}, you pay #{currency(order.price - halfprice)} more"
        end
      end
    end
    @halfprice += halfprice
    @fullprice += fullprice
    # track the travel-date range across all orders
    @date_min = order.travel_date if !@date_min || order.travel_date < @date_min
    @date_max = order.travel_date if !@date_max || order.travel_date > @date_max
  end
end
|
ruby
|
{
"resource": ""
}
|
q7364
|
Shells.ShellBase.buffer_output
|
train
|
# Wires the stdout/stderr receive hooks so that incoming data is
# timestamped (last_output), stripped of ANSI escapes and appended to the
# shell's buffers; the optional +block+ is forwarded to the append
# helpers. Raises Shells::NotRunning unless the shell is active.
def buffer_output(&block) #:doc:
  raise Shells::NotRunning unless running?
  # ensure the append helpers always receive a block
  block ||= Proc.new { }
  stdout_received do |data|
    self.last_output = Time.now
    append_stdout strip_ansi_escape(data), &block
  end
  stderr_received do |data|
    self.last_output = Time.now
    append_stderr strip_ansi_escape(data), &block
  end
end
|
ruby
|
{
"resource": ""
}
|
q7365
|
Shells.ShellBase.push_buffer
|
train
|
# Saves the current stdout/stderr/output buffers onto the stack and
# resets them, so the next command's output can be captured in isolation.
# Raises Shells::NotRunning unless the shell is active.
def push_buffer
  raise Shells::NotRunning unless running?
  # push the buffer so we can get the output of a command.
  debug 'Pushing buffer >>'
  sync do
    output_stack.push([stdout, stderr, output])
    self.stdout = ''
    self.stderr = ''
    self.output = ''
  end
end
|
ruby
|
{
"resource": ""
}
|
q7366
|
Shells.ShellBase.pop_merge_buffer
|
train
|
# Pops the saved buffers and prepends them to the current ones, merging
# history with the output captured since the matching push.
# Raises Shells::NotRunning unless the shell is active.
def pop_merge_buffer
  raise Shells::NotRunning unless running?
  # almost a standard pop, however we want to merge history with current.
  debug 'Merging buffer <<'
  sync do
    hist_stdout, hist_stderr, hist_output = (output_stack.pop || [])
    self.stdout = hist_stdout + stdout if hist_stdout
    self.stderr = hist_stderr + stderr if hist_stderr
    self.output = hist_output + output if hist_output
  end
end
|
ruby
|
{
"resource": ""
}
|
q7367
|
Shells.ShellBase.pop_discard_buffer
|
train
|
# Pops the saved buffers, discarding anything captured since the matching
# push and restoring the saved state ('' when the stack is empty).
# Raises Shells::NotRunning unless the shell is active.
def pop_discard_buffer
  raise Shells::NotRunning unless running?
  # a standard pop discarding current data and retrieving the history.
  debug 'Discarding buffer <<'
  sync do
    saved = output_stack.pop || []
    self.stdout = saved[0] || ''
    self.stderr = saved[1] || ''
    self.output = saved[2] || ''
  end
end
|
ruby
|
{
"resource": ""
}
|
q7368
|
Yummi.BlockHandler.block_call
|
train
|
# Invokes +block+ with arguments looked up from +context+ by the block's
# own parameter names (each entry of Proc#parameters is [kind, name]).
def block_call(context, &block)
  args = block.parameters.map { |param| context[param[1]] }
  block.call(*args)
end
|
ruby
|
{
"resource": ""
}
|
q7369
|
Yummi.GroupedComponent.call
|
train
|
# Sends the configured @message to each component in turn, forwarding
# +args+. When @call_all is falsy, iteration stops at the first truthy
# result; otherwise every component is called. Returns the last result
# obtained.
def call (*args)
  result = nil
  @components.each do |component|
    # short-circuit once a component produced a result, unless call_all
    break if result and not @call_all
    result = component.send @message, *args
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q7370
|
CoinOp::Bit.Input.script_sig=
|
train
|
# Sets the input's script signature from a raw +blob+: caches its string
# rendering in @script_sig and stores the raw blob on the native input.
def script_sig=(blob)
  # This is only a setter because of the initial choice to do things
  # eagerly. Can become an attr_accessor when we move to lazy eval.
  script = Script.new(:blob => blob)
  @script_sig = script.to_s
  @native.script_sig = blob
end
|
ruby
|
{
"resource": ""
}
|
q7371
|
Bade.Parser.append_node
|
train
|
# Creates an AST node of +type+ at the given indent level, attaches it as
# a child of the current parent at that level, and returns it. When +add+
# is true the node also becomes a parent for subsequently appended nodes.
def append_node(type, indent: @indents.length, add: false, value: nil)
  # Grow the stack until it can hold the requested indent level.
  @stacks << @stacks.last.dup while indent >= @stacks.length
  node = AST::NodeRegistrator.create(type, @lineno)
  @stacks[indent].last.children << node
  node.value = value unless value.nil?
  @stacks[indent] << node if add
  node
end
|
ruby
|
{
"resource": ""
}
|
q7372
|
HotTub.KnownClients.clean_client
|
train
|
# Runs the configured clean action (if any) against the client, logging
# but swallowing any error it raises. Always returns the client.
def clean_client(clnt)
  return clnt unless @clean_client
  begin
    perform_action(clnt,@clean_client)
  rescue => err
    HotTub.logger.error "[HotTub] There was an error cleaning one of your #{self.class.name} clients: #{err}" if HotTub.logger
  end
  clnt
end
|
ruby
|
{
"resource": ""
}
|
q7373
|
HotTub.KnownClients.close_client
|
train
|
# Closes a client, lazily resolving the close action the first time via
# known_client_action. Errors from the close action are logged and
# swallowed. Always returns nil.
def close_client(clnt)
  @close_client = (known_client_action(clnt,:close) || false) if @close_client.nil?
  return nil unless @close_client
  begin
    perform_action(clnt,@close_client)
  rescue => err
    HotTub.logger.error "[HotTub] There was an error closing one of your #{self.class.name} clients: #{err}" if HotTub.logger
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q7374
|
HotTub.KnownClients.reap_client?
|
train
|
# Asks the configured reap action whether the client should be reaped.
# Returns false when no reap action is configured or when the action
# raises (the error is logged); otherwise returns the action's result.
def reap_client?(clnt)
  return false unless @reap_client
  begin
    perform_action(clnt,@reap_client)
  rescue => err
    HotTub.logger.error "[HotTub] There was an error reaping one of your #{self.class.name} clients: #{err}" if HotTub.logger
    false
  end
end
|
ruby
|
{
"resource": ""
}
|
q7375
|
Bebox.CommandsHelper.get_environment
|
train
|
# Resolves the target environment from CLI options, prompting the user to
# choose one when the --environment flag was not given. Returns the
# environment name, or exits with an error if it does not exist.
def get_environment(options)
  environment = options[:environment] || choose_option(Environment.list(project_root), _('cli.choose_environment'))
  return environment if Bebox::Environment.environment_exists?(project_root, environment)
  exit_now!(error(_('cli.not_exist_environment')%{environment: environment}))
end
|
ruby
|
{
"resource": ""
}
|
q7376
|
Bebox.CommandsHelper.default_environment
|
train
|
# Picks a sensible default environment: 'vagrant' when present, otherwise
# the first known environment, or '' when the project has none.
def default_environment
  environments = Bebox::Environment.list(project_root)
  return '' if environments.count.zero?
  environments.include?('vagrant') ? 'vagrant' : environments.first
end
|
ruby
|
{
"resource": ""
}
|
q7377
|
Formatafacil.ArtigoTarefa.converte_configuracao_para_latex
|
train
|
# Converts the article's markdown fields (resumo, abstract, bibliografia)
# to LaTeX by piping each through pandoc, storing the results in
# @artigo_latex; all other fields are merged in unchanged.
def converte_configuracao_para_latex
  @artigo_latex.merge!(@artigo)
  ['resumo','abstract','bibliografia'].each {|key|
    Open3.popen3("pandoc --smart -f markdown -t latex --no-wrap") {|stdin, stdout, stderr, wait_thr|
      # (removed unused `pid = wait_thr.pid` dead assignment)
      stdin.write @artigo[key]
      stdin.close
      # NOTE(review): stderr is never drained; a very verbose pandoc run
      # could block on a full pipe. Fine for short fields — verify if
      # inputs grow large.
      @artigo_latex[key] = stdout.read
    }
  }
end
|
ruby
|
{
"resource": ""
}
|
q7378
|
Formatafacil.ArtigoTarefa.salva_configuracao_yaml_para_inclusao_em_pandoc
|
train
|
# Writes @artigo_latex as a YAML metadata block to @arquivo_saida_yaml:
# a leading newline, the YAML dump (which starts with "---"), and a
# closing "---" line, ready to be prepended to a pandoc input.
def salva_configuracao_yaml_para_inclusao_em_pandoc
  File.open(@arquivo_saida_yaml, 'w') do |file|
    file << "\n" << @artigo_latex.to_yaml << "---\n"
  end
end
|
ruby
|
{
"resource": ""
}
|
q7379
|
Highwatermark.HighWaterMark.last_records
|
train
|
# Returns the stored high-water mark for +tag+ from whichever backend
# @state_type selects: the in-memory/file @data hash, or redis. Redis
# errors are printed and yield nil; unknown state types also yield nil.
def last_records(tag=@state_tag)
  case @state_type
  when 'file', 'memory'
    @data['last_records'][tag]
  when 'redis'
    begin
      @redis.get(tag)
    rescue Exception => e
      # NOTE: rescuing Exception (not StandardError) preserved from original.
      puts e.message
      puts e.backtrace.inspect
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7380
|
Montage.QueryParser.get_query_operator
|
train
|
# Looks up the operator used in a query clause and returns a pair of
# [raw operator, montage operator name].
def get_query_operator(part)
  found = Montage::Operators.find_operator(part)
  [found.operator, found.montage_operator]
end
|
ruby
|
{
"resource": ""
}
|
q7381
|
Montage.QueryParser.parse_part
|
train
|
# Coerces one side of a query clause into its Ruby value: integer, float,
# parenthesized list (via to_array), an already-parsed JSON array, or a
# quote-stripped string.
def parse_part(part)
  term = JSON.parse(part) rescue part
  return term.to_i if is_i?(term)
  return term.to_f if is_f?(term)
  return to_array(term) if term =~ /\(.*\)/
  return term if term.is_a?(Array)
  term.gsub(/('|')/, "")
end
|
ruby
|
{
"resource": ""
}
|
q7382
|
Montage.QueryParser.get_parts
|
train
|
# Splits a query clause into [column name, montage operator, parsed
# value], raising QueryError when the operator or column cannot be
# determined.
def get_parts(str)
  operator, montage_operator = get_query_operator(str)
  raise QueryError, "Invalid Montage query operator!" unless montage_operator
  column_name = get_column_name(str, operator)
  raise QueryError, "Your query has an undetermined error" unless column_name
  [column_name, montage_operator, parse_part(parse_condition_set(str, operator))]
end
|
ruby
|
{
"resource": ""
}
|
q7383
|
Montage.QueryParser.parse_hash
|
train
|
# Normalizes a hash query into [key, value] pairs with string keys,
# wrapping array values in a "$in" clause.
def parse_hash
  query.map do |key, value|
    [key.to_s, value.is_a?(Array) ? ["$in", value] : value]
  end
end
|
ruby
|
{
"resource": ""
}
|
q7384
|
Montage.QueryParser.parse_string
|
train
|
# Splits a string query on top-level "and" (quote-aware) and converts
# each clause into [column, value] or [column, [operator, value]].
def parse_string
  clauses = query.split(/\band\b(?=(?:[^']|'[^']*')*$)/i)
  clauses.map do |clause|
    column, op, value = get_parts(clause)
    next ["#{column}", value] if op == ""
    ["#{column}", ["#{op}", value]]
  end
end
|
ruby
|
{
"resource": ""
}
|
q7385
|
Montage.QueryParser.to_array
|
train
|
# Turns a parenthesized, comma-separated literal like "(1, 2, 3)" into an
# array, coercing every element to the type detected from the first one
# (integer, float, or string as the fallback).
def to_array(value)
  items = value.gsub(/('|\(|\))/, "").split(',')
  kind = [:is_i?, :is_f?].find(Proc.new { :is_s? }) { |probe| send(probe, items.first) }
  items.map { |item| item.send(TYPE_MAP[kind]) }
end
|
ruby
|
{
"resource": ""
}
|
q7386
|
Evvnt.NestedResources.belongs_to
|
train
|
# Registers +parent_resource+ (symbolized) as a parent of this resource,
# ignoring duplicates.
def belongs_to(parent_resource)
  resource = parent_resource.to_sym
  parent_resources << resource unless parent_resources.include?(resource)
end
|
ruby
|
{
"resource": ""
}
|
q7387
|
KineticRuby.Client.receive
|
train
|
# Reads up to +max_len+ bytes (default 1024) from the client socket.
# Retries when the socket is not yet readable; any other error is logged
# unless it looks like a normal disconnect. Returns the received data, or
# '' when the client has disconnected.
def receive(max_len=nil)
  max_len ||= 1024
  begin
    data = @socket.recv(max_len)
  rescue IO::WaitReadable
    # Socket not readable yet: wait for it and try again.
    @logger.logv 'Retrying receive...'
    IO.select([@socket])
    retry
  rescue Exception => e
    # BUG FIX: the original compared `e.class != 'IOError'` — a Class
    # against a String, which is always true, so the class guard was dead.
    # Compare the class *name* so a closed-stream IOError (a normal
    # disconnect) is not logged as an exception.
    if e.class.to_s != 'IOError' && e.message != 'closed stream'
      @logger.log_exception(e, 'EXCEPTION during receive!')
    end
  end
  if (data.nil? || data.empty?)
    @logger.log "Client #{@socket.inspect} disconnected!"
    data = ''
  else
    @logger.log "Received #{data.length} bytes"
  end
  return data
end
|
ruby
|
{
"resource": ""
}
|
q7388
|
Incline.IpAddressValidator.validate_each
|
train
|
# Validates that +value+ is a well-formed IP address, optionally
# enforcing absence (:no_mask) or presence (:require_mask) of a /mask
# suffix. Blank values are accepted; errors are added to the record.
def validate_each(record, attribute, value)
  return if value.blank?
  IPAddr.new(value)   # raises IPAddr::InvalidAddressError on malformed input
  masked = value =~ /\//
  if options[:no_mask]
    record.errors[attribute] << (options[:message] || 'must not contain a mask') if masked
  elsif options[:require_mask]
    record.errors[attribute] << (options[:message] || 'must contain a mask') unless masked
  end
rescue IPAddr::InvalidAddressError
  record.errors[attribute] << (options[:message] || 'is not a valid IP address')
end
|
ruby
|
{
"resource": ""
}
|
q7389
|
ActiveResource.Base.load_attributes_from_response
|
train
|
# Populates this resource's attributes from an HTTP response, but only
# when the response actually carries a usable body: chunked transfer
# encoding or a non-zero Content-Length, plus a non-blank body string.
# Marks the record persisted once loaded.
def load_attributes_from_response(response)
  if (response['Transfer-Encoding'] == 'chunked' || (!response['Content-Length'].blank? && response['Content-Length'] != "0")) && !response.body.nil? && response.body.strip.size > 0
    load( self.class.format.decode(response.body)[self.class.element_name] )
    # Work around doubly-nested collection items: each item arrives wrapped
    # in an extra single-entry attribute hash, so keep only the inner value.
    if self.respond_to?(:items)
      new_items = []
      self.items.each { |item| new_items << item.attributes.first[1] }
      self.items = new_items
    end
    @persisted = true
  end
end
|
ruby
|
{
"resource": ""
}
|
q7390
|
EncryptedStore.Config.method_missing
|
train
|
# Dynamic config access: `foo = x` stores a value, `foo(x)` / `foo { }`
# adds an entry, `foo?` tests presence (boolean), and a bare `foo`
# returns (or creates) a nested namespace.
def method_missing(meth, *args, &block)
  name = meth.to_s
  if name =~ /^(\w+)\=$/
    _set($1, *args, &block)
  elsif !args.empty? || block_given?
    _add(meth, *args, &block)
  elsif name =~ /^(\w+)\?$/
    !!_get($1)
  else
    _get_or_create_namespace(meth)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7391
|
NRSER.AttrError.default_message
|
train
|
# Builds the error message from whichever detail attributes are present
# (value/name, expected, actual), joining them with commas; falls back to
# the superclass default when none are set.
def default_message
  parts = []
  if value? && name?
    parts << format_message( value.class, "object", value.inspect,
      "has invalid ##{ name } attribute" )
  end
  parts << format_message( "expected", expected ) if expected?
  parts << format_message( "found", actual ) if actual?
  parts.empty? ? super : parts.join(', ')
end
|
ruby
|
{
"resource": ""
}
|
q7392
|
AwsSnsManager.Client.message
|
train
|
# Builds the JSON payload for a push notification of the given +type+
# (:normal, :silent or :nosound) for the :prod or :dev environment.
# An unrecognized type results in a nil payload, as before.
def message(text, options = {}, env = :prod, type = :normal)
  data =
    case type
    when :normal  then normal_notification(text, options)
    when :silent  then silent_notification(text, options)
    when :nosound then nosound_notification(text, options)
    end
  env == :dev ? dev_json(data) : prod_json(data)
end
|
ruby
|
{
"resource": ""
}
|
q7393
|
RightAMQP.BrokerClient.subscribe
|
train
|
# Subscribes a queue (optionally bound to one or two exchanges) on this
# broker and delivers each received message to the given block via
# #receive.
#
# queue    - hash with :name and optional :options for queue creation
# exchange - optional hash with :type, :name, :options to bind against
# options  - :exchange2 (second exchange hash), :key (binding key), :ack,
#            :fiber_pool, plus options forwarded to #receive
# block    - required; invoked per received message
#
# Returns false when the broker is unusable or subscribing fails; true
# when the queue is already subscribed.
def subscribe(queue, exchange = nil, options = {}, &block)
  raise ArgumentError, "Must call this method with a block" unless block
  return false unless usable?
  # Already tracking this queue; nothing to do.
  return true unless @queues.select { |q| q.name == queue[:name] }.empty?
  # Human-readable binding description, used only for log messages below.
  to_exchange = if exchange
    if options[:exchange2]
      " to exchanges #{exchange[:name]} and #{options[:exchange2][:name]}"
    else
      " to exchange #{exchange[:name]}"
    end
  end
  queue_options = queue[:options] || {}
  exchange_options = (exchange && exchange[:options]) || {}
  begin
    logger.info("[setup] Subscribing queue #{queue[:name]}#{to_exchange} on broker #{@alias}")
    q = @channel.queue(queue[:name], queue_options)
    @queues << q
    if exchange
      # Bind to the primary exchange (with optional routing key) and,
      # when configured, also to a secondary exchange.
      x = @channel.__send__(exchange[:type], exchange[:name], exchange_options)
      binding = q.bind(x, options[:key] ? {:key => options[:key]} : {})
      if (exchange2 = options[:exchange2])
        q.bind(@channel.__send__(exchange2[:type], exchange2[:name], exchange2[:options] || {}))
      end
      q = binding
    end
    q.subscribe(options[:ack] ? {:ack => true} : {}) do |header, message|
      begin
        # Hand the message off to a fiber pool when one is configured,
        # otherwise handle it inline.
        if (pool = (options[:fiber_pool] || @options[:fiber_pool]))
          pool.spawn { receive(queue[:name], header, message, options, &block) }
        else
          receive(queue[:name], header, message, options, &block)
        end
      rescue StandardError => e
        # Ack even on failure (when :ack) so the broker does not redeliver
        # a message we could not hand off.
        header.ack if options[:ack]
        logger.exception("Failed setting up to receive message from queue #{queue.inspect} " +
                         "on broker #{@alias}", e, :trace)
        @exception_stats.track("receive", e)
        update_non_delivery_stats("receive failure", e)
      end
    end
  rescue StandardError => e
    logger.exception("Failed subscribing queue #{queue.inspect}#{to_exchange} on broker #{@alias}", e, :trace)
    @exception_stats.track("subscribe", e)
    false
  end
end
|
ruby
|
{
"resource": ""
}
|
q7394
|
RightAMQP.BrokerClient.unsubscribe
|
train
|
# Unsubscribes the named queues on this broker and removes them from the
# tracked queue list. The optional block is invoked once per matching
# queue whether or not the unsubscribe succeeded (errors are logged).
# No-op when the broker has failed. Always returns true.
def unsubscribe(queue_names, &block)
  unless failed?
    @queues.reject! do |q|
      next false unless queue_names.include?(q.name)
      begin
        logger.info("[stop] Unsubscribing queue #{q.name} on broker #{@alias}")
        q.unsubscribe { block.call if block }
      rescue StandardError => e
        logger.exception("Failed unsubscribing queue #{q.name} on broker #{@alias}", e, :trace)
        @exception_stats.track("unsubscribe", e)
        block.call if block
      end
      true
    end
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q7395
|
RightAMQP.BrokerClient.queue_status
|
train
|
# Checks message/consumer counts for the named queues, reporting each via
# the block as (name, messages, consumers); on a status failure the block
# receives (name, nil, nil) and the error is logged. Returns false when
# not connected, true otherwise.
def queue_status(queue_names, &block)
  return false unless connected?
  @queues.each do |q|
    next unless queue_names.include?(q.name)
    begin
      q.status { |messages, consumers| block.call(q.name, messages, consumers) if block }
    rescue StandardError => e
      logger.exception("Failed checking status of queue #{q.name} on broker #{@alias}", e, :trace)
      @exception_stats.track("queue_status", e)
      block.call(q.name, nil, nil) if block
    end
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q7396
|
RightAMQP.BrokerClient.publish
|
train
|
# Publishes a message to the given exchange on this broker.
#
# exchange - hash with :type, :name and optional :options (:declare in the
#            options forces re-declaration of the AMQP resource first)
# packet   - unserialized form of the message, used only for logging
# message  - serialized payload actually published
# options  - AMQP publish options; also honors :no_serialize, :no_log,
#            :log_filter and :log_data for logging control
#
# Returns true on success, false when not connected or publishing failed.
def publish(exchange, packet, message, options = {})
  return false unless connected?
  begin
    exchange_options = exchange[:options] || {}
    unless options[:no_serialize]
      log_data = ""
      # Log the send unless suppressed; debug level always logs and adds
      # the full publish/exchange detail.
      unless options[:no_log] && logger.level != :debug
        re = "RE-" if packet.respond_to?(:tries) && !packet.tries.empty?
        log_filter = options[:log_filter] unless logger.level == :debug
        log_data = "#{re}SEND #{@alias} #{packet.to_s(log_filter, :send_version)}"
        if logger.level == :debug
          log_data += ", publish options #{options.inspect}, exchange #{exchange[:name]}, " +
                      "type #{exchange[:type]}, options #{exchange[:options].inspect}"
        end
        log_data += ", #{options[:log_data]}" if options[:log_data]
        logger.info(log_data) unless log_data.empty?
      end
    end
    # Force re-declaration of the exchange when requested.
    delete_amqp_resources(exchange[:type], exchange[:name]) if exchange_options[:declare]
    @channel.__send__(exchange[:type], exchange[:name], exchange_options).publish(message, options)
    true
  rescue StandardError => e
    logger.exception("Failed publishing to exchange #{exchange.inspect} on broker #{@alias}", e, :trace)
    @exception_stats.track("publish", e)
    update_non_delivery_stats("publish failure", e)
    false
  end
end
|
ruby
|
{
"resource": ""
}
|
q7397
|
RightAMQP.BrokerClient.close
|
train
|
# Closes the connection to the broker, settling on :closed (or :failed
# when +normal+ is false). The block, if given, runs once the close
# completes; close failures are logged and the final status still set.
# Always returns true.
def close(propagate = true, normal = true, log = true, &block)
  final_status = normal ? :closed : :failed
  if [:closed, :failed].include?(@status)
    # Already in a terminal state; just settle on the final status.
    @status = final_status
    yield if block_given?
  else
    begin
      logger.info("[stop] Closed connection to broker #{@alias}") if log
      update_status(final_status) if propagate
      @connection.close do
        @status = final_status
        yield if block_given?
      end
    rescue StandardError => e
      logger.exception("Failed to close broker #{@alias}", e, :trace)
      @exception_stats.track("close", e)
      @status = final_status
      yield if block_given?
    end
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q7398
|
RightAMQP.BrokerClient.connect
|
train
|
# Opens the AMQP connection and channel to the broker at +address+
# (hash with :host and :port), wiring up status-change propagation,
# prefetch and returned-message handling. Reconnect attempts are delayed
# by a random amount up to +reconnect_interval+ seconds. On failure the
# client is marked :failed and any partially opened connection is closed.
def connect(address, reconnect_interval)
  begin
    logger.info("[setup] Connecting to broker #{@identity}, alias #{@alias}")
    @status = :connecting
    @connection = AMQP.connect(:user => @options[:user],
                               :pass => @options[:pass],
                               :vhost => @options[:vhost],
                               :host => address[:host],
                               :port => address[:port],
                               :ssl => @options[:ssl],
                               :identity => @identity,
                               :insist => @options[:insist] || false,
                               :heartbeat => @options[:heartbeat],
                               # Randomize the delay to spread reconnect storms.
                               :reconnect_delay => lambda { rand(reconnect_interval) },
                               :reconnect_interval => reconnect_interval)
    @channel = MQ.new(@connection)
    # Track connection status changes and undeliverable (returned) messages.
    @channel.__send__(:connection).connection_status { |status| update_status(status) }
    @channel.prefetch(@options[:prefetch]) if @options[:prefetch]
    @channel.return_message { |header, message| handle_return(header, message) }
  rescue StandardError => e
    @status = :failed
    @failure_stats.update
    logger.exception("Failed connecting to broker #{@alias}", e, :trace)
    @exception_stats.track("connect", e)
    @connection.close if @connection
  end
end
|
ruby
|
{
"resource": ""
}
|
q7399
|
RightAMQP.BrokerClient.receive
|
train
|
# Handles a message delivered from +queue+: unserializes it (unless
# :no_unserialize is set or no serializer is configured) and passes the
# result to the subscriber block. Legacy "nil" markers and empty packets
# are acked (when :ack) without invoking the callback. Errors are logged,
# tracked, and the message acked so it is not redelivered.
# Returns true on success.
def receive(queue, header, message, options, &block)
  begin
    if options[:no_unserialize] || @serializer.nil?
      execute_callback(block, @identity, message, header)
    elsif message == "nil"
      # This happens as part of connecting an instance agent to a broker prior to version 13
      header.ack if options[:ack]
      logger.debug("RECV #{@alias} nil message ignored")
    elsif (packet = unserialize(queue, message, options))
      execute_callback(block, @identity, packet, header)
    elsif options[:ack]
      # Need to ack empty packet since no callback is being made
      header.ack
    end
    true
  rescue StandardError => e
    header.ack if options[:ack]
    logger.exception("Failed receiving message from queue #{queue.inspect} on broker #{@alias}", e, :trace)
    @exception_stats.track("receive", e)
    update_non_delivery_stats("receive failure", e)
  end
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.