| _id (string, 2-6 chars) | title (string, 9-130 chars) | partition (string, 3 classes) | text (string, 66-10.5k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q7400
|
RightAMQP.BrokerClient.unserialize
|
train
|
def unserialize(queue, message, options = {})
begin
received_at = Time.now.to_f
packet = @serializer.method(:load).arity.abs > 1 ? @serializer.load(message, queue) : @serializer.load(message)
if options.key?(packet.class)
unless options[:no_log] && logger.level != :debug
re = "RE-" if packet.respond_to?(:tries) && !packet.tries.empty?
packet.received_at = received_at if packet.respond_to?(:received_at)
log_filter = options[packet.class] unless logger.level == :debug
logger.info("#{re}RECV #{@alias} #{packet.to_s(log_filter, :recv_version)} #{options[:log_data]}")
end
packet
else
category = options[:category] + " " if options[:category]
logger.error("Received invalid #{category}packet type from queue #{queue} on broker #{@alias}: #{packet.class}\n" + caller.join("\n"))
nil
end
rescue StandardError => e
# TODO Taking advantage of Serializer knowledge here even though out of scope
trace, track = case e.class.name.sub(/^.*::/, "")
when "SerializationError" then [:caller, e.to_s !~ /MissingCertificate|MissingPrivateKey|InvalidSignature/]
when "ConnectivityFailure" then [:caller, false]
else [:trace, true]
end
logger.exception("Failed unserializing message from queue #{queue.inspect} on broker #{@alias}", e, trace)
@exception_stats.track("receive", e) if track
@options[:exception_on_receive_callback].call(message, e) if @options[:exception_on_receive_callback]
update_non_delivery_stats("receive failure", e)
nil
end
end
|
ruby
|
{
"resource": ""
}
|
q7401
|
RightAMQP.BrokerClient.handle_return
|
train
|
def handle_return(header, message)
begin
to = if header.exchange && !header.exchange.empty? then header.exchange else header.routing_key end
reason = header.reply_text
callback = @options[:return_message_callback]
logger.__send__(callback ? :debug : :info, "RETURN #{@alias} for #{to} because #{reason}")
callback.call(@identity, to, reason, message) if callback
rescue Exception => e
logger.exception("Failed return #{header.inspect} of message from broker #{@alias}", e, :trace)
@exception_stats.track("return", e)
end
true
end
|
ruby
|
{
"resource": ""
}
|
q7402
|
RightAMQP.BrokerClient.execute_callback
|
train
|
def execute_callback(callback, *args)
(callback.arity == 2 ? callback.call(*args[0, 2]) : callback.call(*args)) if callback
end
|
ruby
|
{
"resource": ""
}
|
q7403
|
Crapi.Client.ensure_success!
|
train
|
def ensure_success!(response)
return if response.is_a? Net::HTTPSuccess
message = "#{response.code} - #{response.message}"
message += "\n#{response.body}" if response.body.present?
raise Crapi::BadHttpResponseError, message
end
|
ruby
|
{
"resource": ""
}
|
q7404
|
Crapi.Client.format_payload
|
train
|
def format_payload(payload, as: JSON_CONTENT_TYPE)
## Non-Hash payloads are passed through as-is.
return payload unless payload.is_a? Hash
## Massage Hash-like payloads into a suitable format.
case as
when JSON_CONTENT_TYPE
JSON.generate(payload.as_json)
when FORM_CONTENT_TYPE
payload.to_query
else
payload.to_s
end
end
|
ruby
|
{
"resource": ""
}
|
q7405
|
Crapi.Client.parse_response
|
train
|
def parse_response(response)
case response.content_type
when JSON_CONTENT_TYPE
JSON.parse(response.body, quirks_mode: true, symbolize_names: true)
else
response.body
end
end
|
ruby
|
{
"resource": ""
}
|
q7406
|
Jinx.Metadata.pretty_print
|
train
|
def pretty_print(q)
map = pretty_print_attribute_hash.delete_if { |k, v| v.nil_or_empty? }
# one indented line per entry, all but the last line ending in a comma
content = map.map { |label, value| " #{label}=>#{format_print_value(value)}" }.join(",\n")
# print the content to the log
q.text("#{qp} structure:\n#{content}")
end
|
ruby
|
{
"resource": ""
}
|
q7407
|
StalkClimber.ClimberEnumerable.each_threaded
|
train
|
def each_threaded(&block) # :yields: Object
threads = []
climber.connection_pool.connections.each do |connection|
threads << Thread.new { connection.send(self.class.enumerator_method, &block) }
end
threads.each(&:join)
return
end
|
ruby
|
{
"resource": ""
}
|
q7408
|
Sndacs.Object.temporary_url
|
train
|
def temporary_url(expires_at = Time.now + 3600)
url = URI.escape("#{protocol}#{host(true)}/#{path_prefix}#{key}")
signature = Signature.generate_temporary_url_signature(:bucket => name,
:resource => key,
:expires_at => expires_at,
:secret_access_key => secret_access_key)
"#{url}?SNDAAccessKeyId=#{access_key_id}&Expires=#{expires_at.to_i.to_s}&Signature=#{signature}"
end
|
ruby
|
{
"resource": ""
}
|
q7409
|
Log4rAuditor.Log4rAuditor.configuration_is_valid?
|
train
|
def configuration_is_valid?(configuration)
required_parameters = ['file_name', 'standard_stream']
required_parameters.each { |parameter| return false unless configuration.include?(parameter) }
return false if configuration['file_name'].empty?
return false unless ['stdout', 'stderr', 'none'].include?(configuration['standard_stream'])
return true
end
|
ruby
|
{
"resource": ""
}
|
q7410
|
Incline.AccessGroup.belongs_to?
|
train
|
def belongs_to?(group)
group = AccessGroup.get(group) unless group.is_a?(::Incline::AccessGroup)
return false unless group
safe_belongs_to?(group)
end
|
ruby
|
{
"resource": ""
}
|
q7411
|
Incline.AccessGroup.effective_groups
|
train
|
def effective_groups
ret = [ self ]
memberships.each do |m|
unless ret.include?(m) # prevent infinite recursion
tmp = m.effective_groups
tmp.each do |g|
ret << g unless ret.include?(g)
end
end
end
ret.sort{|a,b| a.name <=> b.name}
end
|
ruby
|
{
"resource": ""
}
|
q7412
|
Incline.AccessGroup.user_ids=
|
train
|
def user_ids=(values)
values ||= []
values = [ values ] unless values.is_a?(::Array)
values = values.reject{|v| v.blank?}.map{|v| v.to_i}
self.users = Incline::User.where(id: values).to_a
end
|
ruby
|
{
"resource": ""
}
|
q7413
|
Konfig.InitializeKonfig.load_settings
|
train
|
def load_settings(path)
# Load the data files
Konfig.load_directory(path)
# Load all adapters
built_in_adapters = File.join(File.dirname(__FILE__), 'adapters', '*.rb')
require_all built_in_adapters
user_adapters = File.join(path, 'adapters', '*_adapter.rb')
require_all user_adapters
# Apply the adapters to the data
Adapter.create_child_instances(Konfig.default_store.data)
Adapter.send_to_child_instances :adapt
end
|
ruby
|
{
"resource": ""
}
|
q7414
|
Hoodie.Crypto.encrypt
|
train
|
def encrypt(plain_text, password = nil, salt = nil)
password = password.nil? ? Hoodie.crypto.password : password
salt = salt.nil? ? Hoodie.crypto.salt : salt
cipher = new_cipher(:encrypt, password, salt)
cipher.iv = iv = cipher.random_iv
ciphertext = cipher.update(plain_text)
ciphertext << cipher.final
Base64.encode64(combine_iv_ciphertext(iv, ciphertext))
end
|
ruby
|
{
"resource": ""
}
|
q7415
|
Bebox.Environment.generate_hiera_template
|
train
|
def generate_hiera_template
ssh_key = Bebox::Project.public_ssh_key_from_file(self.project_root, self.name)
project_name = Bebox::Project.shortname_from_file(self.project_root)
Bebox::PROVISION_STEPS.each do |step|
step_dir = Bebox::Provision.step_name(step)
generate_file_from_template("#{templates_path}/puppet/#{step}/hiera/data/environment.yaml.erb", "#{self.project_root}/puppet/steps/#{step_dir}/hiera/data/#{self.name}.yaml", {step_dir: step_dir, ssh_key: ssh_key, project_name: project_name})
end
end
|
ruby
|
{
"resource": ""
}
|
q7416
|
CLIntegracon.Subject.replace_path
|
train
|
def replace_path(path, name=nil)
name ||= File.basename path
self.replace_pattern path, name
end
|
ruby
|
{
"resource": ""
}
|
q7417
|
CLIntegracon.Subject.run
|
train
|
def run(command_line)
require 'open3'
env = Hash[environment_vars.map { |k, v| [k.to_s, v.to_s] }]
Open3.capture2e(env, command_line.to_s)
end
|
ruby
|
{
"resource": ""
}
|
q7418
|
CLIntegracon.Subject.command_line
|
train
|
def command_line(head_arguments='', tail_arguments='')
args = [head_arguments, default_args, tail_arguments].flatten.compact.select { |s| s.length > 0 }.join ' '
"#{executable} #{args}"
end
|
ruby
|
{
"resource": ""
}
|
q7419
|
CLIntegracon.Subject.apply_replacements
|
train
|
def apply_replacements(output)
replace_patterns.reduce(output) do |output, replacement_pattern|
replacement_pattern.replace(output)
end
end
|
ruby
|
{
"resource": ""
}
|
q7420
|
Mova.Translator.get
|
train
|
def get(key, locale, opts = {})
keys = resolve_scopes(key)
locales = resolve_locales(locale)
read_first(locales, keys) || opts[:default] || default(locales, keys, opts)
end
|
ruby
|
{
"resource": ""
}
|
q7421
|
Mova.Translator.put
|
train
|
def put(translations)
Scope.flatten(translations).each do |key, value|
storage.write(key, value) unless storage.exist?(key)
end
end
|
ruby
|
{
"resource": ""
}
|
q7422
|
EmailDirect.ServiceProxyPatch.build_request
|
train
|
def build_request(method, options)
builder = underscore("build_#{method}")
self.respond_to?(builder) ? self.send(builder, options) :
soap_envelope(options).target!
end
|
ruby
|
{
"resource": ""
}
|
q7423
|
Hornetseye.InternalComplex.*
|
train
|
def *(other)
if other.is_a?(InternalComplex) or other.is_a?(Complex)
InternalComplex.new @real * other.real - @imag * other.imag,
@real * other.imag + @imag * other.real
elsif InternalComplex.generic? other
InternalComplex.new @real * other, @imag * other
else
x, y = other.coerce self
x * y
end
end
|
ruby
|
{
"resource": ""
}
|
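The `*` method above expands complex multiplication component-wise: (a + bi)(c + di) = (ac - bd) + (ad + bc)i. A minimal standalone check of that identity against Ruby's built-in Complex class (the variable names are illustrative only):

```ruby
# (a + bi) * (c + di) = (a*c - b*d) + (a*d + b*c)i
a, b = 2.0, 3.0
c, d = 4.0, -1.0

by_formula = Complex(a * c - b * d, a * d + b * c)
built_in   = Complex(a, b) * Complex(c, d)

puts by_formula == built_in  # => true (both are 11.0+10.0i)
```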
q7424
|
Hornetseye.InternalComplex./
|
train
|
def /(other)
if other.is_a?(InternalComplex) or other.is_a?(Complex)
self * other.conj / other.abs2
elsif InternalComplex.generic? other
InternalComplex.new @real / other, @imag / other
else
x, y = other.coerce self
x / y
end
end
|
ruby
|
{
"resource": ""
}
|
q7425
|
Hornetseye.COMPLEX_.assign
|
train
|
def assign(value)
value = value.simplify
if @value.real.respond_to? :assign
@value.real.assign value.get.real
else
@value.real = value.get.real
end
if @value.imag.respond_to? :assign
@value.imag.assign value.get.imag
else
@value.imag = value.get.imag
end
value
end
|
ruby
|
{
"resource": ""
}
|
q7426
|
Hornetseye.Node.real_with_decompose
|
train
|
def real_with_decompose
if typecode == OBJECT or is_a?(Variable) or Thread.current[:lazy]
real_without_decompose
elsif typecode < COMPLEX_
decompose 0
else
self
end
end
|
ruby
|
{
"resource": ""
}
|
q7427
|
Hornetseye.Node.real=
|
train
|
def real=(value)
if typecode < COMPLEX_
decompose( 0 )[] = value
elsif typecode == OBJECT
self[] = Hornetseye::lazy do
value + imag * Complex::I
end
else
self[] = value
end
end
|
ruby
|
{
"resource": ""
}
|
q7428
|
Hornetseye.Node.imag_with_decompose
|
train
|
def imag_with_decompose
if typecode == OBJECT or is_a?(Variable) or Thread.current[:lazy]
imag_without_decompose
elsif typecode < COMPLEX_
decompose 1
else
Hornetseye::lazy( *shape ) { typecode.new( 0 ) }
end
end
|
ruby
|
{
"resource": ""
}
|
q7429
|
Balancer.Core.set_profile
|
train
|
def set_profile(value)
path = "#{root}/.balancer/profiles/#{value}.yml"
unless File.exist?(path)
puts "The profile file #{path} does not exist. Exiting.".colorize(:red)
exit 1
end
ENV['BALANCER_PROFILE'] = value
end
|
ruby
|
{
"resource": ""
}
|
q7430
|
Doublylinkedlist.Doublylinkedlist.to_s
|
train
|
def to_s
actual = @inicio
cadena = "|"
while !actual.nil?
cadena << actual[:valor].to_s
if !actual[:sig].nil?
cadena << ", "
end
actual = actual[:sig]
end
cadena << "|"
return cadena
end
|
ruby
|
{
"resource": ""
}
|
q7431
|
Doublylinkedlist.Doublylinkedlist.insertar_inicio
|
train
|
def insertar_inicio(val)
if @inicio.nil?
@inicio = Struct::Nodo.new(nil, val, nil)
@final = @inicio
else
copia = @inicio
@inicio = Struct::Nodo.new(nil, val, copia)
copia[:ant] = @inicio
end
end
|
ruby
|
{
"resource": ""
}
|
q7432
|
Doublylinkedlist.Doublylinkedlist.insertar_final
|
train
|
def insertar_final(val)
if @final.nil?
@inicio = Struct::Nodo.new(nil, val, nil)
@final = @inicio
else
copia = @final
@final[:sig] = Struct::Nodo.new(copia, val, nil)
copia2 = @final[:sig]
@final = copia2
end
end
|
ruby
|
{
"resource": ""
}
|
q7433
|
Doublylinkedlist.Doublylinkedlist.tamano
|
train
|
def tamano()
if !@inicio.nil?
contador = 1
copia = @inicio
while !copia[:sig].nil?
contador += 1
copia2 = copia[:sig]
copia = copia2
end
end
return contador
end
|
ruby
|
{
"resource": ""
}
|
q7434
|
Doublylinkedlist.Doublylinkedlist.posicion
|
train
|
def posicion (pos)
if @inicio.nil?
raise RuntimeError, "La lista esta vacia"
end
if pos<0 || pos>tamano-1
raise RuntimeError, "La posicion no es correcta"
end
contador=0
copia=@inicio
while contador<pos && !copia.nil?
copia2 = copia[:sig]
copia = copia2
contador += 1
end
return copia[:valor]
end
|
ruby
|
{
"resource": ""
}
|
q7435
|
Doublylinkedlist.Doublylinkedlist.ordenar!
|
train
|
def ordenar!
cambio = true
while cambio
cambio = false
i = @inicio
i_1 = @inicio[:sig]
while i_1 != nil
if(i[:valor] > i_1[:valor])
i[:valor], i_1[:valor] = i_1[:valor], i[:valor]
cambio = true
end
i = i_1
i_1 = i_1[:sig]
end
end
end
|
ruby
|
{
"resource": ""
}
|
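`ordenar!` above is a bubble sort that swaps node values rather than relinking nodes. A self-contained sketch of the same pass over a Struct-based node; the member order (ant, valor, sig) is inferred from how the other Doublylinkedlist methods build their nodes:

```ruby
Nodo = Struct.new(:ant, :valor, :sig)

# Build a small doubly linked list: 3 <-> 1 <-> 2
n1 = Nodo.new(nil, 3, nil)
n2 = Nodo.new(n1, 1, nil)
n3 = Nodo.new(n2, 2, nil)
n1.sig = n2
n2.sig = n3
inicio = n1

# Repeated passes, swapping adjacent out-of-order values (as in ordenar!)
cambio = true
while cambio
  cambio = false
  i = inicio
  while i.sig
    if i.valor > i.sig.valor
      i.valor, i.sig.valor = i.sig.valor, i.valor
      cambio = true
    end
    i = i.sig
  end
end

# Walk the list to verify the order
valores = []
nodo = inicio
while nodo
  valores << nodo.valor
  nodo = nodo.sig
end
puts valores.inspect  # => [1, 2, 3]
```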
q7436
|
PhiltreRails.PhiltreViewHelpers.order_by
|
train
|
def order_by( filter, *fields, label: fields.first.to_s.titleize, order_link_class: default_order_link_class )
return label if filter.nil?
# current ordering from the filter
# each expr is a Sequel::SQL::Expression
exprs = Hash[ filter.order_expressions ]
# Invert each ordering for the generated link. Current sort order will be displayed.
order_links = fields.map do |field|
if exprs[field]
order_link_class.new exprs[field].invert, active: true
else
order_link_class.new Sequel.asc(field)
end
end
# filter params must have order in the right format
filter_params = filter.filter_parameters.dup
filter_params[:order] = unify_array( order_links.map( &:name ) )
params_hash = {filter.class::Model.model_name.param_key.to_sym => filter_params}
link_text = raw( [label, order_links.first.andand.icon].compact.join(' ') )
link_to link_text, params_hash, {class: order_links.first.andand.css_class}
end
|
ruby
|
{
"resource": ""
}
|
q7437
|
DataMapper::Adapters.BugzillaAdapter.delete
|
train
|
def delete(collection)
each_resource_with_edit_url(collection) do |resource, edit_url|
connection.delete(edit_url, 'If-Match' => "*")
end
# return count
collection.size
end
|
ruby
|
{
"resource": ""
}
|
q7438
|
Pocus.Session.send_request
|
train
|
def send_request(method, path, fields = {})
response = send_logged_request(URI(BASE_URL + path), method, request_data(fields))
fail UnexpectedHttpResponse, response unless response.is_a? Net::HTTPSuccess
JSON.parse(response.body)
end
|
ruby
|
{
"resource": ""
}
|
q7439
|
Aspire.UserLookup.[]
|
train
|
def [](uri, factory = nil)
data = store[uri]
data.nil? ? nil : Aspire::Object::User.new(uri, factory, json: data)
end
|
ruby
|
{
"resource": ""
}
|
q7440
|
Aspire.UserLookup.load
|
train
|
def load(filename = nil)
delim = /\s*;\s*/ # The delimiter for email and role lists
enum = Aspire::Enumerator::ReportEnumerator.new(filename).enumerator
enum.each do |row|
# Construct a JSON data structure for the user
uri = row[3]
data = csv_to_json_api(row, email_delim: delim, role_delim: delim)
csv_to_json_other(row, data)
# Store the JSON data in the lookup table
store[uri] = data
end
end
|
ruby
|
{
"resource": ""
}
|
q7441
|
Aspire.UserLookup.method_missing
|
train
|
def method_missing(method, *args, &block)
super unless store.respond_to?(method)
store.public_send(method, *args, &block)
end
|
ruby
|
{
"resource": ""
}
|
q7442
|
Aspire.UserLookup.csv_to_json_api
|
train
|
def csv_to_json_api(row, data = {}, email_delim: nil, role_delim: nil)
data['email'] = (row[4] || '').split(email_delim)
data['firstName'] = row[0]
data['role'] = (row[7] || '').split(role_delim)
data['surname'] = row[1]
data['uri'] = row[3]
data
end
|
ruby
|
{
"resource": ""
}
|
q7443
|
Aspire.UserLookup.csv_to_json_other
|
train
|
def csv_to_json_other(row, data = {})
# The following fields are not present in the JSON API response but are in
# the All User Profiles report - they are included for completeness.
data['jobRole'] = row[5] || ''
data['lastLogin'] = row[8]
data['name'] = row[2] || ''
data['visibility'] = row[6] || ''
data
end
|
ruby
|
{
"resource": ""
}
|
q7444
|
CouchbaseId.Generator.generate_id
|
train
|
def generate_id
if self.id.nil?
#
# Generate the id (incrementing values as required)
#
overflow = self.class.__overflow__ ||= self.class.bucket.get("#{self.class.design_document}:#{CLUSTER_ID}:overflow", :quiet => true) # Don't error if not there
count = self.class.bucket.incr("#{self.class.design_document}:#{CLUSTER_ID}:count", :create => true) # This models current id count
if count == 0 || overflow.nil?
overflow ||= 0
overflow += 1
# We shouldn't need to worry about concurrency here due to the size of count
# Would require ~18446744073709551615 concurrent writes
self.class.bucket.set("#{self.class.design_document}:#{CLUSTER_ID}:overflow", overflow)
self.class.__overflow__ = overflow
end
self.id = self.class.__class_id_generator__.call(overflow, count)
#
# So an existing id would only be present if:
# => something crashed before incrementing the overflow
#
# Basically only the overflow should be able to cause issues, we'll increment the count just to be sure
# One would hope this code only ever runs under high load during an overflow event
#
while self.class.bucket.get(self.id, :quiet => true).present?
# Set in-case we are here due to a crash (concurrency is not an issue)
# Note we are not incrementing the @__overflow__ variable
self.class.bucket.set("#{self.class.design_document}:#{CLUSTER_ID}:overflow", overflow + 1)
count = self.class.bucket.incr("#{self.class.design_document}:#{CLUSTER_ID}:count") # Increment just in case (attempt to avoid infinite loops)
# Reset the overflow
if self.class.__overflow__ == overflow
self.class.__overflow__ = nil
end
# Generate the new id
self.id = self.class.__class_id_generator__.call(overflow + 1, count)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7445
|
Wingtips.DSL.merge_template_options
|
train
|
def merge_template_options(default_options, template_key, custom_options = {})
template_options = configuration.template_options.fetch template_key, {}
options = Wingtips::HashUtils.deep_merge(default_options, template_options)
Wingtips::HashUtils.deep_merge(options, custom_options)
end
|
ruby
|
{
"resource": ""
}
|
q7446
|
TheArrayComparator.Cache.add
|
train
|
def add(cache, strategy)
c = cache.to_sym
s = strategy.to_sym
fail Exceptions::UnknownCachingStrategy, "Unknown caching strategy \":#{strategy}\" given. Did you register it in advance?" unless caching_strategies.key?(strategy)
caches[c] = caching_strategies[s].new
caches[c]
end
|
ruby
|
{
"resource": ""
}
|
q7447
|
Term.ANSIColor.uncolored
|
train
|
def uncolored(string = nil) # :yields:
if block_given?
yield.gsub(COLORED_REGEXP, '')
elsif string
string.gsub(COLORED_REGEXP, '')
elsif respond_to?(:to_str)
gsub(COLORED_REGEXP, '')
else
''
end
end
|
ruby
|
{
"resource": ""
}
|
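`uncolored` strips ANSI color escapes from a string, a block's result, or the receiver itself. A minimal standalone sketch with a hand-rolled pattern; the real gem's COLORED_REGEXP is broader, so the regex below is only an approximation covering basic SGR codes:

```ruby
# Matches basic SGR escape sequences such as "\e[31m", "\e[1;32m" or "\e[0m"
SGR_PATTERN = /\e\[(?:\d+(?:;\d+)*)?m/

colored = "\e[1m\e[31mwarning\e[0m: disk almost full"
puts colored.gsub(SGR_PATTERN, '')  # => "warning: disk almost full"
```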
q7448
|
Garcon.AtomicDirectUpdate.try_update
|
train
|
def try_update
old_value = get
new_value = yield old_value
unless compare_and_set(old_value, new_value)
raise ConcurrentUpdateError, "Update failed"
end
new_value
end
|
ruby
|
{
"resource": ""
}
|
q7449
|
Garcon.AtomicMutex._compare_and_set
|
train
|
def _compare_and_set(old_value, new_value)
return false unless @mutex.try_lock
begin
return false unless @value.equal? old_value
@value = new_value
ensure
@mutex.unlock
end
true
end
|
ruby
|
{
"resource": ""
}
|
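`_compare_and_set` above implements compare-and-set with a plain Mutex and identity comparison; note it uses `try_lock`, so a contended call simply reports failure. The sketch below shows the same pattern in a self-contained class (the class name and the blocking `synchronize` are my own simplifications):

```ruby
# Minimal mutex-backed atomic reference with compare-and-set semantics
class SimpleAtomicReference
  def initialize(value)
    @mutex = Mutex.new
    @value = value
  end

  def get
    @mutex.synchronize { @value }
  end

  # Swap the value only if it is still the exact object we expect
  def compare_and_set(old_value, new_value)
    @mutex.synchronize do
      return false unless @value.equal?(old_value)
      @value = new_value
      true
    end
  end
end

ref = SimpleAtomicReference.new(:a)
puts ref.compare_and_set(:a, :b)  # => true,  value swapped
puts ref.compare_and_set(:a, :c)  # => false, current value is now :b
puts ref.get.inspect              # => :b
```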
q7450
|
Whenner.Deferred.fulfill
|
train
|
def fulfill(value = nil)
raise CannotTransitionError if rejected?
return if fulfilled?
unless resolved?
self.value = value
resolve_to(:fulfilled)
end
self
end
|
ruby
|
{
"resource": ""
}
|
q7451
|
Whenner.Deferred.reject
|
train
|
def reject(reason = nil)
raise CannotTransitionError if fulfilled?
return if rejected?
unless resolved?
self.reason = reason
resolve_to(:rejected)
end
self
end
|
ruby
|
{
"resource": ""
}
|
q7452
|
Whenner.Deferred.fail
|
train
|
def fail(&block)
cb = Callback.new(block)
rejected_callbacks << cb
cb.call(*callback_response) if rejected?
cb.promise
end
|
ruby
|
{
"resource": ""
}
|
q7453
|
Whenner.Deferred.always
|
train
|
def always(&block)
cb = Callback.new(block)
always_callbacks << cb
cb.call(*callback_response) if resolved?
cb.promise
end
|
ruby
|
{
"resource": ""
}
|
q7454
|
CapybaraObjects.ScopedFinders.get_component
|
train
|
def get_component(ctype, *args)
registry.lookup_ctype(ctype).new(*args).tap do |comp|
comp.scope = full_scope
comp.validate!
end
end
|
ruby
|
{
"resource": ""
}
|
q7455
|
ServiceJynx.Jynx.clean_aged
|
train
|
def clean_aged(time_now)
near_past = time_now - @time_window_in_seconds
@errors = @errors.reverse.select{|time_stamp| time_stamp > near_past }.reverse.to_a
end
|
ruby
|
{
"resource": ""
}
|
q7456
|
RSpec.Illustrate.illustrate
|
train
|
def illustrate(content, *args)
illustration = { :text => content.to_s,
:show_when_passed => true,
:show_when_failed => true,
:show_when_pending => true }
args.each{|arg|
illustration[arg] = true if arg.is_a?(Symbol)
illustration.merge!(arg) if arg.kind_of?(Hash)
}
RSpec.current_example.metadata[:illustrations] << illustration
content
end
|
ruby
|
{
"resource": ""
}
|
q7457
|
Observatory.Dispatcher.connect
|
train
|
def connect(signal, *args, &block)
# ugly argument parsing.
# Make sure that there is either a block given, or that the second argument is
# something callable. If there is a block given, the second argument, if given,
# must be a Hash which defaults to an empty Hash. If there is no block given,
# the third optional argument must be Hash.
if block_given?
observer = block
if args.size == 1 && args.first.is_a?(Hash)
options = args.first
elsif args.size == 0
options = {}
else
raise ArgumentError, 'When given a block, #connect only expects a signal and options hash as arguments'
end
else
observer = args.shift
raise ArgumentError, 'Use a block, method or proc to specify an observer' unless observer.respond_to?(:call)
if args.any?
options = args.shift
raise ArgumentError, '#connect only expects a signal, method and options hash as arguments' unless options.is_a?(Hash) || args.any?
else
options = {}
end
end
# Initialize the list of observers for this signal and add this observer
observers[signal] ||= Stack.new
observers[signal].push(observer, options[:priority])
end
|
ruby
|
{
"resource": ""
}
|
q7458
|
Observatory.Dispatcher.disconnect
|
train
|
def disconnect(signal, observer)
return nil unless observers.key?(signal)
observers[signal].delete(observer)
end
|
ruby
|
{
"resource": ""
}
|
q7459
|
Hornetseye.GCCType.identifier
|
train
|
def identifier
case @typecode
when nil
'void'
when BOOL
'char'
when BYTE
'char'
when UBYTE
'unsigned char'
when SINT
'short int'
when USINT
'unsigned short int'
when INT
'int'
when UINT
'unsigned int'
when SFLOAT
'float'
when DFLOAT
'double'
else
if @typecode < Pointer_
'unsigned char *'
elsif @typecode < INDEX_
'int'
else
raise "No identifier available for #{@typecode.inspect}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7460
|
Hornetseye.GCCType.identifiers
|
train
|
def identifiers
if @typecode < Composite
GCCType.new( @typecode.element_type ).identifiers * @typecode.num_elements
else
[ GCCType.new( @typecode ).identifier ]
end
end
|
ruby
|
{
"resource": ""
}
|
q7461
|
Hornetseye.GCCType.r2c
|
train
|
def r2c
case @typecode
when BOOL
[ proc { |expr| "( #{expr} ) != Qfalse" } ]
when BYTE, UBYTE, SINT, USINT, INT, UINT
[ proc { |expr| "NUM2INT( #{expr} )" } ]
when SFLOAT, DFLOAT
[ proc { |expr| "NUM2DBL( #{expr} )" } ]
else
if @typecode < Pointer_
[ proc { |expr| "(#{identifier})mallocToPtr( #{expr} )" } ]
elsif @typecode < Composite
GCCType.new( @typecode.element_type ).r2c * @typecode.num_elements
else
raise "No conversion available for #{@typecode.inspect}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7462
|
WeightedSelect.Selector.add
|
train
|
def add(item, weight)
delta = Integer(weight)
if delta > 0
new_weight = @total_weight + delta
weights[@total_weight...new_weight] = item
@total_weight = new_weight
end
end
|
ruby
|
{
"resource": ""
}
|
q7463
|
WeightedSelect.Selector.extract_item
|
train
|
def extract_item
weight = Random.rand(@total_weight)
@weights.each do |range, item|
return item if range === weight
end
end
|
ruby
|
{
"resource": ""
}
|
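`add` and `extract_item` above map each item to a half-open integer range whose width is its weight, then draw a random integer and find the covering range. A standalone sketch of that range-lookup idea (the Hash-of-ranges layout is inferred from how `@weights` is used):

```ruby
# Map half-open ranges of "weight units" to items
weights = {}
total = 0
{ apple: 1, banana: 3, cherry: 6 }.each do |item, w|
  weights[total...(total + w)] = item
  total += w
end
# weights == { 0...1 => :apple, 1...4 => :banana, 4...10 => :cherry }

# Draw one item; :cherry should come up roughly 60% of the time
draw = Random.rand(total)
picked = weights.find { |range, _item| range.cover?(draw) }.last
puts picked
```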
q7464
|
OffTheGrid.HostGroup.entries
|
train
|
def entries
extract_detail(:hostlist).map do |host|
host =~ /^@/ ? HostGroup.new(host) : ExecuteHost.new(host)
end
end
|
ruby
|
{
"resource": ""
}
|
q7465
|
OffTheGrid.HostGroup.hosts
|
train
|
def hosts
entries.map do |entry|
entry.is_a?(HostGroup) ? entry.hosts : entry
end.flatten.uniq
end
|
ruby
|
{
"resource": ""
}
|
q7466
|
MMETools.Webparse.datify
|
train
|
def datify(str)
pttrn = /(\d+)[\/-](\d+)[\/-](\d+)(\W+(\d+)\:(\d+))?/
day, month, year, dummy, hour, min = str.match(pttrn).captures.map {|d| d ? d.to_i : 0 }
case year
when 0..69
year += 2000
when 70..99
year += 1900
end
DateTime.civil year, month, day, hour, min
end
|
ruby
|
{
"resource": ""
}
|
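`datify` parses day/month/year (with an optional hour:minute) and applies a two-digit-year pivot: 00-69 becomes 20xx, 70-99 becomes 19xx. A worked standalone example using the same pattern and pivot:

```ruby
require 'date'

pttrn = /(\d+)[\/-](\d+)[\/-](\d+)(\W+(\d+)\:(\d+))?/
day, month, year, _dummy, hour, min =
  "31-12-99 23:59".match(pttrn).captures.map { |d| d ? d.to_i : 0 }

case year
when 0..69  then year += 2000
when 70..99 then year += 1900
end

puts DateTime.civil(year, month, day, hour, min).to_s
# => "1999-12-31T23:59:00+00:00"
```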
q7467
|
Spreadsheet.Workbook.format
|
train
|
def format idx
case idx
when Integer
@formats[idx] || @default_format
when String
@formats.find do |fmt| fmt.name == idx end
end
end
|
ruby
|
{
"resource": ""
}
|
q7468
|
Sumac.DirectiveQueue.execute_next
|
train
|
def execute_next(&block)
@mutex.synchronize do
if @active_thread
condition_variable = ConditionVariable.new
@waiting_threads.unshift(condition_variable)
condition_variable.wait(@mutex)
end
@active_thread = true
end
return_value = yield
ensure
@mutex.synchronize do
@active_thread = false
next_waiting_thread = @waiting_threads.shift
next_waiting_thread&.signal
end
end
|
ruby
|
{
"resource": ""
}
|
q7469
|
FileTemplater.Template.transform_file_name
|
train
|
def transform_file_name(file)
if @bind
variables = file.scan(/{{([^}]*)}}/).flatten
variables.each do |v|
file.sub!("{{#{v}}}", @bind.get_binding.eval(v))
end
end
(!@nomodify && file.end_with?(".erb") && !File.directory?(file)) ? File.basename(file, ".*") : file
end
|
ruby
|
{
"resource": ""
}
|
q7470
|
Garcon.FileHelper.which
|
train
|
def which(prog, path = ENV['PATH'])
path.split(File::PATH_SEPARATOR).each do |dir|
file = File.join(dir, prog)
return file if File.executable?(file) && !File.directory?(file)
end
nil
end
|
ruby
|
{
"resource": ""
}
|
q7471
|
Garcon.FileHelper.whereis
|
train
|
def whereis(prog, path = ENV['PATH'])
dirs = []
path.split(File::PATH_SEPARATOR).each do |dir|
f = File.join(dir,prog)
if File.executable?(f) && !File.directory?(f)
if block_given?
yield f
else
dirs << f
end
end
end
dirs.empty? ? nil : dirs
end
|
ruby
|
{
"resource": ""
}
|
q7472
|
Marmotta.Connection.get
|
train
|
def get(resource_uri)
result = connection.get("resource") do |request|
request.query[:uri] = resource_uri.to_s
request.query.delete(:graph)
end
MaybeGraphResult.new(result).value
end
|
ruby
|
{
"resource": ""
}
|
q7473
|
Marmotta.Connection.delete
|
train
|
def delete(resource_uri)
connection.delete("resource") do |request|
request.query[:uri] = resource_uri.to_s
request.query.delete(:graph)
end
end
|
ruby
|
{
"resource": ""
}
|
q7474
|
Incline.User.partial_email
|
train
|
def partial_email
@partial_email ||=
begin
uid,_,domain = email.partition('@')
if uid.length < 4
uid = '*' * uid.length
elsif uid.length < 8
uid = uid[0..2] + ('*' * (uid.length - 3))
else
uid = uid[0..2] + ('*' * (uid.length - 6)) + uid[-3..-1]
end
"#{uid}@#{domain}"
end
end
|
ruby
|
{
"resource": ""
}
|
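`partial_email` masks the local part of an address while keeping the domain: local parts shorter than 4 characters are fully starred, those shorter than 8 keep the first 3 characters, and longer ones keep the first and last 3. A standalone version of the same rules with sample outputs:

```ruby
# Same masking rules as partial_email above, extracted for illustration
def mask_email(email)
  uid, _, domain = email.partition('@')
  uid =
    if uid.length < 4
      '*' * uid.length
    elsif uid.length < 8
      uid[0..2] + ('*' * (uid.length - 3))
    else
      uid[0..2] + ('*' * (uid.length - 6)) + uid[-3..-1]
    end
  "#{uid}@#{domain}"
end

puts mask_email('joe@example.com')          # => "***@example.com"
puts mask_email('johndoe@example.com')      # => "joh****@example.com"
puts mask_email('jonathandoe@example.com')  # => "jon*****doe@example.com"
```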
q7475
|
Incline.User.effective_groups
|
train
|
def effective_groups(refresh = false)
@effective_groups = nil if refresh
@effective_groups ||= if system_admin?
AccessGroup.all.map{ |g| g.to_s.upcase }
else
groups
.collect{ |g| g.effective_groups }
.flatten
end
.map{ |g| g.to_s.upcase }
.uniq
.sort
end
|
ruby
|
{
"resource": ""
}
|
q7476
|
Incline.User.has_any_group?
|
train
|
def has_any_group?(*group_list)
return :system_admin if system_admin?
return false if anonymous?
r = group_list.select{|g| effective_groups.include?(g.upcase)}
r.blank? ? false : r
end
|
ruby
|
{
"resource": ""
}
|
q7477
|
Incline.User.remember
|
train
|
def remember
self.remember_token = Incline::User::new_token
update_attribute(:remember_digest, Incline::User::digest(self.remember_token))
end
|
ruby
|
{
"resource": ""
}
|
q7478
|
Incline.User.authenticated?
|
train
|
def authenticated?(attribute, token)
return false unless respond_to?("#{attribute}_digest")
digest = send("#{attribute}_digest")
return false if digest.blank?
BCrypt::Password.new(digest).is_password?(token)
end
|
ruby
|
{
"resource": ""
}
|
q7479
|
Incline.User.disable
|
train
|
def disable(other_user, reason)
return false unless other_user&.system_admin?
return false if other_user == self
update_columns(
disabled_by: other_user.email,
disabled_at: Time.now,
disabled_reason: reason,
enabled: false
) && refresh_comments
end
|
ruby
|
{
"resource": ""
}
|
q7480
|
Incline.User.create_reset_digest
|
train
|
def create_reset_digest
self.reset_token = Incline::User::new_token
update_columns(
reset_digest: Incline::User::digest(reset_token),
reset_sent_at: Time.now
)
end
|
ruby
|
{
"resource": ""
}
|
q7481
|
Incline.User.failed_login_streak
|
train
|
def failed_login_streak
@failed_login_streak ||=
begin
results = login_histories.where.not(successful: true)
if last_successful_login
results = results.where('created_at > ?', last_successful_login.created_at)
end
results.order(created_at: :desc)
end
end
|
ruby
|
{
"resource": ""
}
|
q7482
|
AssMaintainer.InfoBase.add_hook
|
train
|
def add_hook(hook, &block)
fail ArgumentError, "Invalid hook `#{hook}'" unless\
HOOKS.keys.include? hook
fail ArgumentError, 'Block require' unless block_given?
options[hook] = block
end
|
ruby
|
{
"resource": ""
}
|
q7483
|
AssMaintainer.InfoBase.make_infobase!
|
train
|
def make_infobase!
fail MethodDenied, :make_infobase! if read_only?
before_make.call(self)
maker.execute(self)
after_make.call(self)
self
end
|
ruby
|
{
"resource": ""
}
|
q7484
|
AssMaintainer.InfoBase.rm_infobase!
|
train
|
def rm_infobase!
fail MethodDenied, :rm_infobase! if read_only?
before_rm.call(self)
destroyer.execute(self)
after_rm.call(self)
end
|
ruby
|
{
"resource": ""
}
|
q7485
|
AssMaintainer.InfoBase.dump
|
train
|
def dump(path)
designer do
dumpIB path
end.run.wait.result.verify!
path
end
|
ruby
|
{
"resource": ""
}
|
q7486
|
AssMaintainer.InfoBase.restore!
|
train
|
def restore!(path)
fail MethodDenied, :restore! if read_only?
designer do
restoreIB path
end.run.wait.result.verify!
path
end
|
ruby
|
{
"resource": ""
}
|
q7487
|
Hoodie.Memoizable.memoize
|
train
|
def memoize(methods, cache = nil)
cache ||= Hoodie::Stash.new
methods.each do |name|
uncached_name = "#{name}_uncached".to_sym
singleton_class.class_eval do
alias_method uncached_name, name
define_method(name) do |*a, &b|
cache.cache(name) { send uncached_name, *a, &b }
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
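`memoize` above aliases each listed method to a `_uncached` name and redefines it on the singleton class so the first result is cached (in a Hoodie::Stash). A minimal standalone sketch of the same aliasing trick using a plain Hash instead of the gem's cache object:

```ruby
# Minimal per-object memoization via method aliasing; caches one result per method name
module SimpleMemo
  def memoize(*method_names)
    cache = {}
    method_names.each do |name|
      uncached = "#{name}_uncached".to_sym
      singleton_class.class_eval do
        alias_method uncached, name
        define_method(name) do |*args, &block|
          cache.fetch(name) { cache[name] = send(uncached, *args, &block) }
        end
      end
    end
  end
end

class SlowThing
  include SimpleMemo

  def expensive
    sleep 0.1
    Time.now
  end
end

thing = SlowThing.new
thing.memoize(:expensive)
puts thing.expensive.equal?(thing.expensive)  # => true, second call hits the cache
```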
q7488
|
Nucleon.Manager.parallel_finalize
|
train
|
def parallel_finalize
active_plugins.each do |namespace, namespace_plugins|
namespace_plugins.each do |plugin_type, type_plugins|
type_plugins.each do |instance_name, plugin|
remove(plugin)
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7489
|
Nucleon.Manager.define_plugin
|
train
|
def define_plugin(namespace, plugin_type, base_path, file, &code) # :yields: data
@@environments[@actor_id].define_plugin(namespace, plugin_type, base_path, file, &code)
myself
end
|
ruby
|
{
"resource": ""
}
|
q7490
|
ApiWarden.Authentication.authenticate!
|
train
|
def authenticate!
return unless @authenticated.nil?
id, access_token = @params.retrieve_id, @params.retrieve_access_token
@key_for_access_token = @scope.key_for_access_token(id, access_token)
if access_token && !access_token.empty?
ApiWarden.redis { |conn| @value_for_access_token = conn.get(@key_for_access_token) }
end
unless @value_for_access_token
@authenticated = false
raise AuthenticationError
end
@authenticated = true
@id = id
@access_token = access_token
self
end
|
ruby
|
{
"resource": ""
}
|
q7491
|
ApiWarden.Authentication.ttl_for_access_token=
|
train
|
def ttl_for_access_token=(seconds)
raise_if_authentication_failed!
key = @key_for_access_token
value = @value_for_access_token
ApiWarden.redis { |conn| conn.set(key, value, ex: seconds) }
end
|
ruby
|
{
"resource": ""
}
|
q7492
|
HelpfulComments.ControllerRoutes.build
|
train
|
def build
controller_name = @klass.name.gsub(/Controller$/, '').underscore
Rails.application.routes.routes.each_with_object({}) do |route, comments|
if route.defaults[:controller] == controller_name
verb_match = route.verb.to_s.match(/\^(.*)\$/)
verbs = verb_match.nil? ? '*' : verb_match[1]
(comments[route.defaults[:action]] ||= []) << "#{verbs} #{route.ast}"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q7493
|
I18n::Processes::Data::Tree.Siblings.set
|
train
|
def set(full_key, node)
fail 'value should be a I18n::Processes::Data::Tree::Node' unless node.is_a?(Node)
key_part, rest = split_key(full_key, 2)
child = key_to_node[key_part]
if rest
unless child
child = Node.new(
key: key_part,
parent: parent,
children: [],
warn_about_add_children_to_leaf: @warn_add_children_to_leaf
)
append! child
end
unless child.children
warn_add_children_to_leaf child if @warn_about_add_children_to_leaf
child.children = []
end
child.children.set rest, node
else
remove! child if child
append! node
end
dirty!
node
end
|
ruby
|
{
"resource": ""
}
|
q7494
|
IRCSupport.Encoding.decode_irc!
|
train
|
def decode_irc!(string, encoding = :irc)
if encoding == :irc
# If incoming text is valid UTF-8, it will be interpreted as
# such. If it fails validation, a CP1252 -> UTF-8 conversion
# is performed. This allows you to see non-ASCII from mIRC
# users (non-UTF-8) and other users sending you UTF-8.
#
# (from http://xchat.org/encoding/#hybrid)
string.force_encoding("UTF-8")
if !string.valid_encoding?
string.force_encoding("CP1252").encode!("UTF-8", {:invalid => :replace, :undef => :replace})
end
else
string.force_encoding(encoding).encode!({:invalid => :replace, :undef => :replace})
string = string.chars.select { |c| c.valid_encoding? }.join
end
return string
end
|
ruby
|
{
"resource": ""
}
|
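`decode_irc!` implements the "hybrid" scheme described in its comment: treat incoming bytes as UTF-8 and fall back to a CP1252-to-UTF-8 conversion when validation fails. A small self-contained demonstration of that fallback:

```ruby
# "café" as CP1252 bytes; "\xE9" alone is not valid UTF-8
bytes = "caf\xE9".b

s = bytes.dup.force_encoding("UTF-8")
unless s.valid_encoding?
  # Reinterpret as CP1252 and transcode, as decode_irc! does
  s = s.force_encoding("CP1252").encode("UTF-8", invalid: :replace, undef: :replace)
end

puts s                  # => "café"
puts s.valid_encoding?  # => true
```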
q7495
|
IRCSupport.Encoding.encode_irc!
|
train
|
def encode_irc!(string, encoding = :irc)
if encoding == :irc
# If your text contains only characters that fit inside the CP1252
# code page (aka Windows Latin-1), the entire line will be sent
# that way. mIRC users should see it correctly. XChat users who
# are using UTF-8 will also see it correctly, because it will fail
# UTF-8 validation and will be assumed to be CP1252, even by older
# XChat versions.
#
# If the text doesn't fit inside the CP1252 code page, (for example if you
# type Eastern European characters, or Russian) it will be sent as UTF-8. Only
# UTF-8 capable clients will be able to see these characters correctly
#
# (from http://xchat.org/encoding/#hybrid)
begin
string.encode!("CP1252")
rescue ::Encoding::UndefinedConversionError
end
else
string.encode!(encoding, {:invalid => :replace, :undef => :replace}).force_encoding("ASCII-8BIT")
end
return string
end
|
ruby
|
{
"resource": ""
}
|
q7496
|
Luggage.Message.reload
|
train
|
def reload
fields = fetch_fields
@mail = Mail.new(fields["BODY[]"])
@flags = fields["FLAGS"]
@date = Time.parse(fields["INTERNALDATE"])
self
end
|
ruby
|
{
"resource": ""
}
|
q7497
|
Luggage.Message.save!
|
train
|
def save!
mailbox.select!
connection.append(mailbox.name, raw_message, flags, date)
end
|
ruby
|
{
"resource": ""
}
|
q7498
|
Luggage.Message.copy_to!
|
train
|
def copy_to!(mailbox_name)
mailbox.select!
connection.uid_copy([uid], Luggage::Mailbox.convert_mailbox_name(mailbox_name))
end
|
ruby
|
{
"resource": ""
}
|
q7499
|
Detroit.LOCat.generate
|
train
|
def generate
options = {}
options[:title] = title if title
options[:format] = format if format
options[:output] = output if output
options[:config] = config if config
options[:files] = collect_files
locat = ::LOCat::Command.new(options)
locat.run
end
|
ruby
|
{
"resource": ""
}
|