| _id (string, 2–6 chars) | title (string, 9–130 chars) | partition (string, 3 classes) | text (string, 66–10.5k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q6400
|
Shells.ShellBase.run_hook
|
train
|
def run_hook(hook_name, *args)
list = self.class.all_hooks(hook_name)
shell = self
list.each do |hook|
result = hook.call(shell, *args)
return :break if result == :break
end
list.any?
end
|
ruby
|
{
"resource": ""
}
|
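A standalone sketch of the short-circuit contract implied by `run_hook` above: each hook receives the shell plus any extra arguments, and the first hook that returns `:break` stops the chain. The procs and arguments below are illustrative only; ShellBase's own hook registration API is not shown.

```ruby
hooks = [
  ->(shell, line) { puts "hook 1 saw: #{line}" },
  ->(shell, line) { :break },                  # stops the chain
  ->(shell, line) { puts "never reached" }
]

hooks.each do |hook|
  break if hook.call(:fake_shell, "ls") == :break
end
```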
q6401
|
Roroacms.ApplicationController.add_breadcrumb
|
train
|
def add_breadcrumb(name, url = 'javascript:;', atts = {})
hash = { name: name, url: url, atts: atts }
@breadcrumbs << hash
end
|
ruby
|
{
"resource": ""
}
|
q6402
|
Roroacms.ApplicationController.authorize_demo
|
train
|
def authorize_demo
if !request.xhr? && !request.get? && ( !current_user.blank? && current_user.username.downcase == 'demo' && Setting.get('demonstration_mode') == 'Y' )
redirect_to :back, flash: { error: I18n.t('generic.demo_notification') } and return
end
render :inline => 'demo' and return if params[:action] == 'save_menu' && !current_user.blank? && current_user.username.downcase == 'demo' && Setting.get('demonstration_mode') == 'Y'
end
|
ruby
|
{
"resource": ""
}
|
q6403
|
Roroacms.ApplicationController.mark_required
|
train
|
def mark_required(object, attribute)
"*" if object.class.validators_on(attribute).map(&:class).include? ActiveModel::Validations::PresenceValidator
end
|
ruby
|
{
"resource": ""
}
|
q6404
|
Chizuru.UserStream.connect
|
train
|
def connect
uri = URI.parse("https://userstream.twitter.com/2/user.json?track=#{@screen_name}")
https = Net::HTTP.new(uri.host, uri.port)
https.use_ssl = true
https.ca_file = @ca_file
https.verify_mode = OpenSSL::SSL::VERIFY_PEER
https.verify_depth = 5
https.start do |https|
request = Net::HTTP::Get.new(uri.request_uri,
"User-Agent" => @user_agent,
"Accept-Encoding" => "identity")
request.oauth!(https, @credential.consumer, @credential.access_token)
buf = ""
https.request(request) do |response|
response.read_body do |chunk|
buf << chunk
while ((line = buf[/.+?(\r\n)+/m]) != nil)
begin
buf.sub!(line, "")
line.strip!
status = Yajl::Parser.parse(line)
rescue
break
end
yield status
end
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
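A standalone sketch of the chunk-buffering technique used in `connect` above: raw chunks accumulate in a buffer, and complete CRLF-terminated lines are peeled off and handled one at a time. The sample chunks below are made up.

```ruby
buf = +""
chunks = ["{\"a\":1}\r\n{\"b\"", ":2}\r\n"] # one JSON document split across chunks

chunks.each do |chunk|
  buf << chunk
  while (line = buf[/.+?(\r\n)+/m])
    buf.sub!(line, "")
    puts line.strip # each extracted line is one complete JSON document
  end
end
# => {"a":1}
# => {"b":2}
```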
q6405
|
Gogcom.Game.fetch
|
train
|
def fetch()
name = urlfy(@name)
page = Net::HTTP.get(URI("http://www.gog.com/game/" + name))
data = JSON.parse(page[/(?<=var gogData = )(.*)(?=;)/,1])
data
end
|
ruby
|
{
"resource": ""
}
|
q6406
|
Gogcom.Game.parse
|
train
|
def parse(data)
game = GameItem.new(get_title(data), get_genres(data),
get_download_size(data), get_release_date(data), get_description(data),
get_price(data), get_avg_rating(data), get_avg_ratings_count(data),
get_platforms(data), get_languages(data), get_developer(data),
get_publisher(data), get_modes(data), get_bonus_content(data),
get_reviews(data), get_pegi_age(data))
game
end
|
ruby
|
{
"resource": ""
}
|
q6407
|
NRSER.NicerError.format_message_segment
|
train
|
def format_message_segment segment
return segment.to_summary if segment.respond_to?( :to_summary )
return segment if String === segment
# TODO Do better!
segment.inspect
end
|
ruby
|
{
"resource": ""
}
|
q6408
|
NRSER.NicerError.to_s
|
train
|
def to_s extended: nil
# The way to get the superclass' message
message = super()
# If `extended` is explicitly `false` then just return that
return message if extended == false
# Otherwise, see if the extended message was explicitly requested,
# or if we're configured to provide it as well.
#
# Either way, don't add it if it's empty.
#
if (extended || add_extended_message?) &&
!extended_message.empty?
message + "\n\n" + extended_message
else
message
end
end
|
ruby
|
{
"resource": ""
}
|
q6409
|
Clin::CommandMixin::Core.ClassMethods.inherited
|
train
|
def inherited(subclass)
subclass._arguments = []
subclass._description = ''
subclass._abstract = false
subclass._skip_options = false
subclass._exe_name = @_exe_name
subclass._default_priority = @_default_priority.to_f / 2
subclass._priority = 0
super
end
|
ruby
|
{
"resource": ""
}
|
q6410
|
Clin::CommandMixin::Core.ClassMethods.arguments
|
train
|
def arguments(args = nil)
return @_arguments if args.nil?
@_arguments = []
[*args].map(&:split).flatten.each do |arg|
@_arguments << Clin::Argument.new(arg)
end
end
|
ruby
|
{
"resource": ""
}
|
q6411
|
Bridge.TCPSocket.verify
|
train
|
def verify()
send_bridge_request()
begin
line = gets()
match = line.match(%r{^HTTP/1\.[01] ([0-9]{3,3}) (.*)$})
if (!match)
raise "HTTP BRIDGE error: bridge server sent incorrect reply to bridge request."
end
case code = match[1].to_i
when 100, 101
return true
when 401 # 401 Access Denied, key wasn't right.
raise "HTTP BRIDGE error #{code}: host key was invalid or missing, but required."
when 503, 504 # 503 Service Unavailable or 504 Gateway Timeout
raise "HTTP BRIDGE error #{code}: could not verify server can handle requests because it's overloaded."
else
raise "HTTP BRIDGE error #{code}: #{match[2]} unknown error connecting to bridge server."
end
ensure
close() # once we do this, we just assume the connection is useless.
end
end
|
ruby
|
{
"resource": ""
}
|
q6412
|
Bridge.TCPSocket.setup
|
train
|
def setup()
send_bridge_request
code = nil
name = nil
headers = []
while (line = gets())
line = line.strip
if (line == "")
case code.to_i
when 100 # 100 Continue, just a ping. Ignore.
code = name = nil
headers = []
next
when 101 # 101 Upgrade, successfully got a connection.
write("HTTP/1.1 100 Continue\r\n\r\n") # let the server know we're still here.
return self
when 401 # 401 Access Denied, key wasn't right.
close()
raise "HTTP BRIDGE error #{code}: host key was invalid or missing, but required."
when 503, 504 # 503 Service Unavailable or 504 Gateway Timeout, just retry.
close()
sleep_time = (headers.find { |header| header["Retry-After"] } || {})["Retry-After"] || 5
raise RetryError.new("BRIDGE server timed out or is overloaded, wait #{sleep_time}s to try again.", sleep_time)
else
raise "HTTP BRIDGE error #{code}: #{name} waiting for connection."
end
end
if (!code && !name) # This is the initial response line
if (match = line.match(%r{^HTTP/1\.[01] ([0-9]{3,3}) (.*)$}))
code = match[1]
name = match[2]
next
else
raise "Parse error in BRIDGE request reply."
end
else
if (match = line.match(%r{^(.+?):\s+(.+)$}))
headers.push({match[1] => match[2]})
else
raise "Parse error in BRIDGE request reply's headers."
end
end
end
return nil
end
|
ruby
|
{
"resource": ""
}
|
q6413
|
Attrtastic.SemanticAttributesHelper.semantic_attributes_for
|
train
|
def semantic_attributes_for(record, options = {}, &block)
options[:html] ||= {}
html_class = [ "attrtastic", record.class.to_s.underscore, options[:html][:class] ].compact.join(" ")
output = tag(:div, { :class => html_class}, true)
if block_given?
output << capture(SemanticAttributesBuilder.new(record, self), &block)
else
output << capture(SemanticAttributesBuilder.new(record, self)) do |attr|
attr.attributes
end
end
output.safe_concat("</div>")
end
|
ruby
|
{
"resource": ""
}
|
q6414
|
HornetQ::Client.RequestorPattern.wait_for_reply
|
train
|
def wait_for_reply(user_id, timeout)
# We only want the reply to the supplied message_id, so set filter on message id
filter = "#{Java::org.hornetq.api.core::FilterConstants::HORNETQ_USERID} = 'ID:#{user_id}'" if user_id
@session.consumer(:queue_name => @reply_queue, :filter=>filter) do |consumer|
consumer.receive(timeout)
end
end
|
ruby
|
{
"resource": ""
}
|
q6415
|
Worldly.Country.build_fields
|
train
|
def build_fields
if @data.key?(:fields)
@data[:fields].each do |k,v|
v[:required] = true unless v.key?(:required)
end
else
{city: {label:'City', required: true}, region: {label: 'Province', required: false}, postcode: {label: 'Post Code', required: false} }
end
end
|
ruby
|
{
"resource": ""
}
|
q6416
|
LitePage.ClassMethods.page_url
|
train
|
def page_url(url)
define_method(:page_url) do |query_params = {}|
uri = URI(url)
existing_params = URI.decode_www_form(uri.query || '')
new_params = query_params.to_a
unless existing_params.empty? && new_params.empty?
combined_params = existing_params.push(*new_params)
uri.query = URI.encode_www_form(combined_params)
end
uri.to_s
end
end
|
ruby
|
{
"resource": ""
}
|
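A hypothetical usage sketch for the `page_url` macro above, assuming the LitePage library is loaded and its ClassMethods module is brought in with `extend`; the class name and URL are illustrative.

```ruby
require 'uri'

class SearchPage
  extend LitePage::ClassMethods

  page_url 'https://example.com/search?lang=en'
end

SearchPage.new.page_url(q: 'ruby')
# => "https://example.com/search?lang=en&q=ruby"
```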
q6417
|
Jinx.MergeVisitor.merge
|
train
|
def merge(source, target)
# trivial case
return target if source.equal?(target)
# the domain attributes to merge
mas = @mergeable.call(source)
unless mas.empty? then
logger.debug { "Merging #{source.qp} #{mas.to_series} into #{target.qp}..." }
end
# merge the non-domain attributes
target.merge_attributes(source)
# merge the source domain attributes into the target
target.merge_attributes(source, mas, @matches, &@filter)
end
|
ruby
|
{
"resource": ""
}
|
q6418
|
Bebox.Node.checkpoint_parameter_from_file
|
train
|
def checkpoint_parameter_from_file(node_type, parameter)
Bebox::Node.checkpoint_parameter_from_file(self.project_root, self.environment, self.hostname, node_type, parameter)
end
|
ruby
|
{
"resource": ""
}
|
q6419
|
Bebox.Node.prepare
|
train
|
def prepare
started_at = DateTime.now.to_s
prepare_deploy
prepare_common_installation
puppet_installation
create_prepare_checkpoint(started_at)
end
|
ruby
|
{
"resource": ""
}
|
q6420
|
Bebox.Node.create_hiera_template
|
train
|
def create_hiera_template
options = {ssh_key: Bebox::Project.public_ssh_key_from_file(project_root, environment), project_name: Bebox::Project.shortname_from_file(project_root)}
Bebox::Provision.generate_hiera_for_steps(self.project_root, "node.yaml.erb", self.hostname, options)
end
|
ruby
|
{
"resource": ""
}
|
q6421
|
Bebox.Node.create_node_checkpoint
|
train
|
def create_node_checkpoint
# Set the creation time for the node
self.created_at = DateTime.now.to_s
# Create the checkpoint file from template
Bebox::Environment.create_checkpoint_directories(project_root, environment)
generate_file_from_template("#{Bebox::FilesHelper::templates_path}/node/node.yml.erb", "#{project_root}/.checkpoints/environments/#{environment}/nodes/#{hostname}.yml", {node: self})
end
|
ruby
|
{
"resource": ""
}
|
q6422
|
M2MFastInsert.HasAndBelongsToManyOverride.define_fast_methods_for_model
|
train
|
def define_fast_methods_for_model(name, options)
join_table = options[:join_table]
join_column_name = name.to_s.downcase.singularize
define_method "fast_#{join_column_name}_ids_insert" do |*args|
table_name = self.class.table_name.singularize
insert = M2MFastInsert::Base.new id, join_column_name, table_name, join_table, *args
insert.fast_insert
end
end
|
ruby
|
{
"resource": ""
}
|
q6423
|
Yargi.Random.execute
|
train
|
def execute
graph = Digraph.new{|g|
vertex_count.times do |i|
vertex = g.add_vertex
vertex_builder.call(vertex,i) if vertex_builder
end
edge_count.times do |i|
source = g.ith_vertex(Kernel.rand(vertex_count))
target = g.ith_vertex(Kernel.rand(vertex_count))
edge = g.connect(source, target)
edge_builder.call(edge,i) if edge_builder
end
}
strip ? _strip(graph) : graph
end
|
ruby
|
{
"resource": ""
}
|
q6424
|
Myreplicator.Log.kill
|
train
|
def kill
return false unless hostname == Socket.gethostname
begin
Process.kill('KILL', pid)
self.state = "killed"
self.save!
rescue Errno::ESRCH
puts "pid #{pid} does not exist!"
mark_dead
end
end
|
ruby
|
{
"resource": ""
}
|
q6425
|
Myreplicator.Log.running?
|
train
|
def running?
logs = Log.where(:file => file,
:job_type => job_type,
:state => "running",
:export_id => export_id,
:hostname => hostname)
if logs.count > 0
logs.each do |log|
begin
Process.getpgid(log.pid)
puts "still running #{log.file}"
return true
rescue Errno::ESRCH
log.mark_dead
end
end
end
return false
end
|
ruby
|
{
"resource": ""
}
|
q6426
|
TheArrayComparator.StrategyDispatcher.register
|
train
|
def register(name, klass)
if valid_strategy? klass
available_strategies[name.to_sym] = klass
else
fail exception_to_raise_for_invalid_strategy, "Registering #{klass} failed. It does not support \"#{class_must_have_methods.join('-, ')}\"-method"
end
end
|
ruby
|
{
"resource": ""
}
|
q6427
|
Furnish.RangeSet.assign_group_items
|
train
|
def assign_group_items(name, items, raise_if_exists=false)
group = group_items(name)
if items.kind_of?(Array)
items = Set[*items]
elsif !items.kind_of?(Set)
items = Set[items]
end
c_allocated = allocated.count
c_group = group.count
items.each do |item|
utf8_item = item.encode("UTF-8")
allocated.add(utf8_item)
group.add(utf8_item)
end
if raise_if_exists
raise unless group.count == c_group + items.count && allocated.count == c_allocated + items.count
end
replace_group(name, group)
return items
end
|
ruby
|
{
"resource": ""
}
|
q6428
|
Furnish.RangeSet.remove_from_group
|
train
|
def remove_from_group(name, items)
group = group_items(name)
items.each do |item|
utf8_item = item.encode("UTF-8")
deallocate(utf8_item)
group.delete(utf8_item)
end
replace_group(name, group)
return items
end
|
ruby
|
{
"resource": ""
}
|
q6429
|
Furnish.RangeSet.decommission_group
|
train
|
def decommission_group(name)
group = group_items(name)
group.each do |item|
deallocate(item)
end
groups.delete(name)
return name
end
|
ruby
|
{
"resource": ""
}
|
q6430
|
EncryptedStore.Instance.preload_keys
|
train
|
def preload_keys(amount = 12)
keys = EncryptedStore::ActiveRecord.preload_keys(amount)
keys.each { |k| (@_decrypted_keys ||= {})[k.id] = k.decrypted_key }
end
|
ruby
|
{
"resource": ""
}
|
q6431
|
RForce.Binding.login
|
train
|
def login(user, password)
@user = user
@password = password
response = call_remote(:login, [:username, user, :password, password])
raise "Incorrect user name / password [#{response.fault}]" unless response.loginResponse
result = response[:loginResponse][:result]
@session_id = result[:sessionId]
init_server(result[:serverUrl])
response
end
|
ruby
|
{
"resource": ""
}
|
q6432
|
RForce.Binding.login_with_oauth
|
train
|
def login_with_oauth
result = @server.post @oauth[:login_url], '', {}
case result
when Net::HTTPSuccess
doc = REXML::Document.new result.body
@session_id = doc.elements['*/sessionId'].text
server_url = doc.elements['*/serverUrl'].text
init_server server_url
return {:sessionId => @session_id, :serverUrl => server_url}
when Net::HTTPUnauthorized
raise 'Invalid OAuth tokens'
else
raise "Unexpected error: #{result.inspect}"
end
end
|
ruby
|
{
"resource": ""
}
|
q6433
|
RForce.Binding.method_missing
|
train
|
def method_missing(method, *args)
unless args.size == 1 && [Hash, Array].include?(args[0].class)
raise 'Expected 1 Hash or Array argument'
end
call_remote method, args[0]
end
|
ruby
|
{
"resource": ""
}
|
q6434
|
Shells.ShellBase.teardown
|
train
|
def teardown #:doc:
unless options[:quit].to_s.strip == ''
self.ignore_io_error = true
exec_ignore_code options[:quit], command_timeout: 1, timeout_error: false
end
end
|
ruby
|
{
"resource": ""
}
|
q6435
|
Lightstreamer.PostRequest.execute_multiple
|
train
|
def execute_multiple(url, bodies)
response = Excon.post url, body: bodies.join("\r\n"), expects: 200, connect_timeout: 15
response_lines = response.body.split("\n").map(&:strip)
errors = []
errors << parse_error(response_lines) until response_lines.empty?
raise LightstreamerError if errors.size != bodies.size
errors
rescue Excon::Error => error
raise Errors::ConnectionError, error.message
end
|
ruby
|
{
"resource": ""
}
|
q6436
|
Lightstreamer.PostRequest.request_body
|
train
|
def request_body(query)
params = {}
query.each do |key, value|
next if value.nil?
value = value.map(&:to_s).join(' ') if value.is_a? Array
params[key] = value
end
URI.encode_www_form params
end
|
ruby
|
{
"resource": ""
}
|
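A standalone illustration of the flattening `request_body` above performs: nil values are dropped and array values are joined with spaces before form encoding. The LS_* keys are just sample parameter names.

```ruby
require 'uri'

query  = { LS_op: :add, LS_id: %w[item1 item2], LS_snapshot: nil }
params = {}
query.each do |key, value|
  next if value.nil?
  value = value.map(&:to_s).join(' ') if value.is_a?(Array)
  params[key] = value
end
puts URI.encode_www_form(params)
# => "LS_op=add&LS_id=item1+item2"
```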
q6437
|
Lightstreamer.PostRequest.parse_error
|
train
|
def parse_error(response_lines)
first_line = response_lines.shift
return nil if first_line == 'OK'
return Errors::SyncError.new if first_line == 'SYNC ERROR'
if first_line == 'ERROR'
error_code = response_lines.shift
LightstreamerError.build response_lines.shift, error_code
else
LightstreamerError.new first_line
end
end
|
ruby
|
{
"resource": ""
}
|
q6438
|
GACollectorPusher.Instance.add_exception
|
train
|
def add_exception description: nil, is_fatal: false
is_fatal_int = is_fatal ? 1 : 0
@params = {
t: "exception",
exd: description,
exf: is_fatal_int
}
send_to_ga
end
|
ruby
|
{
"resource": ""
}
|
q6439
|
Aspire.Util.child_url?
|
train
|
def child_url?(url1, url2, api = nil, strict: false)
parent_url?(url2, url1, api, strict: strict)
end
|
ruby
|
{
"resource": ""
}
|
q6440
|
Aspire.Util.linked_data
|
train
|
def linked_data(uri, ld)
uri = linked_data_path(uri)
return nil unless uri && ld
# The URI used to retrieve the data may be the canonical URI or a
# tenancy aliases. We ignore the host part of the URIs and match just
# the path
ld.each { |u, data| return data if uri == linked_data_path(u) }
# No match was found
nil
end
|
ruby
|
{
"resource": ""
}
|
q6441
|
Aspire.Util.linked_data_path
|
train
|
def linked_data_path(uri)
URI.parse(uri).path
rescue URI::InvalidComponentError, URI::InvalidURIError
nil
end
|
ruby
|
{
"resource": ""
}
|
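A quick illustration (with made-up URLs) of why `linked_data` above compares only the path returned by `linked_data_path`: the same resource can be addressed through different tenancy host aliases.

```ruby
require 'uri'

canonical = 'https://canonical.example.com/items/123'
aliased   = 'https://tenancy-alias.example.org/items/123'
URI.parse(canonical).path == URI.parse(aliased).path
# => true
```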
q6442
|
Aspire.Util.list_url?
|
train
|
def list_url?(u = nil, parsed: nil)
return false if (u.nil? || u.empty?) && parsed.nil?
parsed ||= parse_url(u)
child_type = parsed[:child_type]
parsed[:type] == 'lists' && (child_type.nil? || child_type.empty?)
end
|
ruby
|
{
"resource": ""
}
|
q6443
|
Aspire.Util.parent_url?
|
train
|
def parent_url?(url1, url2, api = nil, strict: false)
u1 = url_for_comparison(url1, api, parsed: true)
u2 = url_for_comparison(url2, api, parsed: true)
# Both URLs must have the same parent
return false unless u1[:type] == u2[:type] && u1[:id] == u2[:id]
# Non-strict comparison requires only the same parent object
return true unless strict
# Strict comparison requires that this entry is a child of the URL
u1[:child_type].nil? && !u2[:child_type].nil? ? true : false
end
|
ruby
|
{
"resource": ""
}
|
q6444
|
Aspire.Util.url_for_comparison
|
train
|
def url_for_comparison(url, api = nil, parsed: false)
if url.is_a?(MatchData) && parsed
url
elsif parsed && url.respond_to?(:parsed_url)
url.parsed_url
elsif !parsed && url.respond_to?(:url)
url.url
else
result = api.nil? ? url.to_s : api.canonical_url(url.to_s)
parsed ? parse_url(result) : result
end
end
|
ruby
|
{
"resource": ""
}
|
q6445
|
Aspire.Util.url_path
|
train
|
def url_path
# Get the path component of the URL as a relative path
filename = URI.parse(url).path
filename.slice!(0) # Remove the leading /
# Return the path with '.json' extension if not already present
filename.end_with?('.json') ? filename : "#{filename}.json"
rescue URI::InvalidComponentError, URI::InvalidURIError
# Return nil if the URL is invalid
nil
end
|
ruby
|
{
"resource": ""
}
|
q6446
|
SwaggerDocsGenerator.Generator.import_documentations
|
train
|
def import_documentations
require SwaggerDocsGenerator.file_base
SwaggerDocsGenerator.file_docs.each { |rb| require rb }
end
|
ruby
|
{
"resource": ""
}
|
q6447
|
SwaggerDocsGenerator.Generator.generate_swagger_file
|
train
|
def generate_swagger_file
delete_file_before
File.open(@swagger_file, 'a+') do |file|
file.write(if SwaggerDocsGenerator.configure.compress
write_in_swagger_file.to_json
else
JSON.pretty_generate write_in_swagger_file
end)
end
end
|
ruby
|
{
"resource": ""
}
|
q6448
|
Parse.Card.mod_for_phrases
|
train
|
def mod_for_phrases(raw_cell, method_name_or_ability_class, card_to_setup)
return unless raw_cell
#puts [raw,cat,card_class,name].inspect
raw_cell.split(/[,;]/).each do |raw_cell_part|
p = make_parsed_phrase_obj(raw_cell_part,method_name_or_ability_class)
p.mod_card(card_to_setup) if p
end
end
|
ruby
|
{
"resource": ""
}
|
q6449
|
Zog.Heart.method_missing
|
train
|
def method_missing(meth, *args, &block)
if @categories.include?(meth)
if block_given?
args[0] = yield block
end
self::msg(meth, args[0])
else
super
end
end
|
ruby
|
{
"resource": ""
}
|
q6450
|
Associates.ClassMethods.associate
|
train
|
def associate(model, options = {})
options[:only] = Array(options[:only])
options[:except] = Array(options[:except])
options[:depends_on] = Array(options[:depends_on])
options = {
delegate: true
}.merge(options)
associate = build_associate(model, options)
self.associates << associate
define_associate_delegation(associate) if options[:delegate]
define_associate_instance_setter_method(associate)
define_associate_instance_getter_method(associate)
end
|
ruby
|
{
"resource": ""
}
|
q6451
|
Associates.ClassMethods.build_associate
|
train
|
def build_associate(model, options = {})
model_name = model.to_s.underscore
model_klass = (options[:class_name] || model).to_s.classify.constantize
dependent_associate_names = options[:depends_on].map(&:to_s)
attribute_names = extract_attribute_names(model_klass, options)
ensure_name_uniqueness(associates.map(&:name), model_name)
ensure_attribute_uniqueness(associates.map(&:attribute_names), attribute_names) if options[:delegate]
ensure_dependent_names_existence(associates.map(&:name), dependent_associate_names)
Item.new(model_name, model_klass, attribute_names, dependent_associate_names, options)
end
|
ruby
|
{
"resource": ""
}
|
q6452
|
Associates.ClassMethods.ensure_attribute_uniqueness
|
train
|
def ensure_attribute_uniqueness(associates_attribute_names, attribute_names)
attribute_names.each do |attribute_name|
if associates_attribute_names.include?(attribute_name)
raise NameError, "already defined attribute name '#{attribute_name}' for #{name}(#{object_id})"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q6453
|
Associates.ClassMethods.ensure_dependent_names_existence
|
train
|
def ensure_dependent_names_existence(associates_names, dependent_associate_names)
dependent_associate_names.each do |dependent_name|
unless associates_names.include?(dependent_name)
raise NameError, "undefined associated model '#{dependent_name}' for #{name}(#{object_id})"
end
end
end
|
ruby
|
{
"resource": ""
}
|
q6454
|
Associates.ClassMethods.define_associate_delegation
|
train
|
def define_associate_delegation(associate)
methods = [associate.attribute_names, associate.attribute_names.map { |attr| "#{attr}=" }].flatten
send(:delegate, *methods, to: associate.name)
end
|
ruby
|
{
"resource": ""
}
|
q6455
|
Associates.ClassMethods.define_associate_instance_setter_method
|
train
|
def define_associate_instance_setter_method(associate)
define_method "#{associate.name}=" do |object|
unless object.is_a?(associate.klass)
raise ArgumentError, "#{associate.klass}(##{associate.klass.object_id}) expected, got #{object.class}(##{object.class.object_id})"
end
instance = instance_variable_set("@#{associate.name}", object)
depending = associates.select { |_associate| _associate.dependent_names.include?(associate.name) }
depending.each do |_associate|
send(_associate.name).send("#{associate.name}=", instance)
end
instance
end
end
|
ruby
|
{
"resource": ""
}
|
q6456
|
Associates.ClassMethods.define_associate_instance_getter_method
|
train
|
def define_associate_instance_getter_method(associate)
define_method associate.name do
instance = instance_variable_get("@#{associate.name}") || instance_variable_set("@#{associate.name}", associate.klass.new)
depending = associates.select { |_associate| _associate.dependent_names.include?(associate.name) }
depending.each do |_associate|
existing = send(_associate.name).send(associate.name)
send(_associate.name).send("#{associate.name}=", instance) unless existing
end
instance
end
end
|
ruby
|
{
"resource": ""
}
|
q6457
|
Incline.UserMailer.account_activation
|
train
|
def account_activation(data = {})
@data = {
user: nil,
client_ip: '0.0.0.0'
}.merge(data || {})
raise unless data[:user]
mail to: data[:user].email, subject: 'Account activation'
end
|
ruby
|
{
"resource": ""
}
|
q6458
|
Incline.UserMailer.invalid_password_reset
|
train
|
def invalid_password_reset(data = {})
@data = {
email: nil,
message: 'This email address is not associated with an existing account.',
client_ip: '0.0.0.0'
}.merge(data || {})
raise unless data[:email]
mail to: data[:email], subject: 'Password reset request'
end
|
ruby
|
{
"resource": ""
}
|
q6459
|
ConfigurationBlocks.ClassMethods.configuration_module
|
train
|
def configuration_module(base = self)
Module.new.tap do |cm|
delegators = get_configuration_methods
base = send(base) if base.is_a?(Symbol)
cm.extend Module.new {
delegators.each do |method|
module_eval do
define_method(method) do |*args|
base.send(method, *args)
end
end
end
}
end
end
|
ruby
|
{
"resource": ""
}
|
q6460
|
ConfigurationBlocks.ClassMethods.get_configuration_methods
|
train
|
def get_configuration_methods(local_only = false)
all_delegators = singleton_class.send(:cf_block_delegators) + cf_block_delegators
return all_delegators if local_only
ancestors.each_with_object(all_delegators) do |ancestor, all|
all.merge(ancestor.send(__method__, true)) if ancestor.respond_to?(__method__)
end
end
|
ruby
|
{
"resource": ""
}
|
q6461
|
ConfigurationBlocks.ClassMethods.configuration_block_delegate
|
train
|
def configuration_block_delegate(*methods)
methods.flatten.each { |m| cf_block_delegators.add(m.to_sym) }
@cb_conf_module = nil if @cb_conf_module
nil
end
|
ruby
|
{
"resource": ""
}
|
q6462
|
ConfigurationBlocks.ClassMethods.configuration_block_core
|
train
|
def configuration_block_core(conf_module, &block)
return conf_module unless block_given?
return conf_module.tap(&block) unless block.arity == 0
conf_module.module_eval(&block)
conf_module
end
|
ruby
|
{
"resource": ""
}
|
q6463
|
BarkestCore.StatusHelper.show_system_status
|
train
|
def show_system_status(options = {})
options = {
url_on_completion: nil,
completion_button: 'Continue',
main_status: 'System is busy'
}.merge(options || {})
if block_given?
clear_system_status
Spawnling.new do
status = BarkestCore::GlobalStatus.new
if status.acquire_lock
status.set_message options[:main_status]
begin
yield status
ensure
status.release_lock
end
else
yield false
end
end
end
session[:status_comp_url] = options[:url_on_completion]
session[:status_comp_lbl] = options[:completion_button]
redirect_to status_current_url
end
|
ruby
|
{
"resource": ""
}
|
q6464
|
BarkestCore.StatusHelper.clear_system_status
|
train
|
def clear_system_status
unless BarkestCore::GlobalStatus.locked?
# open, truncate, and close.
File.open(BarkestCore::WorkPath.system_status_file,'w').close
end
end
|
ruby
|
{
"resource": ""
}
|
q6465
|
BBLib.NumericEnhancements.to_delimited_s
|
train
|
def to_delimited_s(delim = ',')
split = self.to_s.split('.')
split[0] = split.first.reverse.gsub(/(\d{3})/, "\\1#{delim}").reverse
split.join('.').uncapsulate(',')
end
|
ruby
|
{
"resource": ""
}
|
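A standalone sketch of the digit-grouping trick used by `to_delimited_s` above, re-implemented without BBLib's `String#uncapsulate` helper (a plain `sub` strips any leading delimiter instead).

```ruby
def delimit(number, delim = ',')
  int, frac = number.to_s.split('.')
  grouped = int.reverse.gsub(/(\d{3})/, "\\1#{delim}").reverse
  grouped = grouped.sub(/\A#{Regexp.escape(delim)}/, '') # drop a leading delimiter
  [grouped, frac].compact.join('.')
end

puts delimit(1234567.89) # => "1,234,567.89"
puts delimit(123456)     # => "123,456"
```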
q6466
|
Incline.JsonLogFormatter.call
|
train
|
def call(sev, time, _, msg) #:nodoc:
level = ({
Logger::DEBUG => 'DEBUG',
Logger::INFO => 'INFO',
Logger::WARN => 'WARN',
Logger::ERROR => 'ERROR',
Logger::FATAL => 'FATAL',
}[sev] || sev.to_s).upcase
if msg.present? && AUTO_DEBUG_PATTERNS.find{|pattern| msg =~ pattern}
return '' if debug_skip?
level = 'DEBUG'
end
if msg.present?
# And we'll expand exceptions so we get as much info as possible.
# If you just want the message, make sure you just pass the message.
if msg.is_a?(::Exception)
msg = "#{msg.message} (#{msg.class})\n#{(msg.backtrace || []).join("\n")}"
elsif !msg.is_a?(::String)
msg = msg.inspect
end
msg = rm_fmt msg
{
level: level,
time: time.strftime('%Y-%m-%d %H:%M:%S'),
message: msg,
app_name: app_name,
app_version: app_version,
process_id: Process.pid,
}.to_json + "\r\n"
else
''
end
end
|
ruby
|
{
"resource": ""
}
|
q6467
|
Shoehorn.BusinessCards.estimate_pdf_business_card_report
|
train
|
def estimate_pdf_business_card_report
xml = Builder::XmlMarkup.new
xml.instruct!
xml.Request(:xmlns => "urn:sbx:apis:SbxBaseComponents") do |xml|
connection.requester_credentials_block(xml)
xml.EstimatePdfBusinessCardReport
end
response = connection.post_xml(xml)
document = REXML::Document.new(response)
number_of_cards = document.elements["EstimatePdfBusinessCardReportCallResponse"].elements["NumberOfBusinessCards"].text.to_i rescue 0
number_of_pages = document.elements["EstimatePdfBusinessCardReportCallResponse"].elements["NumberOfPages"].text.to_i rescue 0
return number_of_cards, number_of_pages
end
|
ruby
|
{
"resource": ""
}
|
q6468
|
Shoehorn.BusinessCards.generate_pdf_business_card_report
|
train
|
def generate_pdf_business_card_report
xml = Builder::XmlMarkup.new
xml.instruct!
xml.Request(:xmlns => "urn:sbx:apis:SbxBaseComponents") do |xml|
connection.requester_credentials_block(xml)
xml.GeneratePdfBusinessCardReport
end
response = connection.post_xml(xml)
document = REXML::Document.new(response)
document.elements["GeneratePdfBusinessCardReportCallResponse"].elements["URL"].text
end
|
ruby
|
{
"resource": ""
}
|
q6469
|
Shoehorn.BusinessCards.notify_preference
|
train
|
def notify_preference
xml = Builder::XmlMarkup.new
xml.instruct!
xml.Request(:xmlns => "urn:sbx:apis:SbxBaseComponents") do |xml|
connection.requester_credentials_block(xml)
xml.GetBusinessCardNotifyPreferenceCall
end
response = connection.post_xml(xml)
document = REXML::Document.new(response)
document.elements["GetBusinessCardNotifyPreferenceCallResponse"].elements["BusinessCardNotifyPreference"].text == "1"
end
|
ruby
|
{
"resource": ""
}
|
q6470
|
Shoehorn.BusinessCards.notify_preference=
|
train
|
def notify_preference=(value)
if value
translated_value = "1"
else
translated_value = "0"
end
xml = Builder::XmlMarkup.new
xml.instruct!
xml.Request(:xmlns => "urn:sbx:apis:SbxBaseComponents") do |xml|
connection.requester_credentials_block(xml)
xml.SetBusinessCardNotifyPreferenceCall do |xml|
xml.BusinessCardNotifyPreference(translated_value)
end
end
response = connection.post_xml(xml)
# TODO: Retrieve the new value to make sure it worked?
value
end
|
ruby
|
{
"resource": ""
}
|
q6471
|
Shoehorn.BusinessCards.auto_share_contact_details
|
train
|
def auto_share_contact_details
xml = Builder::XmlMarkup.new
xml.instruct!
xml.Request(:xmlns => "urn:sbx:apis:SbxBaseComponents") do |xml|
connection.requester_credentials_block(xml)
xml.GetAutoShareContactDetailsCall
end
response = connection.post_xml(xml)
details = Hash.new
document = REXML::Document.new(response)
details_element = document.elements["GetAutoShareContactDetailsCallResponse"]
details[:first_name] = details_element.elements["FirstName"].text
details[:last_name] = details_element.elements["LastName"].text
details[:email] = details_element.elements["Email"].text
details[:additional_contact_info] = details_element.elements["AdditionalContactInfo"].text
details
end
|
ruby
|
{
"resource": ""
}
|
q6472
|
Yummi.Colorizer.color_for
|
train
|
def color_for (arg)
arg = Yummi::Context::new(arg) unless arg.is_a? Context
call(arg)
end
|
ruby
|
{
"resource": ""
}
|
q6473
|
Remi.Parser::CsvFile.parse
|
train
|
def parse(data)
# Assumes that each file has exactly the same structure
result_df = nil
Array(data).each_with_index do |filename, idx|
filename = filename.to_s
logger.info "Converting #{filename} to a dataframe"
processed_filename = preprocess(filename)
csv_df = Daru::DataFrame.from_csv processed_filename, @csv_options
# Daru 0.1.4 doesn't add vectors if it's a headers-only file
if csv_df.vectors.size == 0
headers_df = Daru::DataFrame.from_csv processed_filename, @csv_options.merge(return_headers: true)
csv_df = Daru::DataFrame.new([], order: headers_df.vectors.to_a)
end
csv_df[@filename_field] = Daru::Vector.new([filename] * csv_df.size, index: csv_df.index) if @filename_field
if idx == 0
result_df = csv_df
else
result_df = result_df.concat csv_df
end
end
Remi::DataFrame.create(:daru, result_df)
end
|
ruby
|
{
"resource": ""
}
|
q6474
|
Remi.Encoder::CsvFile.encode
|
train
|
def encode(dataframe)
logger.info "Writing CSV file to temporary location #{@working_file}"
label_columns = self.fields.reduce({}) { |h, (k, v)|
if v[:label]
h[k] = v[:label].to_sym
end
h
}
dataframe.rename_vectors label_columns
dataframe.write_csv @working_file, @csv_options
@working_file
end
|
ruby
|
{
"resource": ""
}
|
q6475
|
Garcon.ImmediateExecutor.post
|
train
|
def post(*args, &task)
raise ArgumentError, 'no block given' unless block_given?
return false unless running?
task.call(*args)
true
end
|
ruby
|
{
"resource": ""
}
|
q6476
|
HtmlMockup.Template.target_extension
|
train
|
def target_extension
return @target_extension if @target_extension
if type = MIME::Types[self.target_mime_type].first
# Dirty little hack to enforce the use of .html instead of .htm
if type.sub_type == "html"
@target_extension = "html"
else
@target_extension = type.extensions.first
end
else
@target_extension = File.extname(self.source_path.to_s).sub(/^\./, "")
end
end
|
ruby
|
{
"resource": ""
}
|
q6477
|
Dragonfly.AzureDataStore.update_metadata
|
train
|
def update_metadata(uid)
return false unless store_meta
path = full_path(uid)
meta = storage(:get_blob, container_name, path)[0].metadata
return false if meta.present?
meta = meta_from_file(path)
return false if meta.blank?
storage(:set_blob_metadata, container_name, path, meta)
storage(:delete_blob, container_name, meta_path(path))
true
rescue Azure::Core::Http::HTTPError
nil
end
|
ruby
|
{
"resource": ""
}
|
q6478
|
Ruta.Router.map
|
train
|
def map ref,route, options={}
context = Context.collection[get_context]
context.routes[ref]= Route.new(route, context,options)
end
|
ruby
|
{
"resource": ""
}
|
q6479
|
Ruta.Router.root_to
|
train
|
def root_to reference
Router.set_root_to reference
context = Context.collection[reference]
context.routes[:root]= Route.new('/', context,{ context: reference})
end
|
ruby
|
{
"resource": ""
}
|
q6480
|
Kenna.Api.fakeUser
|
train
|
def fakeUser
@roles = ['administrator', 'normal user', 'Linux Test Environment']
@role = @roles[rand(0..2)]
@fake_user = {
"user":
{
"firstname": Faker::Name.first_name,
"lastname": Faker::Name.last_name,
"email": Faker::Internet.email,
"role": @role
}
}
end
|
ruby
|
{
"resource": ""
}
|
q6481
|
ActionCommand.LogMessage.populate
|
train
|
def populate(line, msg)
@line = line
@sequence = msg['sequence']
@depth = msg['depth']
@cmd = msg['cmd']
@kind = msg['kind']
@msg = msg['msg']
@key = msg['key']
end
|
ruby
|
{
"resource": ""
}
|
q6482
|
ActionCommand.LogParser.next
|
train
|
def next(msg)
# be tolerant of the fact that there might be other
# stuff in the log file.
next_line do |input, line|
if input.key?('sequence')
msg.populate(line, input) unless @sequence && @sequence != input['sequence']
return true
end
end
return false
end
|
ruby
|
{
"resource": ""
}
|
q6483
|
Bebox.Project.generate_ruby_version
|
train
|
def generate_ruby_version
ruby_version = (RUBY_PATCHLEVEL == 0) ? RUBY_VERSION : "#{RUBY_VERSION}-p#{RUBY_PATCHLEVEL}"
File.open("#{self.path}/.ruby-version", 'w') do |f|
f.write ruby_version
end
end
|
ruby
|
{
"resource": ""
}
|
q6484
|
Bebox.Project.generate_steps_templates
|
train
|
def generate_steps_templates
Bebox::PROVISION_STEPS.each do |step|
ssh_key = ''
step_dir = Bebox::Provision.step_name(step)
templates_path = Bebox::Project::templates_path
# Generate site.pp template
generate_file_from_template("#{templates_path}/puppet/#{step}/manifests/site.pp.erb", "#{self.path}/puppet/steps/#{step_dir}/manifests/site.pp", {nodes: []})
# Generate hiera.yaml template
generate_file_from_template("#{templates_path}/puppet/#{step}/hiera/hiera.yaml.erb", "#{self.path}/puppet/steps/#{step_dir}/hiera/hiera.yaml", {step_dir: step_dir})
# Generate common.yaml template
generate_file_from_template("#{templates_path}/puppet/#{step}/hiera/data/common.yaml.erb", "#{self.path}/puppet/steps/#{step_dir}/hiera/data/common.yaml", {step_dir: step_dir, ssh_key: ssh_key, project_name: self.shortname})
end
end
|
ruby
|
{
"resource": ""
}
|
q6485
|
Bebox.ProvisionWizard.apply_step
|
train
|
def apply_step(project_root, environment, step)
# Check if environment has configured the ssh keys
(return warn _('wizard.provision.ssh_key_advice')%{environment: environment}) unless Bebox::Environment.check_environment_access(project_root, environment)
nodes_to_step = Bebox::Node.nodes_in_environment(project_root, environment, previous_checkpoint(step))
# Check if there are nodes for provisioning step-N
(return warn _('wizard.provision.no_provision_nodes')%{step: step}) unless nodes_to_step.count > 0
nodes_for_provisioning(nodes_to_step, step)
# Apply the nodes provisioning for step-N
in_step_nodes = Bebox::Node.list(project_root, environment, "steps/#{step}")
outputs = []
nodes_to_step.each do |node|
next unless check_node_to_step(node, in_step_nodes, step)
outputs << provision_step_in_node(project_root, environment, step, in_step_nodes, node)
end
return outputs
end
|
ruby
|
{
"resource": ""
}
|
q6486
|
CoinOp::Bit.Fee.estimate
|
train
|
def estimate(unspents, payees, network:, tx_size: nil, fee_per_kb: nil)
# https://en.bitcoin.it/wiki/Transaction_fees
# dupe because we'll need to add a change output
payees = payees.dup
unspent_total = unspents.inject(0) { |sum, output| sum += output.value }
payee_total = payees.inject(0) { |sum, payee| sum += payee.value }
nominal_change = unspent_total - payee_total
payees << Output.new(value: nominal_change, network: network) if nominal_change > 0
tx_size ||= estimate_tx_size(unspents, payees)
# conditions for 0-fee transactions
small = tx_size < 1000
min_payee = payees.min_by { |payee| payee.value }
big_outputs = min_payee.value > 1_000_000
high_priority = priority(
size: tx_size,
unspents: unspents.map { |output| { value: output.value, age: output.confirmations } }
) > PRIORITY_THRESHOLD
# 0-fee requirements met
return 0 if small && big_outputs && high_priority
# Otherwise, calculate the fee by size
fee_for_bytes(tx_size, network: network, fee_per_kb: fee_per_kb)
end
|
ruby
|
{
"resource": ""
}
|
q6487
|
HttpMagic.Api.post
|
train
|
def post(data = {})
request = Request.new(@uri,
headers: @headers,
data: data,
)
request.post
end
|
ruby
|
{
"resource": ""
}
|
q6488
|
HttpMagic.Api.put
|
train
|
def put(data = {})
request = Request.new(@uri,
headers: @headers,
data: data,
)
request.put
end
|
ruby
|
{
"resource": ""
}
|
q6489
|
Chizuru.Bot.consumer
|
train
|
def consumer(cons, *init_args, &block)
if cons.instance_of? Class
cons = cons.new(*init_args)
end
ch = ConsumerHelper.new(cons, credential)
ch.instance_eval &block
provider.add_consumer(ch.consumer)
end
|
ruby
|
{
"resource": ""
}
|
q6490
|
HasMetadataColumn.Extensions.attribute
|
train
|
def attribute(attr)
return super unless self.class.metadata_column_fields.include?(attr.to_sym)
options = self.class.metadata_column_fields[attr.to_sym] || {}
default = options.include?(:default) ? options[:default] : nil
_metadata_hash.include?(attr) ? HasMetadataColumn.metadata_typecast(_metadata_hash[attr], options[:type]) : default
end
|
ruby
|
{
"resource": ""
}
|
q6491
|
Hoodie.ANSI.build_ansi_methods
|
train
|
def build_ansi_methods(hash)
hash.each do |key, value|
define_method(key) do |string=nil, &block|
result = Array.new
result << %(\e[#{value}m)
if block_given?
result << block.call
elsif string.respond_to?(:to_str)
result << string.to_str
elsif respond_to?(:to_str)
result << to_str
else
return result
end
result << %(\e[0m)
result.join
end
end
true
end
|
ruby
|
{
"resource": ""
}
|
q6492
|
Hoodie.ANSI.uncolor
|
train
|
def uncolor(string = nil, &block)
if block_given?
block.call.to_str.gsub(ANSI_REGEX, '')
elsif string.respond_to?(:to_str)
string.to_str.gsub(ANSI_REGEX, '')
elsif respond_to?(:to_str)
to_str.gsub(ANSI_REGEX, '')
else
''
end
end
|
ruby
|
{
"resource": ""
}
|
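A standalone illustration of stripping ANSI colour codes as `uncolor` above does. The real ANSI_REGEX constant is not shown in this row, so a typical escape-sequence pattern is assumed here.

```ruby
ANSI_PATTERN = /\e\[[0-9;]*m/ # assumed pattern; the gem's ANSI_REGEX may differ
colored = "\e[31mred text\e[0m"
puts colored.gsub(ANSI_PATTERN, '')
# => "red text"
```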
q6493
|
Incline.RecaptchaValidator.validate_each
|
train
|
def validate_each(record, attribute, value)
# Do NOT raise an error if nil.
return if value.blank?
# Make sure the response only gets processed once.
return if value == :verified
# Automatically skip validation if paused.
return if Incline::Recaptcha::paused?
# If the user form includes the recaptcha field, then something will come in
# and then we want to check it.
remote_ip, _, response = value.partition('|')
if remote_ip.blank? || response.blank?
record.errors[:base] << (options[:message] || 'Requires reCAPTCHA challenge to be completed')
else
if Incline::Recaptcha::verify(response: response, remote_ip: remote_ip)
record.send "#{attribute}=", :verified
else
record.errors[:base] << (options[:message] || 'Invalid response from reCAPTCHA challenge')
end
end
end
|
ruby
|
{
"resource": ""
}
|
q6494
|
Aliasable.ControllingClassMethods.included
|
train
|
def included( klass )
klass.extend AliasingClassMethods
klass.extend UniversalClassMethods
# Hoo boy. We need to set the @@classy_aliases class variable in the
# including class to point to the same actual hash object that the
# @@classy_aliases variable on the controlling module points to. When
# everything is class based, this is done automatically, since
# sub-classes share class variables.
#
klass.send(:class_variable_set, :@@classy_aliases, self.send(:class_variable_get, :@@classy_aliases))
super
end
|
ruby
|
{
"resource": ""
}
|
q6495
|
DaFace.Utilities.symbolize_keys
|
train
|
def symbolize_keys keys, hash
new_hash = {}
keys.each do |key|
if hash[key].kind_of? Hash
new_hash[key.to_sym] = symbolize_keys(hash[key].keys, hash[key])
elsif hash[key].kind_of? Array
new_hash[key.to_sym] = []
hash[key].each do |item|
if item.kind_of? Hash
new_hash[key.to_sym] << symbolize_keys(item.keys, item)
else
new_hash[key.to_sym] << item
end
end
else
new_hash[key.to_sym] = hash[key]
end
end
return new_hash
end
|
ruby
|
{
"resource": ""
}
|
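A usage sketch for `symbolize_keys` above, assuming the helper is mixed in and callable in the current scope; the sample hash is made up.

```ruby
hash = {
  'name' => 'Ada',
  'meta' => { 'lang' => 'en' },
  'tags' => [{ 'id' => 1 }, 'plain']
}

symbolize_keys(hash.keys, hash)
# => { name: "Ada", meta: { lang: "en" }, tags: [{ id: 1 }, "plain"] }
```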
q6496
|
Hornetseye.MultiArrayConstructor.constructor_shortcut
|
train
|
def constructor_shortcut( target )
define_method target.to_s.downcase do |*args|
new target, *args
end
end
|
ruby
|
{
"resource": ""
}
|
q6497
|
BetterRailsDebugger.Analyzer.collect_information
|
train
|
def collect_information(identifier, group_id)
group = ::BetterRailsDebugger::AnalysisGroup.find group_id
if not group.present?
Rails.logger.error "[BetterRailsDebugger] Group '#{recorded[:group_id]}' not found. Skiping..."
return
end
# Load Mongo db if required
if not Mongoid::Config.configured?
Mongoid.load!(BetterRailsDebugger::Configuration.instance.mongoid_config_file, Rails.env.to_sym)
Mongoid.logger.level = Logger::FATAL
end
instance = ::BetterRailsDebugger::GroupInstance.create identifier: identifier, analysis_group_id: group_id, caller_file: caller[3][/[^:]+/], status: 'pending'
collect_memory_information(instance)
collect_trace_point_history(instance)
# Now, it's time to analyze all collected data and generate a report
::BetterRailsDebugger::AnalysisRecorderJob.perform_later({ instance_id: instance.id.to_s })
end
|
ruby
|
{
"resource": ""
}
|
q6498
|
KeytechKit.ElementFileHandler.masterfile?
|
train
|
def masterfile?(element_key)
if Tools.class_type(element_key) == 'DO' # Only DO Types can have a file
file_list = load(element_key)
unless file_list.nil?
file_list.each do |file|
return true if file.fileStorageType.casecmp('master').zero?
end
end
end
false
end
|
ruby
|
{
"resource": ""
}
|
q6499
|
KeytechKit.ElementFileHandler.masterfile_name
|
train
|
def masterfile_name(element_key)
file_list = load(element_key)
unless file_list.nil?
file_list.each do |file|
return file.fileName if file.fileStorageType.casecmp('master').zero?
end
end
''
end
|
ruby
|
{
"resource": ""
}
|