_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q9000 | Redstruct.Factory.delete | train | def delete(options = {})
return each({ match: '*', count: 500, max_iterations: 1_000_000, batch_size: 500 }.merge(options)) do |keys|
@connection.del(*keys)
end
end | ruby | {
"resource": ""
} |
q9001 | Redstruct.Factory.script | train | def script(script, **options)
return Redstruct::Script.new(script: script, connection: @connection, **options)
end | ruby | {
"resource": ""
} |
q9002 | FocuspointRails.UploaderAdditions.crop_with_focuspoint | train | def crop_with_focuspoint(width = nil, height = nil)
if self.respond_to? "resize_to_limit"
begin
x = model.focus_x || 0
y = -(model.focus_y || 0)
manipulate! do |img|
orig_w = img['width']
orig_h = img['height']
ratio = width.to_f / height
orig_ratio = orig_w.to_f / orig_h
x_offset = 0
y_offset = 0
w = orig_w
h = orig_h
if ratio < orig_ratio
w = orig_h * ratio
half_w = w / 2.0
half_orig_w = orig_w / 2.0
x_offset = x * half_orig_w
x_offset = (x <=> 0.0) * (half_orig_w - half_w) if x != 0 && x_offset.abs > half_orig_w - half_w
elsif ratio > orig_ratio
h = orig_w / ratio
half_h = h / 2.0
half_orig_h = orig_h / 2.0
y_offset = y * half_orig_h
y_offset = (y <=> 0.0) * (half_orig_h - half_h) if y != 0 && y_offset.abs > half_orig_h - half_h
end
img.combine_options do |op|
op.crop "#{w.to_i}x#{h.to_i}#{'%+d' % x_offset.round}#{'%+d' % y_offset.round}"
op.gravity 'Center'
end
img.resize("#{width}x#{height}")
img
end
rescue Exception => e
raise "Failed to crop - #{e.message}"
end
else
raise "Failed to crop #{attachment}. Add mini_magick."
end
end | ruby | {
"resource": ""
} |
q9003 | Breeze.Veur.report | train | def report(title, columns, rows)
table = capture_table([columns] + rows)
title = "=== #{title} "
title << "=" * [(table.split($/).max{|a,b| a.size <=> b.size }.size - title.size), 3].max
puts title
puts table
end | ruby | {
"resource": ""
} |
q9004 | Breeze.Veur.capture_table | train | def capture_table(table)
return 'none' if table.size == 1 # the first row is for column titles
$stdout = StringIO.new # start capturing the output
print_table(table.map{ |row| row.map(&:to_s) })
output = $stdout
$stdout = STDOUT # restore normal output
return output.string
end | ruby | {
"resource": ""
} |
q9005 | AndFeathers.Archive.to_io | train | def to_io(package_type, traversal = :each)
package_type.open do |package|
package.add_directory(@initial_version)
send(traversal) do |child|
case child
when File
package.add_file(child)
when Directory
package.add_directory(child)
end
end
end
end | ruby | {
"resource": ""
} |
q9006 | Truty.Conversion.czech_html | train | def czech_html(input)
coder = HTMLEntities.new
encoded = coder.encode(input, :named, :decimal)
czech_diacritics.each { |k, v| encoded.gsub!(k, v) }
encoded
end | ruby | {
"resource": ""
} |
q9007 | FreelingClient.Client.call | train | def call(text)
output = []
file = Tempfile.new('foo', encoding: 'utf-8')
begin
file.write(text)
file.close
stdin, stdout, stderr = Open3.popen3(command(file.path))
Timeout::timeout(@timeout) {
until (line = stdout.gets).nil?
output << line.chomp
end
message = stderr.readlines
unless message.empty?
raise ExtractionError, message.join("\n")
end
}
rescue Timeout::Error
raise ExtractionError, "Timeout"
ensure
file.close
file.unlink
end
output
end | ruby | {
"resource": ""
} |
q9008 | Cathode.Version.action? | train | def action?(resource, action)
resource = resource.to_sym
action = action.to_sym
return false unless resource?(resource)
_resources.find(resource).actions.names.include? action
end | ruby | {
"resource": ""
} |
q9009 | MARC.MARC4J.marc4j_to_rubymarc | train | def marc4j_to_rubymarc(marc4j)
rmarc = MARC::Record.new
rmarc.leader = marc4j.getLeader.marshal
marc4j.getControlFields.each do |marc4j_control|
rmarc.append( MARC::ControlField.new(marc4j_control.getTag(), marc4j_control.getData ) )
end
marc4j.getDataFields.each do |marc4j_data|
rdata = MARC::DataField.new( marc4j_data.getTag, marc4j_data.getIndicator1.chr, marc4j_data.getIndicator2.chr )
marc4j_data.getSubfields.each do |subfield|
# We assume Marc21, skip corrupted data
# if subfield.getCode is more than 255, subsequent .chr
# would raise.
if subfield.getCode > 255
if @logger
@logger.warn("Marc4JReader: Corrupted MARC data, record id #{marc4j.getControlNumber}, field #{marc4j_data.tag}, corrupt subfield code byte #{subfield.getCode}. Skipping subfield, but continuing with record.")
end
next
end
rsubfield = MARC::Subfield.new(subfield.getCode.chr, subfield.getData)
rdata.append rsubfield
end
rmarc.append rdata
end
return rmarc
end | ruby | {
"resource": ""
} |
q9010 | MARC.MARC4J.rubymarc_to_marc4j | train | def rubymarc_to_marc4j(rmarc)
marc4j = @factory.newRecord(rmarc.leader)
rmarc.each do |f|
if f.is_a? MARC::ControlField
new_field = @factory.newControlField(f.tag, f.value)
else
new_field = @factory.new_data_field(f.tag, f.indicator1.ord, f.indicator2.ord)
f.each do |sf|
new_field.add_subfield(@factory.new_subfield(sf.code.ord, sf.value))
end
end
marc4j.add_variable_field(new_field)
end
return marc4j
end | ruby | {
"resource": ""
} |
q9011 | MARC.MARC4J.require_marc4j_jar | train | def require_marc4j_jar(jardir)
unless defined? JRUBY_VERSION
raise LoadError.new, "MARC::MARC4J requires the use of JRuby", nil
end
if jardir
Dir.glob("#{jardir}/*.jar") do |x|
require x
end
else
Dir.glob(File.join(DEFAULT_JAR_RELATIVE_DIR, "*.jar")) do |x|
require x
end
end
end | ruby | {
"resource": ""
} |
q9012 | Temppath.Generator.mkdir | train | def mkdir(option={})
mode = option[:mode] || 0700
path = create(option)
path.mkdir(mode)
return path
end | ruby | {
"resource": ""
} |
q9013 | Temppath.Generator.touch | train | def touch(option={})
mode = option[:mode] || 0600
path = create(option)
path.open("w", mode)
return path
end | ruby | {
"resource": ""
} |
q9014 | VoterLove.Voter.up_vote | train | def up_vote(votable)
is_votable?(votable)
vote = get_vote(votable)
if vote
if vote.up_vote
raise Exceptions::AlreadyVotedError.new(true)
else
vote.up_vote = true
votable.down_votes -= 1
self.down_votes -= 1 if has_attribute?(:down_votes)
end
else
vote = Vote.create(:votable => votable, :voter => self, :up_vote => true)
end
votable.up_votes += 1
self.up_votes += 1 if has_attribute?(:up_votes)
Vote.transaction do
save
votable.save
vote.save
end
true
end | ruby | {
"resource": ""
} |
q9015 | VoterLove.Voter.up_vote! | train | def up_vote!(votable)
begin
up_vote(votable)
success = true
rescue Exceptions::AlreadyVotedError
success = false
end
success
end | ruby | {
"resource": ""
} |
q9016 | VoterLove.Voter.down_vote! | train | def down_vote!(votable)
begin
down_vote(votable)
success = true
rescue Exceptions::AlreadyVotedError
success = false
end
success
end | ruby | {
"resource": ""
} |
q9017 | VoterLove.Voter.up_voted? | train | def up_voted?(votable)
is_votable?(votable)
vote = get_vote(votable)
return false if vote.nil?
return true if vote.has_attribute?(:up_vote) && vote.up_vote
false
end | ruby | {
"resource": ""
} |
q9018 | Spirit.Manifest.check_types | train | def check_types(key='root', expected=TYPES, actual=self, opts={})
bad_type(key, expected, actual, opts) unless actual.is_a? expected.class
case actual
when Hash then actual.each { |k, v| check_types(k, expected[k], v) }
when Enumerable then actual.each { |v| check_types(key, expected.first, v, enum: true) }
end
end | ruby | {
"resource": ""
} |
q9019 | Cellula.WolframCodeRule.next_generation_cell | train | def next_generation_cell(left, middle, right)
case [left, middle, right]
when [1,1,1] then @binary_string[0].to_i
when [1,1,0] then @binary_string[1].to_i
when [1,0,1] then @binary_string[2].to_i
when [1,0,0] then @binary_string[3].to_i
when [0,1,1] then @binary_string[4].to_i
when [0,1,0] then @binary_string[5].to_i
when [0,0,1] then @binary_string[6].to_i
when [0,0,0] then @binary_string[7].to_i
end
end | ruby | {
"resource": ""
} |
q9020 | JiraIssues.JiraIssueMapper.call | train | def call(issue)
status = decode_status(issue)
{
key: issue.key,
type: issue.issuetype.name,
priority: issue.priority.name,
status: status,
#description: i.description,
summary: issue.summary,
created_date: issue.created,
closed_date: issue.resolutiondate
}
end | ruby | {
"resource": ""
} |
q9021 | AndFeathers.Directory.path | train | def path
if @parent
::File.join(@parent.path, name)
else
if name != '.'
::File.join('.', name)
else
name
end
end
end | ruby | {
"resource": ""
} |
q9022 | AndFeathers.Directory.| | train | def |(other)
if !other.is_a?(Directory)
raise ArgumentError, "#{other} is not a Directory"
end
dup.tap do |directory|
other.files.each do |file|
directory.add_file(file.dup)
end
other.directories.each do |new_directory|
existing_directory = @directories[new_directory.name]
if existing_directory.nil?
directory.add_directory(new_directory.dup)
else
directory.add_directory(new_directory | existing_directory)
end
end
end
end | ruby | {
"resource": ""
} |
q9023 | AndFeathers.Directory.each | train | def each(&block)
files.each(&block)
directories.each do |subdirectory|
block.call(subdirectory)
subdirectory.each(&block)
end
end | ruby | {
"resource": ""
} |
q9024 | Characterizable.BetterHash.slice | train | def slice(*keep)
inject(Characterizable::BetterHash.new) do |memo, ary|
if keep.include?(ary[0])
memo[ary[0]] = ary[1]
end
memo
end
end | ruby | {
"resource": ""
} |
q9025 | QueueToTheFuture.Job.method_missing | train | def method_missing(*args, &block)
Thread.pass until defined?(@result)
case @result
when Exception
def self.method_missing(*args, &block); raise @result; end
else
def self.method_missing(*args, &block); @result.send(*args, &block); end
end
self.method_missing(*args, &block)
end | ruby | {
"resource": ""
} |
q9026 | EnumerateBy.MacroMethods.enumerate_by | train | def enumerate_by(attribute = :name, options = {})
options.reverse_merge!(:cache => true)
options.assert_valid_keys(:cache)
extend EnumerateBy::ClassMethods
extend EnumerateBy::Bootstrapped
include EnumerateBy::InstanceMethods
# The attribute representing a record's enumerator
cattr_accessor :enumerator_attribute
self.enumerator_attribute = attribute
# Whether to perform caching of enumerators within finder queries
cattr_accessor :perform_enumerator_caching
self.perform_enumerator_caching = options[:cache]
# The cache store to use for queries (default is a memory store)
cattr_accessor :enumerator_cache_store
self.enumerator_cache_store = ActiveSupport::Cache::MemoryStore.new
validates_presence_of attribute
validates_uniqueness_of attribute
end | ruby | {
"resource": ""
} |
q9027 | EnumerateBy.ClassMethods.typecast_enumerator | train | def typecast_enumerator(enumerator)
if enumerator.is_a?(Array)
enumerator.flatten!
enumerator.map! {|value| typecast_enumerator(value)}
enumerator
else
enumerator.is_a?(Symbol) ? enumerator.to_s : enumerator
end
end | ruby | {
"resource": ""
} |
q9028 | Humpyard.Page.root_elements | train | def root_elements(yield_name = 'main')
# my own elements
ret = elements.where('container_id IS NULL and page_yield_name = ?', yield_name.to_s).order('position ASC')
# sibling shared elements
unless siblings.empty?
ret += Humpyard::Element.where('container_id IS NULL and page_id in (?) and page_yield_name = ? and shared_state = ?', siblings, yield_name.to_s, Humpyard::Element::SHARED_STATES[:shared_on_siblings]).order('position ASC')
end
# ancestors shared elements
unless ancestor_pages.empty?
ret += Humpyard::Element.where('container_id IS NULL and page_id in (?) and page_yield_name = ? and shared_state = ?', ancestor_pages, yield_name.to_s, Humpyard::Element::SHARED_STATES[:shared_on_children]).order('position ASC')
end
ret
end | ruby | {
"resource": ""
} |
q9029 | Humpyard.Page.child_pages | train | def child_pages options={}
if content_data.is_humpyard_dynamic_page?
content_data.child_pages
else
if options[:single_root] and is_root_page?
Page.where(["parent_id = ? or parent_id IS NULL and NOT id = ?", id, id])
else
children
end
end
end | ruby | {
"resource": ""
} |
q9030 | Humpyard.Page.last_modified | train | def last_modified options = {}
changed_at = [Time.zone.at(::File.new("#{Rails.root}").mtime), created_at, updated_at, modified_at]
if(options[:include_pages])
changed_at << Humpyard::Page.select('updated_at').order('updated_at DESC').first.updated_at
end
(changed_at - [nil]).max.utc
end | ruby | {
"resource": ""
} |
q9031 | Dragonfly.DropboxDataStore.url_for | train | def url_for(path, opts = {})
path = absolute(path)
(opts[:expires] ? storage.media(path) : storage.shares(path))['url']
end | ruby | {
"resource": ""
} |
q9032 | BoardGameGrid.Square.attribute_match? | train | def attribute_match?(attribute, value)
hash_obj_matcher = lambda do |obj, k, v|
value = obj.send(k)
if !value.nil? && v.is_a?(Hash)
v.all? { |k2,v2| hash_obj_matcher.call(value, k2, v2) }
else
value == v
end
end
hash_obj_matcher.call(self, attribute, value)
end | ruby | {
"resource": ""
} |
q9033 | Dio.ModuleBase.included | train | def included(base)
my_injector = injector
injector_holder = Module.new do
define_method :__dio_injector__ do
my_injector
end
end
base.extend(ClassMethods, injector_holder)
base.include(InstanceMethods)
end | ruby | {
"resource": ""
} |
q9034 | Smsified.OneAPI.send_sms | train | def send_sms(options)
raise ArgumentError, 'an options Hash is required' if !options.instance_of?(Hash)
raise ArgumentError, ':sender_address is required' if options[:sender_address].nil? && @sender_address.nil?
raise ArgumentError, ':address is required' if options[:address].nil?
raise ArgumentError, ':message is required' if options[:message].nil?
options[:sender_address] = options[:sender_address] || @sender_address
query_options = options.clone
query_options.delete(:sender_address)
query_options = camelcase_keys(query_options)
Response.new self.class.post("/smsmessaging/outbound/#{options[:sender_address]}/requests",
:body => build_query_string(query_options),
:basic_auth => @auth,
:headers => SMSIFIED_HTTP_HEADERS)
end | ruby | {
"resource": ""
} |
q9035 | Smsified.OneAPI.method_missing | train | def method_missing(method, *args)
if method.to_s.match /subscription/
if args.size == 2
@subscriptions.send method, args[0], args[1]
else
@subscriptions.send method, args[0]
end
else
if method == :delivery_status || method == :retrieve_sms || method == :search_sms
@reporting.send method, args[0]
else
raise RuntimeError, 'Unknown method'
end
end
end | ruby | {
"resource": ""
} |
q9036 | Aker::Authorities.AutomaticAccess.amplify! | train | def amplify!(user)
user.portals << @portal unless user.portals.include?(@portal)
user.default_portal = @portal unless user.default_portal
user
end | ruby | {
"resource": ""
} |
q9037 | SimpleMetarParser.Metar.decode | train | def decode
self.raw_splits.each do |split|
self.modules.each do |m|
m.decode_split(split)
end
end
end | ruby | {
"resource": ""
} |
q9038 | Domain.Factory.sbyc | train | def sbyc(super_domain = Object, &pred)
Class.new(super_domain){ extend SByC.new(super_domain, pred) }
end | ruby | {
"resource": ""
} |
q9039 | Wireless.SynchronizedStore.get_or_create | train | def get_or_create(key)
@lock.synchronize do
if @store.include?(key)
@store[key]
elsif block_given?
@store[key] = yield
else
# XXX don't expose the receiver as this class is an internal
# implementation detail
raise Wireless::KeyError.new(
"#{@type} not found: #{key}",
key: key
)
end
end
end | ruby | {
"resource": ""
} |
q9040 | UnlockGateway.Controller.transition_state | train | def transition_state(state)
authorize @contribution
@initiative = @contribution.initiative
@user = @contribution.user
state = state.to_sym
transition = @contribution.transition_by_state(state)
initial_state = @contribution.state_name
resource_name = @contribution.class.model_name.human
if @contribution.send("can_#{transition}?")
begin
if @contribution.state_on_gateway != state
if @contribution.update_state_on_gateway!(state)
@contribution.send("#{transition}!")
else
flash[:alert] = t('flash.actions.update.alert', resource_name: resource_name)
end
else
@contribution.send("#{transition}!")
end
rescue
flash[:alert] = t('flash.actions.update.alert', resource_name: resource_name)
end
else
flash[:alert] = t('flash.actions.update.alert', resource_name: resource_name)
end
if flash[:alert].present?
render 'initiatives/contributions/show'
else
if initial_state == :pending
flash[:notice] = t('flash.actions.create.notice', resource_name: resource_name)
else
flash[:notice] = t('flash.actions.update.notice', resource_name: resource_name)
end
redirect_to initiative_contribution_path(@contribution.initiative.id, @contribution)
end
end | ruby | {
"resource": ""
} |
q9041 | Machined.CLI.rack_options | train | def rack_options # :nodoc:
symbolized_options(:port, :host, :server, :daemonize, :pid).tap do |rack_options|
rack_options[:environment] = environment
rack_options[:Port] = rack_options.delete :port
rack_options[:Host] = rack_options.delete :host
rack_options[:app] = machined
end
end | ruby | {
"resource": ""
} |
q9042 | Machined.CLI.symbolized_options | train | def symbolized_options(*keys) # :nodoc:
@symbolized_options ||= begin
opts = {}.merge(options)
opts.merge! saved_options if saved_options?
opts.symbolize_keys
end
@symbolized_options.slice(*keys)
end | ruby | {
"resource": ""
} |
q9043 | LookUpTable.ClassMethods.look_up_table | train | def look_up_table(lut_key, options = {}, &block)
options = {
:batch_size => 10000,
:prefix => "#{self.name}/",
:read_on_init => false,
:use_cache => true,
:sql_mode => true,
:where => nil
}.merge(options)
self.lut_set_proc(lut_key, block)
self.lut_set_options(lut_key, options)
self.lut(lut_key) if options[:read_on_init]
end | ruby | {
"resource": ""
} |
q9044 | LookUpTable.ClassMethods.lut | train | def lut(lut_key = nil, lut_item_key = nil)
@lut ||= {}
if lut_key.nil?
hash = {}
self.lut_keys.each { |key| hash[key] = self.lut(key) } # CHECK: use .inject?
return hash
end
@lut[lut_key.intern] ||= lut_read(lut_key) || {} if lut_key.respond_to?(:intern)
self.lut_deep_hash_call(:lut, @lut, lut_key, lut_item_key)
end | ruby | {
"resource": ""
} |
q9045 | LookUpTable.ClassMethods.lut_reload | train | def lut_reload(lut_key = nil)
if lut_key
lut_reset(lut_key)
lut(lut_key)
else
lut_keys.each { |k| lut_reload(k) }
end
lut_keys
end | ruby | {
"resource": ""
} |
q9046 | LookUpTable.ClassMethods.lut_read | train | def lut_read(name)
return nil unless options = lut_options(name)# HACK
if options[:use_cache]
lut_read_from_cache(name)
else
lut_read_without_cache(name)
end
end | ruby | {
"resource": ""
} |
q9047 | ActiveHarmony.SynchronizerConfiguration.synchronizable_for_types | train | def synchronizable_for_types(types)
@synchronizable_fields.select do |field_description|
types.include?(field_description[:type])
end.collect do |field_description|
field_description[:field]
end
end | ruby | {
"resource": ""
} |
q9048 | Measurement.Base.to_s | train | def to_s(unit = nil, precision = 0)
if unit.to_s =~ /_and_/
units = unit.to_s.split('_and_').map do |unit|
self.class.fetch_scale(unit)
end
UnitGroup.new(units).format(@amount, precision)
else
unit = self.class.fetch_scale(unit)
unit.format(@amount, precision)
end
end | ruby | {
"resource": ""
} |
q9049 | MatchyMatchy.MatchList.<< | train | def <<(match)
if include?(match)
match.reject!
else
@matches << match
@matches.sort!
@matches.pop.reject! if @matches.size > @capacity
end
self
end | ruby | {
"resource": ""
} |
q9050 | MethodInfo.AncestorMethodStructure.method_owner | train | def method_owner(method_symbol)
# Under normal circumstances just calling @object.method(method_symbol) would work,
# but this will go wrong if the object has redefined the method method.
method = Object.instance_method(:method).bind(@object).call(method_symbol)
method.owner
rescue NameError
poor_mans_method_owner(method, method_symbol.to_s)
end | ruby | {
"resource": ""
} |
q9051 | PhpFpmDocker.Launcher.parse_config | train | def parse_config # rubocop:disable MethodLength
# Test for file usability
fail "Config file '#{@config_path}' not found"\
unless @config_path.file?
fail "Config file '#{@config_path}' not readable"\
unless @config_path.readable?
@ini_file = IniFile.load(@config_path)
begin
docker_image = @ini_file[:main]['docker_image']
@docker_image = Docker::Image.get(docker_image)
@logger.info(to_s) do
"Docker image id=#{@docker_image.id[0..11]} name=#{docker_image}"
end
rescue NoMethodError
raise 'No docker_image in section main in config found'
rescue Docker::Error::NotFoundError
raise "Docker_image '#{docker_image}' not found"
rescue Excon::Errors::SocketError => e
raise "Docker connection could not be established: #{e.message}"
end
end | ruby | {
"resource": ""
} |
q9052 | PhpFpmDocker.Launcher.pools_config_content_from_file | train | def pools_config_content_from_file(config_path)
ini_file = IniFile.load(config_path)
ret_val = []
ini_file.each_section do |section|
ret_val << [section, ini_file[section]]
end
ret_val
end | ruby | {
"resource": ""
} |
q9053 | PhpFpmDocker.Launcher.pools_config_contents | train | def pools_config_contents
ret_val = []
# Loop over
Dir[@pools_directory.join('*.conf').to_s].each do |config_path|
ret_val += pools_config_content_from_file(config_path)
end
ret_val
end | ruby | {
"resource": ""
} |
q9054 | PhpFpmDocker.Launcher.pools_from_config | train | def pools_from_config
configs = {}
pools_config_contents.each do |section|
# Hash section name and content
d = Digest::SHA2.new(256)
hash = d.reset.update(section[0]).update(section[1].to_s).to_s
configs[hash] = {
name: section[0],
config: section[1]
}
end
configs
end | ruby | {
"resource": ""
} |
q9055 | Tkar.Primitives.polybox | train | def polybox args, key_args
dx, dy = args
# return a proc to make the info needed to instantiate/update
proc do |tkaroid, cos_r, sin_r|
x = tkaroid.x
y = tkaroid.y
params = tkaroid.params
ex = dx[params] rescue dx
ey = dy[params] rescue dy
points =
[ [ ex, ey],
[ ex, -ey],
[-ex, -ey],
[-ex, ey] ]
coords = []
points.each do |xv, yv|
coords << x + xv * cos_r - yv * sin_r
coords << y + xv * sin_r + yv * cos_r
end
## possible to skip below if no changes?
config = {}
handle_generic_config(config, params, key_args)
[TkcPolygon, coords, config]
end
end | ruby | {
"resource": ""
} |
q9056 | Redlander.ModelProxy.delete_all | train | def delete_all(pattern = {})
result = true
each(pattern) { |st| result &&= delete(st) }
result
end | ruby | {
"resource": ""
} |
q9057 | Redlander.ModelProxy.find | train | def find(scope, pattern = {})
case scope
when :first
each(pattern).first
when :all
each(pattern).to_a
else
raise RedlandError, "Invalid search scope '#{scope}' specified."
end
end | ruby | {
"resource": ""
} |
q9058 | CodeCache.Repo.location_in_cache | train | def location_in_cache( revision = nil )
begin
elements = [cache, repo_type, split_url, revision].flatten.compact.collect { |i| i.to_s }
File.join( elements )
rescue => e
raise CacheCalculationError.new(e.msg + e.backtrace.to_s)
end
end | ruby | {
"resource": ""
} |
q9059 | Mango.ContentPage.method_missing | train | def method_missing(method_name, *args, &block)
key = method_name.to_s
attributes.has_key?(key) ? attributes[key] : super
end | ruby | {
"resource": ""
} |
q9060 | Kawaii.ContentManager.load_image | train | def load_image(path, tileable = false)
if !@images[path]
@images[path] = Gosu::Image.new(@window, "#{@root}/#{path}", tileable)
end
@images[path]
end | ruby | {
"resource": ""
} |
q9061 | MustacheRender.Mustache.partial | train | def partial(name)
name = self.class.generate_template_name name, config.file_template_extension
# return self.read_template_from_media name, media
@_cached_partials ||= {}
(@_cached_partials[media] ||= {})[name] ||= self.read_template_from_media name, media
end | ruby | {
"resource": ""
} |
q9062 | HasPrice.HasPrice.has_price | train | def has_price(options = {}, &block)
attribute = options[:attribute] || :price
free = !block_given? && options[:free]
define_method attribute.to_sym do
builder = PriceBuilder.new self
builder.instance_eval &block unless free
builder.price
end
end | ruby | {
"resource": ""
} |
q9063 | Octo.Scheduler.schedule_counters | train | def schedule_counters
counter_classes = [
Octo::ProductHit,
Octo::CategoryHit,
Octo::TagHit,
Octo::ApiHit,
Octo::NewsfeedHit
]
counter_classes.each do |clazz|
clazz.send(:get_typecounters).each do |counter|
name = [clazz, counter].join('::')
config = {
class: clazz.to_s,
args: [counter],
cron: '* * * * *',
persist: true,
queue: 'high'
}
Resque.set_schedule name, config
end
end
# Schedules the processing of baselines
def schedule_baseline
baseline_classes = [
Octo::ProductBaseline,
Octo::CategoryBaseline,
Octo::TagBaseline
]
baseline_classes.each do |clazz|
clazz.send(:get_typecounters).each do |counter|
name = [clazz, counter].join('::')
config = {
class: clazz.to_s,
args: [counter],
cron: '* * * * *',
persists: true,
queue: 'baseline_processing'
}
Resque.set_schedule name, config
end
end
end
# Schedules the daily mail, to be sent at noon
def schedule_subscribermail
name = 'SubscriberDailyMailer'
config = {
class: Octo::Mailer::SubscriberMailer,
args: [],
cron: '0 0 * * *',
persist: true,
queue: 'subscriber_notifier'
}
Resque.set_schedule name, config
end
end | ruby | {
"resource": ""
} |
q9064 | Hosties.HasAttributes.have_attributes | train | def have_attributes(attr, *more)
sum = (more << attr)
sum.each do |name|
raise ArgumentError, "Reserved attribute name #{name}" if @verbotten.include?(name)
end
@attributes += sum
end | ruby | {
"resource": ""
} |
q9065 | Hosties.HasAttributes.where | train | def where(name)
# Must define the attributes before constraining them
raise ArgumentError, "Unknown attribute: #{name}" unless @attributes.include? name
@constraints[name] = AttributeConstraint.new(name)
end | ruby | {
"resource": ""
} |
q9066 | Hosties.HasAttributes.valid? | train | def valid?(name, value)
if @constraints.include? name then
constraints[name].possible_vals.include? value
else true end
end | ruby | {
"resource": ""
} |
q9067 | XBeeRuby.XBee.open | train | def open
@serial ||= SerialPort.new @port, @rate
@serial_input = Enumerator.new { |y| loop do
y.yield @serial.readbyte
end }
@connected = true
end | ruby | {
"resource": ""
} |
q9068 | UseCaseValidations.ClassMethods.inherited | train | def inherited(base)
dup = _validators.dup
base._validators = dup.each { |k, v| dup[k] = v.dup }
super
end | ruby | {
"resource": ""
} |
q9069 | MaRuKu.Section.numerate | train | def numerate(a=[])
self.section_number = a
section_children.each_with_index do |c,i|
c.numerate(a.clone.push(i+1))
end
if h = self.header_element
h.attributes[:section_number] = self.section_number
end
end | ruby | {
"resource": ""
} |
q9070 | ActsAsFeatured.ClassMethods.acts_as_featured | train | def acts_as_featured(attribute, options = {})
cattr_accessor :featured_attribute
cattr_accessor :featured_attribute_scope
self.featured_attribute = attribute
self.featured_attribute_scope = options[:scope] || false
if scope_name = options[:create_scope]
scope_name = attribute if scope_name == true
scope scope_name, -> { where(attribute => true).limit(1) }
end
before_save :remove_featured_from_other_records
after_save :add_featured_to_first_record
before_destroy :add_featured_to_first_record_if_featured
end | ruby | {
"resource": ""
} |
q9071 | TodoLint.Judge.make_charge | train | def make_charge
if !todo.annotated?
"Missing due date annotation"
elsif todo.due_date.overdue? && todo.tag?
"Overdue due date #{todo.due_date.to_date} via tag"
elsif todo.due_date.overdue?
"Overdue due date"
end
end | ruby | {
"resource": ""
} |
q9072 | XivelyConnector.Datastream.<< | train | def <<(measurement)
# Make sure the value provided is a datapoint
datapoint = cast_to_datapoint(measurement)
# If only_save_changes is true, ignore datapoints whose value is the same as the current value
if only_save_changes and BigDecimal.new(datapoint.value) == BigDecimal.new(current_value)
@logger.debug "Ignoring datapoint from #{datapoint.at} because value did not change from #{current_value}"
else
@current_value = datapoint.value
datapoints << datapoint
@logger.debug "Queuing datapoint from #{datapoint.at} with value #{current_value}"
end
# See if the buffer is full
check_datapoints_buffer
end | ruby | {
"resource": ""
} |
q9073 | XivelyConnector.Datastream.cast_to_datapoint | train | def cast_to_datapoint(measurement, at=Time.now())
@logger.debug "cast_to_datapoint(#{measurement.inspect})"
if measurement.is_a?(Xively::Datapoint)
return measurement
elsif measurement.is_a?(Hash)
raise "The datapoint hash does not contain :at" unless measurement[:at]
raise "The datapoint hash does not contain :value" unless measurement[:value]
return Xively::Datapoint.new(measurement)
else
return Xively::Datapoint.new(:at => at, :value => measurement.to_s)
end
end | ruby | {
"resource": ""
} |
q9074 | XivelyConnector.Datastream.save_datapoints | train | def save_datapoints
@logger.debug "Saving #{datapoints.size} datapoints to the #{id} datastream"
response = XivelyConnector.connection.post("/v2/feeds/#{device.id}/datastreams/#{id}/datapoints",
:body => {:datapoints => datapoints}.to_json)
# If the response succeeded, clear the datapoint buffer and return the response object
if response.success?
clear_datapoints_buffer
response
else
logger.error response.response
raise response.response
end
end | ruby | {
"resource": ""
} |
q9075 | Coach4rb.Client.put | train | def put(url, payload, options={}, &block)
http_options = options.merge(@basic_options)
if block_given?
RestClient.put(url, payload, http_options, &block)
else
RestClient.put(url, payload, http_options)
end
end | ruby | {
"resource": ""
} |
q9076 | Coach4rb.Client.delete | train | def delete(url, options={}, &block)
http_options = options.merge(@basic_options)
if block_given?
RestClient.delete(url, http_options, &block)
else
RestClient.delete(url, http_options)
end
end | ruby | {
"resource": ""
} |
q9077 | ObjectAttorney.ClassMethods.inherited | train | def inherited(base)
base.allegations = allegations.clone
base.defendant_options = defendant_options.clone
super
end | ruby | {
"resource": ""
} |
q9078 | Rexport.ExportMethods.to_s | train | def to_s
String.new.tap do |result|
result << header * '|' << "\n"
records.each do |record|
result << record * '|' << "\n"
end
end
end | ruby | {
"resource": ""
} |
q9079 | Rexport.ExportMethods.to_csv | train | def to_csv(objects = nil)
seed_records(objects) unless objects.nil?
CSV.generate do |csv|
csv << header
records.each do |record|
csv << record
end
end
end | ruby | {
"resource": ""
} |
q9080 | Rexport.ExportMethods.get_klass_from_path | train | def get_klass_from_path(path, klass = export_model)
return klass unless (association_name = path.shift)
get_klass_from_path(path, klass.reflect_on_association(association_name.to_sym).klass)
end | ruby | {
"resource": ""
} |
q9081 | Diffable.InstanceMethods.diff | train | def diff(other)
check_class_compatibility(self, other)
self_attribs = self.get_attributes(self.class.excluded_fields)
other_attribs = other.get_attributes(other.class.excluded_fields)
change = compare_objects(self_attribs, other_attribs, self, other)
#the last bit - no change, no report; simples
if other.class.conditional_fields
other.class.conditional_fields.each do |key|
change[key.to_sym] = eval("other.#{key}") unless change.empty?
end
end
change
end | ruby | {
"resource": ""
} |
q9082 | Diffable.InstanceMethods.get_attributes | train | def get_attributes(excluded)
attribs = attributes.dup
attribs.delete_if { |key, value|
(!excluded.nil? and excluded.include?(key)) or key == "id" }
end | ruby | {
"resource": ""
} |
q9083 | Diffable.InstanceMethods.reflected_names | train | def reflected_names(obj)
classes = obj.reflections
class_names = []
classes.each do |key, cl|
if eval(cl.class_name).respond_to?("diffable") \
and cl.association_class != ActiveRecord::Associations::BelongsToAssociation
class_names << key
end
end
class_names
end | ruby | {
"resource": ""
} |
# Resolves the PayU plan for the current user, persisting the selected
# plan_id and creating the plan in PayU when it does not exist remotely.
# Fills @plan with a PayuLatam::Plan instance on success.
# NOTE(review): the recursive retries assume update_attribute and
# create_payu_plan succeed; otherwise this could loop — confirm.
def plan
  # If the user has no plan_id stored yet, persist the plan_id selected in
  # the form (it arrives in the request params and is available here as the
  # instance variable @plan_id).
  if @current_user.plan_id.nil?
    if @plan_id.nil?
      raise StandardError, 'Error creando plan, plan_id null'
    end
    # Store it on the user record.
    @current_user.update_attribute(:plan_id, @plan_id)
    # Now that it is persisted, call this method again to take the else branch.
    plan
  else
    # The user already has a plan_id assigned; update it in case a different
    # plan was selected this time.
    @current_user.update_attribute(:plan_id, @plan_id)
    # Fetch the plan information from the DB.
    plan_db = @current_user.plan
    #
    # NOTE: plans MUST have a plan_code — it is required for this to work.
    #
    if plan_db.plan_code.nil? || plan_db.plan_code.empty?
      raise StandardError, 'Error creando plan, code null'
    end
    # Look the plan up in PayU by its plan_code.
    plan_payu = PayuLatam::Plan.new(plan_db.plan_code)
    # Does it exist remotely?
    if plan_payu.success?
      # Cache the PayuLatam::Plan instance.
      @plan = plan_payu
    else
      # Not present in PayU yet: create it via the plan model's helper...
      plan_db.create_payu_plan
      # ...then retry recursively.
      plan
    end
  end
end
"resource": ""
} |
# Tokenizes a credit card in PayU for @client and persists a local PayuCard
# record (token + public card details) for the current user.
# Raises StandardError when the client is missing or tokenization fails.
def create_card
  raise StandardError, 'Cliente null' if @client.nil?
  # The Card instance takes the @client the card will be attached to.
  card = PayuLatam::Card.new(@client)
  # card_params builds the correct request payload; merge it into the
  # card's params before the request.
  card.params.merge! card_params
  # Attempt to create (tokenize) the card.
  card.create!
  if card.success?
    # Keep the PayuLatam::Card instance around.
    @card = card
    @client.remove_cards
    # Add the card to the client's in-memory card list. Note this list is
    # not necessarily the same as the one PayU holds.
    @client.add_card( card.response )
    # PayU's creation response only includes the card token, so re-fetch
    # the stored card to get a bit more detail worth persisting.
    _card = card.load(card.response['token'])
    # Persist the public card info plus the PayU token locally.
    @current_user.payu_cards.create(token: @card.response['token'], last_4: _card['number'], brand: _card['type'])
  else
    raise StandardError, "Error generando token de tarjeta: #{card.error}"
  end
end
"resource": ""
} |
# Loads the locally selected card's PayU data and attaches its token to the
# client, after clearing any cards already cached on the client.
def find_card
  @client.remove_cards
  payu_card = PayuLatam::Card.new(@client) # remote PayU card info
  stored = PayuCard.find(@selected_card)
  payu_card.load(stored.token)
  @client.add_card(token: payu_card.resource['token'])
end
"resource": ""
} |
# Fetches one page of records plus pagination metadata. +options+ carries
# :limit, :offset and :order; any extra keys are treated as query
# conditions. Returns the collection with a paginator attached.
def limit options = {}
  # Remove this key if we come from limit_page method.
  page = options.delete :page
  query = options.dup
  # NOTE: the hash literal below reads the OLD options values first, then
  # the inline `options = ...` assignment rebinds options to that merged
  # hash — the order of evaluation here is load-bearing.
  collection = new_collection scoped_query( options = {
    :limit => options[:limit],
    :offset => options[:offset],
    :order => [options[:order]]
  }.merge( query ) )
  # Augment the (reassigned) options with the total record count and the
  # page number for the paginator.
  options.merge! :count => calculate_total_records( query ), :page => page
  collection.paginator = DataMapper::Paginator::Main.new options
  collection
end
"resource": ""
} |
# Normalizes pagination arguments and delegates to #limit.
# Accepts either limit_page(page, options) or limit_page(options_hash).
def limit_page page = nil, options = {}
  if page.is_a?( Hash )
    options = page
  else
    options[:page] = page.to_i
  end
  # Coerce page to a positive integer, falling back to the default page.
  options[:page] = options[:page].to_i > 0 ? options[:page].to_i : DataMapper::Paginator.default[:page]
  # BUG FIX: the original `options[:limit].to_i || default` could never use
  # the default — to_i never returns nil, so a missing limit became 0.
  options[:limit] = (options[:limit] || DataMapper::Paginator.default[:limit]).to_i
  options[:offset] = options[:limit] * ( options[:page] - 1 )
  options[:order] ||= DataMapper::Paginator.default[:order]
  limit options
end
"resource": ""
} |
# Counts all records matching +query+, ignoring pagination keys so the
# count reflects the full result set rather than a single page.
def calculate_total_records query
  # Pagination keys must not constrain the count query.
  [:page, :limit, :offset].each { |pagination_key| query.delete(pagination_key) }
  new_collection(scoped_query(query)).count.to_i
end
"resource": ""
} |
# Merges another scheduled program of the same class into this one,
# extending the program and its occurrence accordingly. Returns self.
def +( other_program )
  # Only programs of the same concrete class can be merged.
  raise ArgumentError unless other_program.instance_of?(@program.class)
  # The extra +1 bridges the gap between the two consecutive slots.
  extension = other_program.duration + 1
  program.duration += extension
  occurrence.duration += extension
  occurrence.end_time += extension
  self
end
"resource": ""
} |
# Runs the checks shared by every rule: presence when :required is true,
# then any declared dependency list (:dependencies wins over :dependency).
def before_all(key, options)
  required(key) if options.fetch(:required, false) == true
  if options.has_key?(:dependencies)
    dependencies(key, options[:dependencies])
  elsif options.has_key?(:dependency)
    dependency(key, options[:dependency])
  end
end
"resource": ""
} |
# Copies a validated value from the raw datas into the filtered hash.
# Options: :extract => false skips storing; :cast => false skips running
# +process+ on the raw value; :in / :equals / :equals_key trigger the
# corresponding check; :rename stores under a different key.
def store(key, process, options = {})
  return if options.has_key?(:extract) && options[:extract] == false
  return unless validator.datas.has_key?(key)
  raw = validator.datas[key]
  value = (options.has_key?(:cast) && options[:cast] == false) ? raw : process.call(raw)
  if options.has_key?(:in)
    in_array?(key, options[:in])
  elsif options.has_key?(:equals)
    equals_to?(key, options[:equals])
  elsif options.has_key?(:equals_key)
    equals_key?(key, options[:equals_key])
  end
  destination = options.has_key?(:rename) ? options[:rename] : key
  validator.filtered[destination] = value
end
"resource": ""
} |
# Reports a type mismatch for +key+: expected +type+, found the class of
# the actual value.
def raise_type_error(key, type)
  # BUG FIX: `found: key.class` always reported Symbol (the key's class);
  # the useful information is the class of the offending value.
  raise_error(type: "type", key: key, supposed: type, found: validator.datas[key].class)
end
"resource": ""
} |
# Raises a "required" error when +key+ is absent from the raw datas.
def required(key)
  return if validator.datas.has_key?(key)
  raise_error(type: "required", key: key)
end
"resource": ""
} |
# Raises a "dependency" error when the key this one depends on is absent.
def dependency(key, dependency)
  # BUG FIX: the error previously reported the literal string "key" instead
  # of the actual key under validation.
  raise_error(type: "dependency", key: key, needed: dependency) unless validator.datas.has_key?(dependency)
end
"resource": ""
} |
# True when +key+ is absent, or its value is of the given +type+.
def is_typed?(key, type)
  !validator.datas.has_key?(key) || validator.datas[key].kind_of?(type)
end
"resource": ""
} |
# Raises an "array.in" error unless the value for +key+ is one of +values+.
# An empty whitelist means anything is accepted.
def in_array?(key, values)
  current = validator.datas[key]
  return if values.empty? || values.include?(current)
  raise_error(type: "array.in", key: key, supposed: values, value: current)
end
"resource": ""
} |
# Raises an "equals" error unless the value for +key+ equals +value+.
def equals_to?(key, value)
  actual = validator.datas[key]
  return if actual == value
  raise_error(type: "equals", key: key, supposed: value, found: actual)
end
"resource": ""
} |
# Returns true when +key+ is absent; otherwise returns the result of
# matching the stringified value against +regex+ (MatchData or nil), so
# callers can rely on its truthiness exactly as before.
def match?(key, regex)
  return true unless validator.datas.has_key?(key)
  validator.datas[key].to_s.match(regex)
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.