_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q10100 | ProjectEulerCli.Scraper.load_page | train | def load_page(page)
# Scrapes one page of the Project Euler archive and caches the problem
# titles it finds. No-op when the page has already been visited.
return if Page.visited.include?(page)
html = open("https://projecteuler.net/archives;page=#{page}")
fragment = Nokogiri::HTML(html)
problem_links = fragment.css('#problems_table td a')
# Id of the first problem on this page (pages hold Page::LENGTH problems).
i = (page - 1) * Page::LENGTH + 1
problem_links.each do |link|
Problem[i].title = link.text
i += 1
end
# Remember the page so it is not fetched twice.
Page.visited << page
end | ruby | {
"resource": ""
} |
q10101 | ProjectEulerCli.Scraper.load_problem_details | train | def load_problem_details(id)
# Fetches publication date, solver count and (when present) the
# difficulty rating for one problem. No-op when already cached.
return unless Problem[id].published.nil?
html = open("https://projecteuler.net/problem=#{id}")
fragment = Nokogiri::HTML(html)
problem_info = fragment.css('div#problem_info span span')
# Details arrive as a single semicolon-separated string.
details = problem_info.text.split(';')
Problem[id].published = details[0].strip
Problem[id].solved_by = details[1].strip
# the ten most recent problems do not have a difficulty rating yet
Problem[id].difficulty = details[2].strip if id <= Problem.total - 10
end | ruby | {
"resource": ""
} |
q10102 | Authpwn.SessionMailer.email_verification_email | train | def email_verification_email(token, root_url)
# Builds the address-verification e-mail for the given token.
# root_url (e.g. "https://example.com/") supplies protocol and host.
@token = token
@protocol, @host = *root_url.split('://', 2)
# Drop a trailing slash left over from the URL split.
@host.slice!(-1) if @host[-1] == ?/
hostname = @host.split(':', 2).first # Strip out any port.
mail to: @token.email,
subject: email_verification_subject(token, hostname, @protocol),
from: email_verification_from(token, hostname, @protocol)
end | ruby | {
"resource": ""
} |
q10103 | Authpwn.SessionMailer.reset_password_email | train | def reset_password_email(email, token, root_url)
# Builds the "reset your password" e-mail for the given token.
#
# Bug fix: the original assigned four ivars from two values
# (@host/@protocol were set to nil, then re-assigned two lines later)
# and set @token twice. Mirrors email_verification_email's structure.
@email = email
@token = token
@protocol, @host = *root_url.split('://', 2)
# Drop a trailing slash left over from the URL split.
@host.slice!(-1) if @host[-1] == ?/
hostname = @host.split(':', 2).first # Strip out any port.
mail to: email, from: reset_password_from(token, hostname, @protocol),
subject: reset_password_subject(token, hostname, @protocol)
end | ruby | {
"resource": ""
} |
q10104 | DescendantsLoader.ClassMethods.load_self_descendants | train | def load_self_descendants
# Loads every .rb file under the directory containing this class's
# source file, so all descendants defined there become known.
file = ClassFinder.where_is(self)
path = File.expand_path(File.dirname(file))
# NOTE(review): `load` (not require) re-executes files on every call.
Dir["#{path}/**/*.rb"].each { |f| load f }
end | ruby | {
"resource": ""
} |
q10105 | Renegade.CommitMessage.check_commit_message_length | train | def check_commit_message_length(message)
# Validates that the commit message length lies within the configured
# inclusive bounds; records an error message when it does not and
# reports the outcome under a fixed label.
label = 'Commit message length'
within_bounds = message.length.between?(@min_length, @max_length)
unless within_bounds
@errors.push "Commit messages should be between #{@min_length} "\
"and #{@max_length} characters."
end
Status.report(label, within_bounds)
end | ruby | {
"resource": ""
} |
q10106 | Disqussion.Users.follow | train | def follow(*args)
# Follows a user. Accepts a target (user id/name) optionally followed
# by an options hash; the target is merged into the request options.
options = args.last.is_a?(Hash) ? args.pop : {}
target = args.first
merge_target_into_options!(target, options)
# Return the API response directly (the intermediate local was unused).
post('users/follow', options)
end | ruby | {
"resource": ""
} |
q10107 | Codependency.Parser.parse | train | def parse( file )
# Reads dependency directives from the top of +file+: takes the leading
# run of lines matching the extension-specific pattern and extracts the
# first capture group of each.
pattern = PATTERNS[ File.extname( file ) ]
# NOTE(review): an unknown extension yields a nil pattern, so the
# take_while stops immediately and an empty list is returned — confirm
# that silent behavior is intended.
IO.readlines( file ).take_while do |line|
line =~ pattern
end.map { |line| line[ pattern, 1 ] }
end | ruby | {
"resource": ""
} |
q10108 | Octo.Counter.increment_for | train | def increment_for(obj)
# Increments a Redis-backed counter for a Cequel record, keyed by the
# event's (ceiled) timestamp and the record's key attributes, and keeps
# a companion index key alive for later aggregation.
# decide the time of event asap
ts = Time.now.ceil.to_i
if obj.class.ancestors.include?Cequel::Record
args = obj.key_attributes.collect { |k,v| v.to_s }
cache_key = generate_key(ts, obj.class.name, *args)
val = Cequel::Record.redis.get(cache_key)
if val.nil?
val = 1
else
val = val.to_i + 1
end
# Counters expire one minute after the aggregation window closes.
ttl = (time_window + 1) * 60
# Update a sharded counter
# NOTE(review): get-then-setex is not atomic; concurrent increments can
# lose updates — confirm whether INCR semantics are required.
Cequel::Record.redis.setex(cache_key, ttl, val)
# Optionally, update the index
index_key = generate_index_key(ts, obj.class.name, *args)
index_present = Cequel::Record.redis.get(index_key).try(:to_i)
if index_present != 1
Cequel::Record.redis.setex(index_key, ttl, 1)
end
end
end | ruby | {
"resource": ""
} |
q10109 | Octo.Counter.aggregate | train | def aggregate(ts = Time.now.floor)
# Collects all sharded Redis counters indexed for a timestamp into a
# nested hash: { timestamp => { record => summed_count } }.
ts = ts.to_i
aggr = {}
# Find all counters from the index
index_key = generate_index_key(ts, '*')
counters = Cequel::Record.redis.keys(index_key)
counters.each do |cnt|
# Index key layout: ..SEP..SEP.timestamp.SEP.class.SEP.key_attrs...
_tmp = cnt.split(SEPARATOR)
_ts = _tmp[2].to_i
aggr[_ts] = {} unless aggr.has_key?(_ts)
clazz = _tmp[3]
_clazz = clazz.constantize
_attrs = _tmp[4.._tmp.length]
args = {}
# Rebuild the record's primary key from the key segments.
_clazz.key_column_names.each_with_index do |k, i|
args[k] = _attrs[i]
end
obj = _clazz.public_send(:get_cached, args)
# construct the keys for all counters matching this pattern
_attrs << '*'
counters_search_key = generate_key_prefix(_ts, clazz, _attrs)
counter_keys = Cequel::Record.redis.keys(counters_search_key)
counter_keys.each do |c_key|
val = Cequel::Record.redis.get(c_key)
# A vanished/empty value still counts as one occurrence.
if val
aggr[_ts][obj] = aggr[_ts].fetch(obj, 0) + val.to_i
else
aggr[_ts][obj] = aggr[_ts].fetch(obj, 0) + 1
end
end
end
aggr
end | ruby | {
"resource": ""
} |
q10110 | Taxonifi.Model::Name.rank= | train | def rank=(value)
r = value.to_s.downcase.strip
if !RANKS.include?(r)
raise NameError, "#{r} is not a valid rank."
end
@rank = r
end | ruby | {
"resource": ""
} |
q10111 | Taxonifi.Model::IcznName.name= | train | def name=(name)
case @rank
when 'superfamily'
raise NameError, "ICZN superfamily name does not end in 'oidae'." if name[-5,5] != 'oidae'
when 'family'
raise NameError, "ICZN family name does not end in 'idae'." if name[-4,4] != 'idae'
when 'subfamily'
raise NameError, "ICZN subfamily name does not end in 'inae'." if name[-4,4] != 'inae'
when 'tribe'
raise NameError, "ICZN tribe name does not end in 'ini'." if name[-3,3] != 'ini'
when 'subtribe'
raise NameError, "ICZN subtribe name does not end in 'ina'." if name[-3,3] != 'ina'
end
@name = name
end | ruby | {
"resource": ""
} |
q10112 | ActiveRecord.Transactions.rollback_active_record_state! | train | def rollback_active_record_state!
# Snapshots record state, yields, and restores the snapshot when the
# block raises. Rescuing Exception is deliberate here: even fatal
# errors must not leave stale transactional state, and the error is
# re-raised immediately.
remember_transaction_record_state
yield
rescue Exception
IdentityMap.remove(self) if IdentityMap.enabled?
restore_transaction_record_state
raise
ensure
clear_transaction_record_state
end | ruby | {
"resource": ""
} |
q10113 | ActiveRecord.Transactions.rolledback! | train | def rolledback!(force_restore_state = false) #:nodoc:
# Fires rollback callbacks, then always evicts the record from the
# identity map and restores its pre-transaction state.
run_callbacks :rollback
ensure
IdentityMap.remove(self) if IdentityMap.enabled?
restore_transaction_record_state(force_restore_state)
end | ruby | {
"resource": ""
} |
q10114 | GemFootprintAnalyzer.ChildProcess.start_child | train | def start_child
# Spawns the measured child Ruby process once, on a background thread.
# The child prints its pid as the first stdout line; stderr lines are
# echoed with a '!!' prefix for debugging.
@child_thread ||= Thread.new do # rubocop:disable Naming/MemoizedInstanceVariableName
Open3.popen3(child_env_vars, *ruby_command, context_file) do |_, stdout, stderr|
@pid = stdout.gets.strip.to_i
while (line = stderr.gets)
print "!! #{line}"
end
end
end
end | ruby | {
"resource": ""
} |
q10115 | RailsIdentity.UsersController.create | train | def create
# Creates a user, renders it (201) on success, then issues and mails a
# verification token. Sensitive columns are excluded from the response.
logger.debug("Create new user")
@user = User.new(user_params)
if @user.save
# Save succeeded. Render the response based on the created user.
render json: @user,
except: [:verification_token, :reset_token, :password_digest],
status: 201
# Then, issue the verification token and send the email for
# verification.
@user.issue_token(:verification_token)
@user.save
user_mailer.email_verification(@user).deliver_later
else
render_errors 400, @user.errors.full_messages
end
end | ruby | {
"resource": ""
} |
q10116 | RailsIdentity.UsersController.update_user | train | def update_user(update_user_params)
# Applies the permitted attributes; renders the user (sans password
# digest) on success, or a 400 with validation messages on failure.
unless @user.update_attributes(update_user_params)
return render_errors 400, @user.errors.full_messages
end
render json: @user, except: [:password_digest]
end | ruby | {
"resource": ""
} |
q10117 | RailsIdentity.UsersController.update_token | train | def update_token(kind)
# Issues a fresh token of the given kind, persists it, and notifies the
# user by the matching e-mail (password reset vs. address verification).
@user.issue_token(kind)
@user.save
message = if kind == :reset_token
user_mailer.password_reset(@user)
else
user_mailer.email_verification(@user)
end
message.deliver_later
# 204 No Content: the token travels by e-mail, not in the response.
render body: '', status: 204
end | ruby | {
"resource": ""
} |
q10118 | FormatOutput.BulletPointBuilder.add | train | def add(bullet, *items)
# Queues items under one bullet: only the first item carries the bullet
# text; the rest get an empty marker so they line up beneath it.
items.each_with_index do |item, index|
@bullet_data << [index.zero? ? bullet.to_s : "", item]
end
end | ruby | {
"resource": ""
} |
q10119 | FormatOutput.BulletPointBuilder.render | train | def render
# Renders all queued bullet rows into output lines and clears the queue.
@key_length, results = get_key_length, []
@bullet_data.each do |key, item|
results.concat(render_bullet(key, item))
end
# Rendering consumes the queued data.
@bullet_data = []
results
end | ruby | {
"resource": ""
} |
q10120 | StorageRoom.Model.create | train | def create
# POSTs a new record to the collection path inside save/create
# callbacks. Returns false for already-persisted records.
return false unless new_record?
run_callbacks :save do
run_callbacks :create do
httparty = self.class.post(self.class.index_path, request_options.merge(:body => to_json))
handle_save_response(httparty)
end
end
end | ruby | {
"resource": ""
} |
q10121 | StorageRoom.Model.update | train | def update
# PUTs the serialized record to its own @url inside save/update
# callbacks. Returns false for records that were never persisted.
return false if new_record?
run_callbacks :save do
run_callbacks :update do
httparty = self.class.put(self[:@url], request_options.merge(:body => to_json))
handle_save_response(httparty)
end
end
end | ruby | {
"resource": ""
} |
q10122 | StorageRoom.Model.destroy | train | def destroy
# DELETEs the record at its @url inside destroy callbacks; raises via
# handle_critical_response_errors on failure, otherwise returns true.
return false if new_record?
run_callbacks :destroy do
httparty = self.class.delete(self[:@url], request_options)
self.class.handle_critical_response_errors(httparty)
end
true
end | ruby | {
"resource": ""
} |
q10123 | StorageRoom.Model.to_hash | train | def to_hash(args = {}) # :nodoc:
# Guard against an explicitly passed nil.
args ||= {}
if args[:nested]
# Nested representation: only the resource URL is embedded.
{'url' => self[:@url] || self[:url]}
else
hash = super
# Persisted records also carry their '@version' attribute.
hash.merge!('@version' => self['@version']) unless new_record?
{self.class.json_name => hash}
end
end | ruby | {
"resource": ""
} |
q10124 | ActiveHarmony.Service.retrieve | train | def retrieve(url, method = :get, headers = {}, data = nil)
# Performs an HTTP request and returns the response data. Uses the
# Typhoeus-backed path in non-test Rails environments, otherwise the
# plain-HTTP helper.
puts "[ActiveHarmony] Retrieving data:"
puts "[ActiveHarmony] URL: #{url}"
puts "[ActiveHarmony] Method: #{method}"
puts "[ActiveHarmony] Headers: #{headers.inspect}"
puts "[ActiveHarmony] Data: #{data.inspect}" if data
if defined?(Rails) && !Rails.env.test?
# NOTE(review): re-assigning `data` shadows the request payload param.
data = retrieve_with_typhoeus(url, method, headers, data)
else
data = retrieve_with_http(url, method, headers, data)
end
data
end | ruby | {
"resource": ""
} |
q10125 | ActiveHarmony.Service.list | train | def list(object_type)
# GETs the collection endpoint for +object_type+ and extracts the
# objects from the parsed XML response.
url = generate_rest_url(:list, object_type)
result = retrieve(url.path)
parsed_result = parse_xml(result)
find_object_in_result(parsed_result, object_type, :list)
end | ruby | {
"resource": ""
} |
q10126 | ActiveHarmony.Service.show | train | def show(object_type, id)
# GETs a single object by id and extracts it from the parsed XML.
url = generate_rest_url(:show, object_type, id)
result = retrieve(url.path)
parsed_result = parse_xml(result)
find_object_in_result(parsed_result, object_type, :show)
end | ruby | {
"resource": ""
} |
q10127 | ActiveHarmony.Service.update | train | def update(object_type, id, data)
# Serializes +data+ as XML and sends it to the :update endpoint,
# returning the updated object extracted from the response.
url = generate_rest_url(:update, object_type, id)
object_name = object_name_for(object_type, :update)
xml_data = data.to_xml(:root => object_name, :skip_instruct => true, :dasherize => false)
result = retrieve(url.path, url.method, {'Content-type' => 'application/xml'}, xml_data)
# Consistency fix: list/show/create all parse the raw response before
# extraction; passing the unparsed body here was an inconsistency.
parsed_result = parse_xml(result)
find_object_in_result(parsed_result, object_type, :update)
end | ruby | {
"resource": ""
} |
q10128 | ActiveHarmony.Service.create | train | def create(object_type, data)
# Serializes +data+ as XML, sends it to the :create endpoint, and
# extracts the created object from the parsed response.
url = generate_rest_url(:create, object_type)
object_name = object_name_for(object_type, :create)
xml_data = data.to_xml(:root => object_name, :skip_instruct => true, :dasherize => false)
result = retrieve(url.path, url.method, {'Content-type' => 'application/xml'}, xml_data)
parsed_result = parse_xml(result)
find_object_in_result(parsed_result, object_type, :create)
end | ruby | {
"resource": ""
} |
q10129 | ActiveHarmony.Service.custom_url_for | train | def custom_url_for(object_type, action)
# Looks up a configured custom path for (object_type, action); returns
# a ServiceUrl when found, nil otherwise.
path = @paths.find do |path|
path[:object_type] == object_type &&
path[:action] == action
end
if path
ServiceUrl.new(generate_url(path[:path]), path[:method])
end
end | ruby | {
"resource": ""
} |
q10130 | ActiveHarmony.Service.object_name_for | train | def object_name_for(object_type, action)
# Resolves the serialized object name for (object_type, action): a
# configured override wins, otherwise the type with dashes normalized.
#
# Bug fix: the two comparisons sat on separate lines with no '&&', so
# the object_type check was computed and discarded — the find matched
# on action alone. (Also renamed the block variable, which shadowed
# the outer local.)
entry = @object_names.find do |candidate|
candidate[:object_type] == object_type &&
candidate[:action] == action
end
object_name = entry ? entry[:new_object_name] : nil
unless object_name
object_name = object_type.to_s.gsub('-', '_')
end
object_name
end | ruby | {
"resource": ""
} |
q10131 | LoLBase.Connection.get | train | def get(path, options = {})
# Issues an authenticated GET, injecting the API key into the query
# options, and returns the raw response body.
if options[:query].nil?
options.merge!({ query: { api_key: @key } })
else
options[:query].merge!({ api_key: @key })
end
response = self.class.get path, options
# NOTE(review): any status other than exactly 200 raises — including
# other 2xx codes; confirm that strictness is intended.
raise LoLBaseError, response.message if response.code != 200
response.body
end | ruby | {
"resource": ""
} |
q10132 | Sinatra::TwitterOAuth.Helpers.login_required | train | def login_required
# Builds the OAuth client/user from the session and bounces anonymous
# visitors to /login.
setup_client
@user = ::TwitterOAuth::User.new(@client, session[:user]) if session[:user]
# NOTE(review): rate_limit_status is fetched even when the visitor is
# about to be redirected — confirm the extra API call is intended.
@rate_limit_status = @client.rate_limit_status
redirect '/login' unless user
end | ruby | {
"resource": ""
} |
q10133 | Sinatra::TwitterOAuth.Helpers.redirect_to_twitter_auth_url | train | def redirect_to_twitter_auth_url
# Stores the OAuth request token in the session, then redirects the
# visitor into Twitter's sign-in flow.
request_token = get_request_token
session[:request_token] = request_token.token
session[:request_token_secret]= request_token.secret
# Swap the /authorize endpoint for /authenticate (the sign-in variant).
redirect request_token.authorize_url.gsub('authorize','authenticate')
end | ruby | {
"resource": ""
} |
q10134 | SIUnits.Unit.convert_to | train | def convert_to(other)
# Resolves the conversion target from +other+ (nil/self short-circuit,
# Unit used as-is, String parsed into a new Unit).
# NOTE(review): `target` is assigned but no conversion is actually
# performed — the method returns the resolved target, which looks like
# an incomplete implementation; confirm against the intended behavior.
return self if other.nil?
case other
when Unit
return self if other == self
target = other
when String
target = SIUnits::Unit.new(other.to_f)
else
raise ArgumentError, "Unknown target units"
end
end | ruby | {
"resource": ""
} |
q10135 | Fotofetch.Fetch.add_sources | train | def add_sources(urls)
# Maps each link's root URL to the link itself. compare_by_identity
# makes the hash compare keys by object identity, so links whose root
# strings have equal content still get separate entries.
urls.each_with_object( {}.compare_by_identity ) do |link, pairs|
pairs[root_url(link)] = link
end
end | ruby | {
"resource": ""
} |
q10136 | Fotofetch.Fetch.save_images | train | def save_images(urls, file_path)
# Downloads each URL to "<topic>_<index>.jpg" in the current directory.
# NOTE(review): the file_path parameter is never used — files always
# land in the working directory; confirm whether that is intended.
urls.each_with_index do |url, i|
open("#{@topic.gsub(' ', '-')}_#{i}.jpg", 'wb') do |file|
file << open(url).read
end
end
end | ruby | {
"resource": ""
} |
q10137 | Calco.Sheet.row | train | def row row_number
# Row 0 is the (optional) title row; every other row is generated by
# the engine from the column definitions.
if row_number == 0
cells = []
if @has_titles
@column_titles.each do |title|
# Bug fix: '<<' binds tighter than '?:', so the original pushed the
# raw title (possibly nil) and discarded the ternary's result.
cells << (title ? title : '')
end
end
cells
else
@engine.generate(row_number, @columns, @cell_styles, @column_styles, @column_types)
end
end | ruby | {
"resource": ""
} |
q10138 | Calco.Sheet.each_cell_definition | train | def each_cell_definition &block
# Yields each column definition; a two-argument block additionally
# receives the column's index (each vs. each_with_index semantics).
wants_index = block.arity != 1
@columns.each_with_index do |column, index|
wants_index ? yield(column, index) : yield(column)
end
end | ruby | {
"resource": ""
} |
q10139 | Empyrean.TemplateRenderer.render | train | def render
# Renders the ERB template. The locals below look unused but are
# consumed by the template through `binding` — do not remove them.
mentions = mentions_erb
hashtags = hashtags_erb
smileys = smileys_erb
clients = clients_erb
counters = {
tweets: @parsed[:tweet_count],
retweets: @parsed[:retweet_count],
retweets_percentage: (@parsed[:retweet_count] * 100 / @parsed[:tweet_count].to_f).round(2),
selftweets: @parsed[:selftweet_count],
selftweets_percentage: (@parsed[:selftweet_count] * 100 / @parsed[:tweet_count].to_f).round(2)
}
times_of_day = times_erb
erb = ERB.new @template
erb.result binding
end | ruby | {
"resource": ""
} |
q10140 | Empyrean.TemplateRenderer.mentions_erb | train | def mentions_erb
# Splits parsed mentions into the configured "top N" list and the
# remainder (shown without example tweets). When the section is
# disabled both lists stay empty.
result = {
enabled: @config[:mentions][:enabled],
top: [],
nottop: []
}
return result unless @config[:mentions][:enabled]
limit = @config[:mentions][:top]
@parsed[:mentions].slice(0, limit).each do |mention|
result[:top] << mention[1]
end
remainder = @parsed[:mentions].slice(limit, @config[:mentions][:notop])
(remainder || []).each do |mention|
# The long tail is rendered without an example tweet.
mention[1].delete(:example)
result[:nottop] << mention[1]
end
result
end | ruby | {
"resource": ""
} |
q10141 | Empyrean.TemplateRenderer.hashtags_erb | train | def hashtags_erb
# Splits parsed hashtags into the configured "top N" list and the
# remainder (rendered without example tweets). Empty lists when the
# section is disabled.
retdict = {
enabled: @config[:hashtags][:enabled],
top: [],
nottop: []
}
if @config[:hashtags][:enabled]
top = @parsed[:hashtags].slice(0, @config[:hashtags][:top]) # top X hashtags
top.each do |hashtag|
retdict[:top] << hashtag[1]
end
nottop = @parsed[:hashtags].slice(@config[:hashtags][:top], @config[:hashtags][:notop]) # not in the top X
unless nottop.nil?
nottop.each do |hashtag|
hashtag[1].delete(:example)
retdict[:nottop] << hashtag[1]
end
end
end
retdict
end | ruby | {
"resource": ""
} |
q10142 | Empyrean.TemplateRenderer.smileys_erb | train | def smileys_erb
# Splits parsed smileys into the configured "top N" list and the
# remainder (rendered without example tweets). Empty lists when the
# section is disabled.
retdict = {
enabled: @config[:smileys][:enabled],
top: [],
nottop: []
}
if @config[:smileys][:enabled]
top = @parsed[:smileys].slice(0, @config[:smileys][:top]) # top X smileys
top.each do |smiley|
retdict[:top] << smiley[1]
end
nottop = @parsed[:smileys].slice(@config[:smileys][:top], @config[:smileys][:notop]) # not in the top X
unless nottop.nil?
nottop.each do |smiley|
smiley[1].delete(:example)
retdict[:nottop] << smiley[1]
end
end
end
retdict
end | ruby | {
"resource": ""
} |
q10143 | Empyrean.TemplateRenderer.clients_erb | train | def clients_erb
# Splits parsed clients into the configured "top N" list and the
# remainder, projecting each entry to name/url/count plus its share of
# all tweets. Empty lists when the section is disabled.
retdict = {
enabled: @config[:clients][:enabled],
top: [],
nottop: []
}
if @config[:clients][:enabled]
top = @parsed[:clients].slice(0, @config[:clients][:top]) # top X clients
top.each do |client|
retdict[:top] << {
name: client[1][:name],
url: client[1][:url],
count: client[1][:count],
percentage: (client[1][:count] * 100 / @parsed[:tweet_count].to_f).round(2)
}
end
nottop = @parsed[:clients].slice(@config[:clients][:top], @config[:clients][:notop]) # not in the top X
unless nottop.nil?
nottop.each do |client|
client[1].delete(:example)
retdict[:nottop] << {
name: client[1][:name],
url: client[1][:url],
count: client[1][:count],
percentage: (client[1][:count] * 100 / @parsed[:tweet_count].to_f).round(2)
}
end
end
end
retdict
end | ruby | {
"resource": ""
} |
q10144 | TodoLint.FileFinder.list | train | def list(*extensions)
# Returns the known files filtered to the given extensions (e.g. ".rb"),
# minus any configured exclusions.
# NOTE(review): keep_if/reject! mutate the collection returned by
# all_files in place — if all_files memoizes, later calls see the
# already-filtered list; confirm that is intended.
all_files.keep_if do |filename|
extensions.include?(Pathname.new(filename).extname)
end
all_files.reject! { |file| excluded_file?(file) }
all_files
end | ruby | {
"resource": ""
} |
q10145 | TodoLint.FileFinder.excluded_file? | train | def excluded_file?(file)
# True when the file's absolute path matches any glob in
# options[:excluded_files] (shell-style fnmatch patterns).
full_path = File.expand_path(file)
options.fetch(:excluded_files) { [] }.any? do |file_to_exclude|
File.fnmatch(file_to_exclude, full_path)
end
end | ruby | {
"resource": ""
} |
q10146 | Prequel.Errors.full_messages | train | def full_messages
inject([]) do |m, kv|
att, errors = *kv
errors.each {|e| m << "#{e}"}
m
end
end | ruby | {
"resource": ""
} |
q10147 | StixSchemaSpy.Schema.find_prefix | train | def find_prefix(doc)
# Determines this schema's namespace prefix, preferring an explicit
# config entry over the document's own namespace declarations.
return config['prefix'] if config && config['prefix']
# Loop through the attributes until we see one with the same value
ns_prefix_attribute = doc.namespaces.find do |prefix, ns|
ns.to_s == namespace.to_s && prefix != 'xmlns'
end
# If a declaration was found, strip the "xmlns:" part; otherwise fall
# back to "Unknown" (note: not nil, despite what the old comment said).
ns_prefix_attribute ? ns_prefix_attribute[0].split(':').last : "Unknown"
end | ruby | {
"resource": ""
} |
q10148 | MultiGit.Config.each | train | def each
# Enumerates [key, value] pairs for every explicitly set, non-default
# config entry. Returns an Enumerator when no block is given.
return to_enum unless block_given?
each_explicit_key do |*key|
next if default?(*key)
yield key, get(*key)
end
end | ruby | {
"resource": ""
} |
q10149 | Eventual.Node.include? | train | def include? date
# True when +date+ falls inside this node. `walk` visits element sets;
# `break` aborts the walk on the first set containing the date.
result = false
walk { |elements|
break result = true if elements.include? date
}
# A plain Date (or the absence of time constraints) is decided by the
# walk alone; otherwise the time component must match as well.
return result if !result || date.class == Date || times.nil?
times.include? date
end | ruby | {
"resource": ""
} |
q10150 | Rokko.Task.define | train | def define
# Defines the Rake task that renders every source file to HTML and,
# optionally, builds an index page from a README or a chosen source.
desc "Generate rokko documentation"
task @name do
# Find README file for `index.html` and delete it from `sources`
if @options[:generate_index]
readme_source = @sources.detect { |f| File.basename(f) =~ /README(\.(md|text|markdown|mdown|mkd|mkdn)$)?/i }
readme = readme_source ? File.read(@sources.delete(readme_source)) : ''
end
# Run each file through Rokko and write output
@sources.each do |filename|
rokko = Rokko.new(filename, @sources, @options)
out_dest = File.join(@dest, filename.sub(Regexp.new("#{File.extname(filename)}$"), ".html"))
# Bug fix: the source filename interpolation was garbled
# ("#(unknown)") — restore it so the progress line names the input.
puts "rokko: #{filename} -> #{out_dest}"
FileUtils.mkdir_p File.dirname(out_dest)
File.open(out_dest, 'wb') { |fd| fd.write(rokko.to_html) }
end
# Generate index.html if needed
if @options[:generate_index]
require 'rokko/index_layout'
out_dest = File.join(@dest, 'index.html')
puts "rokko: #{out_dest}"
File.open(out_dest, 'wb') { |fd| fd.write(IndexLayout.new(@sources, readme, @options).render) }
end
# Run specified file through rokko and use it as index
if @options[:index] && source_index = @sources.find{|s| s == @options[:index]}
rokko = Rokko.new(source_index, @sources, @options.merge(preserve_urls: true))
out_dest = File.join(@dest, 'index.html')
puts "rokko: #{source_index} -> index.html"
File.open(out_dest, 'wb') { |fd| fd.write(rokko.to_html) }
end
end
end | ruby | {
"resource": ""
} |
q10151 | Stocker.Generator.new | train | def new(item, total)
# Adds +item+ to the inventory file with the given total, the CLI
# minimum/url options (url falls back to the configured default), and
# a fresh checked timestamp.
data = read_file
data[item] = {'total' => total.to_i, 'min' => options[:minimum].to_i, 'url' => options[:url] || read_config['url'], 'checked' => Time.now}
write_file(data)
end | ruby | {
"resource": ""
} |
q10152 | Stocker.Generator.delete | train | def delete(item)
# Removes the (fuzzily matched) item from the inventory file.
data = read_file
# NOTE(review): match_name communicates its result through the @@item
# class variable — fragile shared state; confirm before refactoring.
match_name(item)
data.delete(@@item)
write_file(data)
end | ruby | {
"resource": ""
} |
q10153 | Stocker.Generator.check | train | def check
# Warns about items whose total fell below their minimum and opens a
# buy link for each (deduplicated).
links = []
read_file.each do |key, value|
# NOTE(review): this updates the in-memory hash only — the refreshed
# "checked" timestamp is never written back to the file.
value["checked"] = Time.now
if value["total"] < value["min"]
puts "You're running low on #{key}!"
links << key
end
end
links.uniq!
links.each { |link| buy(link)}
end | ruby | {
"resource": ""
} |
q10154 | Stocker.Generator.total | train | def total(item, total)
# Sets the stored total for the (fuzzily matched) item and refreshes
# its checked timestamp.
data = read_file
match_name(item)
data[@@item]["total"] = total.to_i
time(item)
write_file(data)
end | ruby | {
"resource": ""
} |
q10155 | Stocker.Generator.url | train | def url(item, url)
# Sets the purchase URL for the (fuzzily matched) item and refreshes
# its checked timestamp.
data = read_file
match_name(item)
data[@@item]["url"] = url
time(item)
write_file(data)
end | ruby | {
"resource": ""
} |
q10156 | Stocker.Generator.min | train | def min(item, min)
# Sets the minimum-stock threshold for the (fuzzily matched) item.
# NOTE(review): unlike total/url this does not call time(item) to
# refresh the checked timestamp — confirm whether that is deliberate.
data = read_file
match_name(item)
data[@@item]["min"] = min.to_i
write_file(data)
end | ruby | {
"resource": ""
} |
q10157 | Stocker.Generator.list | train | def list
# Prints the inventory as a color-coded table: green above minimum,
# yellow at minimum, red below — each group sorted by descending margin.
begin
@header = [["", ""]]
# @header = [[set_color("Item", :white), set_color("Total", :white)], [set_color("=================", :white), set_color("=====", :white)]]
@green = []
@yellow = []
@yellow2 = []
@green2 = []
@red = []
@red2 = []
read_file.each do |key, value|
if value['total'] > value['min']
@green += [[key.titlecase,value['total'], value['total']-value['min']]]
elsif value['total'] == value['min']
@yellow += [[key.titlecase,value['total'], value['total']-value['min']]]
else
@red += [[key.titlecase,value['total'], value['total']-value['min']]]
end
end
# Rows are [name, total, margin]; order by margin (third element).
@green.sort_by! { |a,b,c| c }
@yellow.sort_by! { |a,b,c| c }
@red.sort_by! { |a,b,c| c }
@green.reverse!
@yellow.reverse!
@red.reverse!
@green.each { |a,b| @green2 += [[set_color(a, :green), set_color(b, :green)]] }
@yellow.each { |a,b| @yellow2 += [[set_color(a, :yellow), set_color(b, :yellow)]] }
@red.each { |a,b| @red2 += [[set_color(a, :red), set_color(b, :red)]] }
print_table(@header + @green2 + @yellow2 + @red2,{indent: 2})
# Bug fix: `rescue Exception => e` swallowed even SystemExit/Interrupt
# and never used the binding; StandardError is the right net for a
# missing/empty inventory file.
rescue StandardError
puts "Inventory empty"
end
end | ruby | {
"resource": ""
} |
q10158 | Gumdrop.DataManager.parse_file | train | def parse_file(path, target_ext=nil)
# Parses a data file at +path+; returns nil for a nil path or a
# directory. (Directory loading is disabled — see the commented code.)
return nil if path.nil?
return nil if File.directory? path
_load_from_file path, target_ext
# if File.directory? path
# _load_from_directory path
# else
# _load_from_file path, target_ext
# end
end | ruby | {
"resource": ""
} |
q10159 | MIPPeR.Model.build_pointer_array | train | def build_pointer_array(array, type)
# Copies a Ruby array into a native FFI buffer of the given element
# type (e.g. :int, :double) and returns the pointer.
buffer = FFI::MemoryPointer.new type, array.length
buffer.send("write_array_of_#{type}".to_sym, array)
buffer
end | ruby | {
"resource": ""
} |
q10160 | HasDefaultAssociation.ClassMethods.has_default_association | train | def has_default_association *names, &default_proc
# Declares associations that get a default value built by default_proc.
# The :eager option additionally installs a callback so the default is
# created up front.
opts = names.extract_options!
opts.assert_valid_keys(:eager)
names.each do |name|
create_default_association(name, default_proc)
add_default_association_callback(name) if opts[:eager]
end
end | ruby | {
"resource": ""
} |
q10161 | VCSToolkit.Repository.status | train | def status(commit, ignore: [])
# Compares the given commit's tree with the staging area.
# +tree+ stays nil for a nil commit (e.g. an empty repository).
tree = get_object(commit.tree) unless commit.nil?
Utils::Status.compare_tree_and_store tree,
staging_area,
object_store,
ignore: ignore
end | ruby | {
"resource": ""
} |
q10162 | VCSToolkit.Repository.commit_status | train | def commit_status(base_commit, new_commit, ignore: [])
# Compares the trees of two commits; either tree stays nil when its
# commit is nil (e.g. comparing against the repository's beginning).
base_tree = get_object(base_commit.tree) unless base_commit.nil?
new_tree = get_object(new_commit.tree) unless new_commit.nil?
Utils::Status.compare_trees base_tree,
new_tree,
object_store,
ignore: ignore
end | ruby | {
"resource": ""
} |
q10163 | VCSToolkit.Repository.merge | train | def merge(commit_one, commit_two)
# Three-way merge of two commits into the staging area. Returns
# {merged: [...], conflicted: [...]} listing the affected file paths.
common_ancestor = commit_one.common_ancestor(commit_two, object_store)
commit_one_files = Hash[get_object(commit_one.tree).all_files(object_store).to_a]
commit_two_files = Hash[get_object(commit_two.tree).all_files(object_store).to_a]
# Without a common ancestor every file merges against an empty base.
if common_ancestor.nil?
ancestor_files = {}
else
ancestor_files = Hash[get_object(common_ancestor.tree).all_files(object_store).to_a]
end
all_files = commit_one_files.keys | commit_two_files.keys | ancestor_files.keys
merged = []
conflicted = []
all_files.each do |file|
# A path missing from a side is treated as an empty file.
ancestor = ancestor_files.key?(file) ? get_object(ancestor_files[file]).content.lines : []
file_one = commit_one_files.key?(file) ? get_object(commit_one_files[file]).content.lines : []
file_two = commit_two_files.key?(file) ? get_object(commit_two_files[file]).content.lines : []
diff = VCSToolkit::Merge.three_way ancestor, file_one, file_two
if diff.has_conflicts?
conflicted << file
elsif diff.has_changes?
merged << file
end
# Conflict markers carry the two commit ids.
content = diff.new_content("<<<<< #{commit_one.id}\n", ">>>>> #{commit_two.id}\n", "=====\n")
# An empty result means the file was deleted by the merge.
if content.empty?
staging_area.delete_file file if staging_area.file? file
else
staging_area.store file, content.join('')
end
end
{merged: merged, conflicted: conflicted}
end | ruby | {
"resource": ""
} |
q10164 | VCSToolkit.Repository.file_difference | train | def file_difference(file_path, commit)
# Diffs the staged version of +file_path+ against its blob in +commit+.
# A missing side is treated as an empty file.
if staging_area.file? file_path
file_lines = staging_area.fetch(file_path).lines
# Normalize a missing trailing newline so the diff lines align.
file_lines.last << "\n" unless file_lines.last.nil? or file_lines.last.end_with? "\n"
else
file_lines = []
end
tree = get_object commit.tree
blob_name_and_id = tree.all_files(object_store).find { |file, _| file_path == file }
if blob_name_and_id.nil?
blob_lines = []
else
blob = get_object blob_name_and_id.last
blob_lines = blob.content.lines
blob_lines.last << "\n" unless blob_lines.last.nil? or blob_lines.last.end_with? "\n"
end
Diff.from_sequences blob_lines, file_lines
end | ruby | {
"resource": ""
} |
q10165 | Oauthio.Client.me_url | train | def me_url(provider, params=nil)
# Builds the /me endpoint URL for +provider+ by substituting the
# ':provider' placeholder in the configured template.
connection.build_url(options[:me_url].sub(/:provider/, provider), params).
to_s
end | ruby | {
"resource": ""
} |
q10166 | MuchPlugin.ClassMethods.included | train | def included(plugin_receiver)
# Runs every registered hook on the receiver exactly once: the detector
# module marks receivers that already got this plugin applied.
return if plugin_receiver.include?(self.much_plugin_included_detector)
plugin_receiver.send(:include, self.much_plugin_included_detector)
self.much_plugin_included_hooks.each do |hook|
plugin_receiver.class_eval(&hook)
end
end | ruby | {
"resource": ""
} |
q10167 | AmberbitConfig.HashStruct.to_hash | train | def to_hash
# Deep conversion to a plain Hash: nested HashStructs are converted
# recursively, everything else is copied as-is.
@table.each_with_object({}) do |(key, value), copy|
copy[key] = value.is_a?(HashStruct) ? value.to_hash : value
end
end | ruby | {
"resource": ""
} |
q10168 | AmberbitConfig.HashStruct.check_hash_for_conflicts | train | def check_hash_for_conflicts(hash)
# Rejects non-hash arguments and any keys that would shadow existing
# public methods when turned into accessors.
raise HashArgumentError, 'It must be a hash' unless hash.is_a?(Hash)
unless (conflicts = self.public_methods & hash.keys.map(&:to_sym)).empty?
raise HashArgumentError, "Rename keys in order to avoid conflicts with internal calls: #{conflicts.join(', ')}"
end
end | ruby | {
"resource": ""
} |
q10169 | Unchained.Request.get_resource | train | def get_resource(url, resource_class, params={})
# Fetches +url+ and wraps the response hash in a resource_class
# instance bound to this client.
resource_class.from_hash(get(url, params), client: self)
end | ruby | {
"resource": ""
} |
q10170 | Unchained.Request.get_resources | train | def get_resources(url, resource_class, params={})
# Fetches +url+ and wraps each element of the response collection in a
# resource_class instance bound to this client.
get(url, params).map do |result|
resource_class.from_hash(result, client: self)
end
end | ruby | {
"resource": ""
} |
q10171 | Auditing.AuditRelationship.audit_relationship_enabled | train | def audit_relationship_enabled(opts={})
# Enables relationship auditing on the model: records which models and
# association fields to audit and installs create/update/destroy hooks.
include InstanceMethods
# class_inheritable_accessor :audit_enabled_models
# class_inheritable_accessor :field_names
class_attribute :audit_enabled_models
class_attribute :field_names
self.audit_enabled_models = gather_models(opts)
self.field_names = gather_assoc_fields_for_auditing(opts[:fields])
after_create :audit_relationship_create
before_update :audit_relationship_update
before_destroy :audit_relationship_destroy
end | ruby | {
"resource": ""
} |
q10172 | Myrrha.Coercions.delegate | train | def delegate(method, &convproc)
# Registers a coercion applying to any value that responds to +method+,
# converting by calling that method (or the supplied block).
convproc ||= lambda{|v,t| v.send(method) }
upon(lambda{|v,t| v.respond_to?(method) }, convproc)
end | ruby | {
"resource": ""
} |
q10173 | Myrrha.Coercions.coercion | train | def coercion(source, target = main_target_domain, converter = nil, &convproc)
# Appends (or prepends, per @appender) a [source, target, converter]
# rule. Returns self for chaining.
@rules.send(@appender, [source, target, converter || convproc])
self
end | ruby | {
"resource": ""
} |
q10174 | Myrrha.Coercions.coerce | train | def coerce(value, target_domain = main_target_domain)
# Coerces +value+ into +target_domain+ by trying the registered rules
# in order; values already in the domain pass through. Delegates to
# error_handler (with the first captured error) when no rule succeeds.
return value if belongs_to?(value, target_domain)
error = nil
each_rule do |from,to,converter|
# A rule applies when its source domain is nil (wildcard) or matches.
next unless from.nil? or belongs_to?(value, from, target_domain)
begin
# Converters may throw :nextrule to skip to the next candidate.
catch(:nextrule) do
if to.nil? or subdomain?(to, target_domain)
got = convert(value, target_domain, converter)
return got
elsif subdomain?(target_domain, to)
# A broader target: only accept if the result actually lands
# in the requested domain.
got = convert(value, target_domain, converter)
return got if belongs_to?(got, target_domain)
end
end
rescue => ex
# Keep only the first failure for diagnostics.
error = ex unless error
end
end
error_handler.call(value, target_domain, error)
end | ruby | {
"resource": ""
} |
q10175 | Myrrha.Coercions.belongs_to? | train | def belongs_to?(value, domain, target_domain = domain)
# A two-argument Proc acts as a predicate over (value, target_domain);
# anything else is matched through case-equality (===).
return domain.call(value, target_domain) if domain.is_a?(Proc) && domain.arity == 2
domain.respond_to?(:===) && (domain === value)
end | ruby | {
"resource": ""
} |
q10176 | KynetxAmApi.Application.endpoint | train | def endpoint(type, opts={})
# Generates an application endpoint of the given type via the API,
# layering type-specific defaults under caller-supplied options.
options = {
:extname => @name,
:extdesc => "",
:extauthor => @user.name,
:force_build => 'N',
:contents => "compiled",
:format => 'json',
:env => 'prod'
}
# Set type specific options
case type.to_s
when 'bookmarklet'
options[:runtime] = "init.kobj.net/js/shared/kobj-static.js"
when 'info_card'
options[:image_url] = image_url('icard')
options[:datasets] = ""
when 'ie'
options[:appguid] = @guid
end
# Caller options override the defaults above.
options.merge!(opts)
puts "ENDPOINT PARAMS: (#{type}): #{options.inspect}" if $DEBUG
return @api.post_app_generate(@application_id, type.to_s, options)
end | ruby | {
"resource": ""
} |
q10177 | Handcart::Concerns::Handcarts.ClassMethods.handcart_show_path | train | def handcart_show_path(handcart)
# Resolves the show path for a handcart: configured path first, then a
# matching Rails route helper, then a conventional guess.
if Handcart.handcart_show_path.present?
# Load it straight from the config
"/#{Handcart.handcart_show_path}/#{handcart.to_param}"
else
if Rails.application.routes.url_helpers.respond_to?("#{Handcart.handcart_class.model_name.singular}_path".to_sym)
# Is there one already defined
Rails.application.routes.url_helpers.send("#{Handcart.handcart_class.model_name.singular}_path", handcart.to_param)
else
# Shot in the dark
"/#{Handcart.handcart_class.model_name.route_key}/#{handcart.to_param}"
end
end
end | ruby | {
"resource": ""
} |
q10178 | LatoBlog.Back::PostFieldsController.destroy_relay_field | train | def destroy_relay_field
# Destroys a child post field and re-renders via JS.
#
# Bug fix: the error branches rendered but did not return, so execution
# fell through, rendered a second time (DoubleRenderError) and always
# reset @error to false.
# find post field
# NOTE(review): find_by returns nil for an unknown id, which would make
# the next line raise — confirm whether ids are pre-validated.
child_field = LatoBlog::PostField.find_by(id: params[:id])
@post_field = child_field.post_field
unless @post_field
@error = true
return respond_to { |r| r.js }
end
# find post field child and destroy it
unless child_field.destroy
@error = true
return respond_to { |r| r.js }
end
# send response to client
@error = false
respond_to { |r| r.js }
end | ruby | {
"resource": ""
} |
q10179 | Guard.Shopifytheme.start | train | # Guard lifecycle hook. If a config.yml already exists, announce that
# watching has begun; otherwise write a placeholder config.yml the user
# must fill in with their Shopify credentials.
def start
if File.exist? 'config.yml'
Notifier.notify "Watching for changes to Shopify Theme"
else
# Placeholder configuration written verbatim; the heredoc body must keep
# its exact layout or the generated YAML would change.
data = <<-EOF
---
:api_key: YOUR_API_KEY
:password: YOUR_PASSWORD
:store: YOURSHOP.myshopify.com
:theme_id: 'YOUR_THEME_ID'
:ignore_files:
- README.md
- CHANGELOG.md
EOF
File.open('./config.yml', "w") { |file| file.write data }
Notifier.notify "Created config.yml. Remember to add your Shopify details to it."
end
end | ruby | {
"resource": ""
} |
q10180 | FentonShell.Project.create | train | # Creates a project via the API and reports the outcome.
# Returns true when the server responds 201 Created, false otherwise;
# the corresponding success or error message is recorded either way.
def create(global_options, options)
  status, body = project_create(global_options, options)
  created = (status == 201)
  if created
    save_message(create_success_message(body))
  else
    parse_message(body)
  end
  created
end | ruby | {
"resource": ""
} |
q10181 | FentonShell.Project.project_json | train | # Serializes the project attributes from +options+ into the JSON payload
# expected by the API, nested under a top-level "project" key.
def project_json(options)
  attributes = {
    name: options[:name],
    description: options[:description],
    passphrase: options[:passphrase],
    key: options[:key],
    organization: options[:organization]
  }
  { project: attributes }.to_json
end | ruby | {
"resource": ""
} |
q10182 | StorageRoom.Resource.reload | train | # Re-fetches this resource from the API and replaces all local attributes.
#
# Fetches +url+ (defaulting to the resource's own @url attribute), unwraps
# the single-root-key response payload, clears current state, and applies
# the fresh data. Always returns true.
def reload(url = nil, parameters = {})
httparty = self.class.get(url || self[:@url], StorageRoom.request_options.merge(parameters))
# Response body is { "<root>" => {...} }; grab the inner hash.
hash = httparty.parsed_response.first[1]
reset!
set_from_response_data(hash)
true
end | ruby | {
"resource": ""
} |
q10183 | RDO.Connection.debug | train | # Runs the given block with the logger temporarily forced to DEBUG level,
# restoring the caller's original level afterwards (even if the block raises).
#
# @raise [ArgumentError] when no block is supplied
def debug
  raise ArgumentError,
    "RDO::Connection#debug requires a block" unless block_given?
  # Save the current level and switch to DEBUG in one parallel assignment.
  reset, logger.level = logger.level, Logger::DEBUG
  yield
ensure
  # Only restore when a level was actually saved: if the ArgumentError above
  # fired, +reset+ is still nil and assigning it would clobber logger.level
  # (the original code had exactly that bug).
  logger.level = reset unless reset.nil?
end | ruby | {
"resource": ""
} |
q10184 | RDO.Connection.normalize_options | train | # Normalizes connection options into a Hash with Symbol keys.
#
# Accepts either an options Hash (keys symbolized where possible, :driver
# coerced to a String) or a connection URI (String/URI, delegated to
# parse_connection_uri). Any other type raises RDO::Exception.
def normalize_options(options)
case options
when Hash
Hash[options.map{|k,v| [k.respond_to?(:to_sym) ? k.to_sym : k, v]}].tap do |opts|
opts[:driver] = opts[:driver].to_s if opts[:driver]
end
when String, URI
parse_connection_uri(options)
else
raise RDO::Exception,
"Unsupported connection argument format: #{options.class.name}"
end
end | ruby | {
"resource": ""
} |
q10185 | UnifiedPayment.InstallGenerator.create_migrations | train | # Copies every bundled migration template into the host app's db/migrate.
def create_migrations
Dir["#{self.class.source_root}/migrations/*.rb"].sort.each do |filepath|
name = File.basename(filepath)
template "migrations/#{name}", "db/migrate/#{name}"
# NOTE(review): presumably pauses so consecutive migrations get distinct
# timestamps — confirm, since the destination names here are not timestamped.
sleep 1
end
end | ruby | {
"resource": ""
} |
q10186 | FFI.Bitmask.to_native | train | # Converts a bitmask query into its native integer representation.
#
# +query+ may be nil (-> 0), a single item, or an array of items; each item
# can be a Symbol (looked up in @kv_map), an Integer, or anything responding
# to #to_int. All resolved values are OR'ed together. An unknown Symbol or
# an unsupported item type raises ArgumentError. +ctx+ is unused here.
def to_native(query, ctx)
return 0 if query.nil?
# Accept either a single value or an array uniformly.
flat_query = [query].flatten
flat_query.inject(0) do |val, o|
case o
when Symbol
v = @kv_map[o]
raise ArgumentError, "invalid bitmask value, #{o.inspect}" unless v
val |= v
when Integer
val |= o
when ->(obj) { obj.respond_to?(:to_int) }
val |= o.to_int
else
raise ArgumentError, "invalid bitmask value, #{o.inspect}"
end
end
end | ruby | {
"resource": ""
} |
q10187 | FormatOutput.ColumnBuilder.render | train | # Renders every row of the current page into an array of strings and
# clears the page buffer for the next batch.
def render
  widths = get_column_widths
  rendered = (0...rows).map { |row_index| render_row(row_index, widths) }
  @page_data.clear
  rendered
end | ruby | {
"resource": ""
} |
q10188 | FormatOutput.ColumnBuilder.add_a_row | train | # Grows the layout by one row: flattens the current page data and
# redistributes the items into columns of the new (taller) row count.
def add_a_row
  target_rows = rows + 1
  items = @page_data.flatten
  @page_data = []
  items.each_slice(target_rows) { |column| @page_data << column }
end | ruby | {
"resource": ""
} |
q10189 | Aker::Cas.RackProxyCallback.call | train | # Rack entry point: dispatches proxy-callback requests to the matching
# handler and forwards every other request to the wrapped application.
def call(env)
  path = env["PATH_INFO"]
  if path == RECEIVE_PATH
    receive(env)
  elsif path == RETRIEVE_PATH
    retrieve(env)
  else
    @app.call(env)
  end
end | ruby | {
"resource": ""
} |
q10190 | Aker::Cas.RackProxyCallback.store_iou | train | # Persists the PGT under its IOU key inside a PStore transaction.
def store_iou(pgt_iou, pgt)
  # PStore#transaction yields the store itself.
  open_pstore.transaction do |store|
    store[pgt_iou] = pgt
  end
end | ruby | {
"resource": ""
} |
q10191 | Aker::Cas.RackProxyCallback.resolve_iou | train | # Looks up (and consumes) the PGT stored for +pgt_iou+.
# Returns the PGT, or nil when none was stored; a found entry is deleted.
def resolve_iou(pgt_iou)
  result = nil
  open_pstore.transaction do |store|
    result = store[pgt_iou]
    store.delete(pgt_iou) if result
  end
  result
end | ruby | {
"resource": ""
} |
q10192 | QuadrigaCX.Private.withdraw | train | # Requests a withdrawal of the given +coin+ via the "/<coin>_withdrawal"
# API endpoint, forwarding any extra +params+.
#
# @raise [ConfigurationError] when no coin is given or the coin is not a
#   recognized Coin type
def withdraw(coin, params = {})
raise ConfigurationError.new('No coin type specified') unless coin
raise ConfigurationError.new('Invalid coin type specified') unless Coin.valid?(coin)
request(:post, "/#{coin}_withdrawal", params)
end | ruby | {
"resource": ""
} |
q10193 | QuadrigaCX.Private.user_transactions | train | # Fetches the authenticated user's transactions.
# Coerces each transaction id to a String in place; #each returns its
# receiver, so the full (mutated) transaction list is the return value.
def user_transactions(params = {})
request(:post, '/user_transactions', params).each { |t| t.id = t.id.to_s }
end | ruby | {
"resource": ""
} |
q10194 | Bixby.Bench.label_width | train | # Width of the label column: one more than the longest Sample label, with a
# floor of 40 characters. Memoized after the first computation.
#
# Robustness fix: with no Sample entries the original called #label on nil
# (max of an empty list) and raised NoMethodError; we now fall back to the
# 40-character floor instead.
def label_width
  @label_width ||= begin
    longest = @samples.select { |s| Sample === s }.max_by { |s| s.label.length }
    width = longest ? longest.label.length + 1 : 0
    width < 40 ? 40 : width
  end
end | ruby | {
"resource": ""
} |
q10195 | GenSpec.Matchers.delete | train | # Matcher helper: asserts that running the generator deletes +filename+.
# The file (and its parent directory) is first created inside the source
# root so the generator has something to remove; afterwards we expect it gone.
def delete(filename)
within_source_root do
FileUtils.mkdir_p File.dirname(filename)
FileUtils.touch filename
end
generate { expect(File).not_to exist(filename) }
end | ruby | {
"resource": ""
} |
q10196 | SoundDrop.Client.get_client | train | # Builds a Soundcloud API client from the stored application credentials,
# adding user credentials when both a username and a password are present.
def get_client
  options = {
    client_id: @CLIENT_ID,
    client_secret: @CLIENT_SECRET
  }
  if username? && password?
    options[:username] = @USERNAME
    options[:password] = @PASSWORD
  end
  Soundcloud.new(options)
end | ruby | {
"resource": ""
} |
q10197 | SoundDrop.Client.get_drop | train | # Resolves a public SoundCloud URL to its track and wraps it in a Drop.
def get_drop(url)
  resolved_track = client.get('/resolve', url: url)
  SoundDrop::Drop.new(client: client, track: resolved_track)
end | ruby | {
"resource": ""
} |
q10198 | ExceptionDog.Handler.format_backtrace | train | # Normalizes a backtrace for reporting: strips newlines and quote characters
# from each frame and keeps only the trailing MAX_LINE_LENGTH characters of
# the first BACKTRACE_LINES + 1 frames. A nil backtrace is treated as empty.
def format_backtrace(backtrace)
  frames = (backtrace || [])[0..BACKTRACE_LINES]
  frames.map do |frame|
    cleaned = frame.gsub(/\n|\`|\'/, '')
    cleaned.chars.last(MAX_LINE_LENGTH).join
  end
end | ruby | {
"resource": ""
} |
q10199 | Bixby.CommandSpec.validate | train | # Checks that this spec refers to an existing bundle and command and that
# the bundle's digest matches +expected_digest+.
#
# Consistency fix: the original mixed three raise idioms
# (ExceptionClass.new(msg), and "raise Class, msg, caller"); all raises now
# use the idiomatic "raise Class, message" form, keeping the explicit
# +caller+ backtrace on the digest check so it points at the caller.
#
# @raise [BundleNotFound]  when the bundle is missing or the digest differs
# @raise [CommandNotFound] when the command is missing from the bundle
# @return [true] when everything validates
def validate(expected_digest)
  unless bundle_exists?
    raise BundleNotFound, "repo = #{@repo}; bundle = #{@bundle}"
  end
  unless command_exists?
    raise CommandNotFound, "repo = #{@repo}; bundle = #{@bundle}; command = #{@command}"
  end
  if self.digest != expected_digest
    raise BundleNotFound, "digest does not match ('#{self.digest}' != '#{expected_digest}')", caller
  end
  true
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.