_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q7900
|
RProgram.OptionList.method_missing
|
train
|
# Dynamic accessor dispatch for option lists: "name = value" calls become
# hash writes and bare "name" calls become hash reads on self. Calls with
# a block, or with unexpected arguments, fall through to the default
# method_missing behavior.
# NOTE(review): respond_to_missing? should be defined alongside this —
# confirm elsewhere in the class.
def method_missing(sym,*args,&block)
  name = sym.to_s
  unless block
    # setter form: method name ends in '=' and exactly one argument given
    if (name =~ /=$/ && args.length == 1)
      return self[name.chop.to_sym] = args.first
    # getter form: no arguments
    elsif args.empty?
      return self[sym]
    end
  end
  return super(sym,*args,&block)
end
|
ruby
|
{
"resource": ""
}
|
q7901
|
HttpArchive.Archive.get_total_data
|
train
|
# Summary row for the archive: page title, number of entries, total size
# and on-load time in seconds — all returned as strings.
def get_total_data
  total_size = calc_total_size.to_s
  load_seconds = (@pages.first.on_load / 1000.0).to_s
  [@pages.first.title, @entries.size.to_s, total_size, load_seconds]
end
|
ruby
|
{
"resource": ""
}
|
q7902
|
HttpArchive.Archive.get_row_data
|
train
|
# Builds one display row per entry:
# [HTTP method, resource name (max 30 chars), status code, status text,
#  size in KB, duration in seconds].
def get_row_data
  @entries.map do |entry|
    request = entry.request
    url = request.url
    # Use the last path segment unless the URL ends with a slash, in
    # which case keep the whole URL.
    resource = url.end_with?("/") ? request.url : url[url.rindex("/")..-1]
    # first 30 characters of the resource name
    resource = resource[0, 30]
    response = entry.response
    [
      request.http_method,
      resource,
      response.status.to_s,
      response.status_text,
      (response.content['size'] / 1000.0).round(2).to_s,
      (entry.time / 1000.0).to_s
    ]
  end
end
|
ruby
|
{
"resource": ""
}
|
q7903
|
Bisques.AwsRequest.make_request
|
train
|
# Signs and executes the HTTP request, then freezes this object so it
# cannot be reused. Returns the AwsResponse wrapping the raw response.
def make_request
  create_authorization
  options = {}
  # Signed headers plus the computed Authorization header.
  options[:header] = authorization.headers.merge(
    'Authorization' => authorization.authorization_header
  )
  options[:query] = query if query.any?
  options[:body] = form_body if body
  http_response = @httpclient.request(method, url, options)
  @response = AwsResponse.new(self, http_response)
  # A request instance is single-use: freeze to prevent mutation after send.
  freeze
  @response
end
|
ruby
|
{
"resource": ""
}
|
q7904
|
Bisques.AwsRequest.form_body
|
train
|
# URL-encodes a Hash body into "k=v&k=v" form pairs using AWS encoding;
# any non-Hash body is returned untouched.
def form_body
  return body unless body.is_a?(Hash)
  body
    .map { |key, val| [AwsRequest.aws_encode(key), AwsRequest.aws_encode(val)].join("=") }
    .join("&")
end
|
ruby
|
{
"resource": ""
}
|
q7905
|
Bisques.AwsRequest.create_authorization
|
train
|
# Builds and memoizes the AwsRequestAuthorization for this request,
# copying over every field the signing process needs.
def create_authorization
  auth = AwsRequestAuthorization.new
  auth.url = url
  auth.method = method
  auth.query = query
  auth.body = form_body
  auth.region = region
  auth.service = service
  auth.credentials = credentials
  auth.headers = headers
  @authorization = auth
end
|
ruby
|
{
"resource": ""
}
|
q7906
|
Bisques.Queue.retrieve
|
train
|
# Long-polls the queue (up to +poll_time+ seconds) for a single message.
# Returns a Message, or nil when the queue is empty; raises QueueNotFound
# when SQS answers 404 for this queue URL.
def retrieve(poll_time = 1)
  response = client.receive_message(url, {"WaitTimeSeconds" => poll_time, "MaxNumberOfMessages" => 1})
  raise QueueNotFound.new(self, "not found at #{url}") if response.http_response.status == 404
  response.doc.xpath("//Message").map do |element|
    # Collect the message's Attribute elements into a name => value Hash.
    attributes = Hash[*element.xpath("Attribute").map do |attr_element|
      [attr_element.xpath("Name").text, attr_element.xpath("Value").text]
    end.flatten]
    Message.new(self, element.xpath("MessageId").text,
      element.xpath("ReceiptHandle").text,
      element.xpath("Body").text,
      attributes
    )
  end.first
end
|
ruby
|
{
"resource": ""
}
|
q7907
|
Authorizable.Proxy.process_permission
|
train
|
# Returns the cached decision for +permission+ when one exists;
# otherwise evaluates the permission from scratch.
def process_permission(permission, *args)
  cached = value_from_cache(permission, *args)
  cached.nil? ? evaluate_permission(permission, *args) : cached
end
|
ruby
|
{
"resource": ""
}
|
q7908
|
Authorizable.Proxy.value_from_cache
|
train
|
# Looks up a previously computed permission decision for the role the
# actor has on the first argument (an Event, Discount, etc.), avoiding
# re-evaluation.
def value_from_cache(permission, *args)
  role = get_role_of(args.first)
  cache.get_for_role(permission, role)
end
|
ruby
|
{
"resource": ""
}
|
q7909
|
Authorizable.Proxy.evaluate_permission
|
train
|
# Computes the value of +permission+ for this actor against args[0]
# (an Event, Discount, etc.): combines any configured permission proc
# with the role-based rule, caches the result per role, and returns it.
def evaluate_permission(permission, *args)
  # object; Event, Discount, etc
  o = args[0]
  # default to allow
  result = true
  role = get_role_of(o)
  # evaluate procs
  if (proc = PermissionUtilities.has_procs?(permission))
    # &= keeps the result true only while every check passes
    result &= proc.call(o, self)
  end
  # Here is where the addition of adding collaborations may reside
  # finally, determine if the user (self) can do the requested action
  result &= allowed_to_perform?(permission, role)
  # so we don't need to do everything again
  cache.set_for_role(
    name: permission,
    value: result,
    role: role
  )
  result
end
|
ruby
|
{
"resource": ""
}
|
q7910
|
Authorizable.Proxy.has_role_with
|
train
|
# Maps an object to a role constant: IS_OWNER when its user_id matches
# the acting user's id, IS_UNRELATED otherwise (including objects that
# do not expose user_id at all).
def has_role_with(object)
  return IS_UNRELATED unless object.respond_to?(:user_id)
  object.user_id == actor.id ? IS_OWNER : IS_UNRELATED
end
|
ruby
|
{
"resource": ""
}
|
q7911
|
Threadz.ThreadPool.spawn_thread
|
train
|
# Spawns a worker thread that consumes jobs from @queue forever.
# A Directive::SUICIDE_PILL item makes the thread decrement the worker
# count and terminate itself. Job errors are reported to stderr and the
# loop continues with the next job. The worker count is incremented as
# soon as the thread has been created.
def spawn_thread
  Thread.new do
    while true
      x = @queue.shift
      if x == Directive::SUICIDE_PILL
        @worker_threads_count.decrement
        Thread.current.terminate
      end
      # Give other threads a chance to run before starting the job.
      Thread.pass
      begin
        x.job.call(x)
      rescue StandardError => e
        $stderr.puts "Threadz: Error in thread, but restarting with next job: #{e.inspect}\n#{e.backtrace.join("\n")}"
      end
    end
  end
  @worker_threads_count.increment
end
|
ruby
|
{
"resource": ""
}
|
q7912
|
Threadz.ThreadPool.spawn_watch_thread
|
train
|
# Starts the pool supervisor thread. Every 0.1s it adjusts @killscore:
# upward when workers sit idle above @min_size, downward when the queue
# backs up below @max_size, otherwise decaying the score toward zero.
# When |@killscore| crosses @killthreshold, a worker is killed (positive
# score) or spawned (negative score) and the score resets.
def spawn_watch_thread
  @watch_thread = Thread.new do
    while true
      # If there are idle threads and we're above minimum
      if @queue.num_waiting > 0 && @worker_threads_count.value > @min_size # documented
        @killscore += THREADS_IDLE_SCORE * @queue.num_waiting
      # If there are no threads idle and we have room for more
      elsif(@queue.num_waiting == 0 && @worker_threads_count.value < @max_size) # documented
        @killscore -= THREADS_BUSY_SCORE * @queue.length
      else
        # Decay
        if @killscore != 0 # documented
          @killscore *= 0.9
        end
        # Snap small residual scores back to exactly zero.
        if @killscore.abs < 1
          @killscore = 0
        end
      end
      if @killscore.abs >= @killthreshold
        @killscore > 0 ? kill_thread : spawn_thread
        @killscore = 0
      end
      Threadz.dputs "killscore: #{@killscore}. waiting: #{@queue.num_waiting}. threads length: #{@worker_threads_count.value}. min/max: [#{@min_size}, #{@max_size}]"
      sleep 0.1
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7913
|
DuckMap.ArrayHelper.convert_to
|
train
|
# Converts every element of +values+ to the given +type+ (:string or
# :symbol). Elements that raise during conversion are skipped. Non-Array
# input is returned unchanged.
#
# Fix: rescue StandardError instead of Exception so interrupts, exit
# requests and other fatal errors are no longer swallowed.
def convert_to(values, type)
  return values unless values.kind_of?(Array)
  buffer = []
  values.each do |value|
    begin
      if type == :string
        buffer.push(value.to_s)
      elsif type == :symbol
        buffer.push(value.to_sym)
      end
    rescue StandardError
      # skip elements that cannot be converted
    end
  end
  return buffer
end
|
ruby
|
{
"resource": ""
}
|
q7914
|
BabyErubis.Template.from_file
|
train
|
# Loads a template from +filename+ (binary read with the given encoding;
# plain "rb" on pre-1.9 rubies) and compiles it. Returns self.
def from_file(filename, encoding='utf-8')
  mode = RUBY_VERSION < '1.9' ? "rb" : "rb:#{encoding}"
  source = File.open(filename, mode) { |f| f.read() }
  compile(parse(source), filename, 1)
  return self
end
|
ruby
|
{
"resource": ""
}
|
q7915
|
SchoolFriend.Session.additional_params
|
train
|
# Parameters appended to every API call, memoized. A session key is only
# included for session-scoped, non-OAuth2 sessions; every other case
# carries just the application key.
#
# Fix: collapsed the duplicated {application_key: ...} branches — the
# OAuth2 and application-scope cases returned the same Hash.
def additional_params
  @additional_params ||= if session_scope? && !oauth2_session?
    {application_key: application_key, session_key: options[:session_key]}
  else
    {application_key: application_key}
  end
end
|
ruby
|
{
"resource": ""
}
|
q7916
|
SchoolFriend.Session.api_call
|
train
|
# Performs an API GET for +method+ with the given params.
# Raises RequireSessionScopeError when a session-only call is forced
# while the session has application scope only.
#
# Fix: `&&` instead of the low-precedence `and` in the guard condition.
def api_call(method, params = {}, force_session_call = false)
  raise RequireSessionScopeError.new('This API call requires session scope') if force_session_call && application_scope?
  uri = build_uri(method, params)
  Net::HTTP.get_response(uri)
end
|
ruby
|
{
"resource": ""
}
|
q7917
|
SchoolFriend.Session.build_uri
|
train
|
# Builds the signed request URI for an API +method+ such as "users.get"
# (the first dot becomes a path separator).
def build_uri(method, params = {})
  uri = URI(api_server)
  uri.path = '/api/' + method.sub('.', '/')
  uri.query = URI.encode_www_form(sign(params))
  SchoolFriend.logger.debug "API Request: #{uri}"
  uri
end
|
ruby
|
{
"resource": ""
}
|
q7918
|
LazyCrud.InstanceMethods.undestroy
|
train
|
# Restores a soft-deleted record: finds it including deleted rows,
# clears deleted_at, saves, and responds with a redirect to the index
# action.
def undestroy
  @resource = resource_proxy(true).find(params[:id])
  set_resource_instance
  @resource.deleted_at = nil
  @resource.save
  respond_with(@resource, location: { action: :index })
  # flash[:notice] = "#{resource_name} has been undeleted"
  # redirect_to action: :index
end
|
ruby
|
{
"resource": ""
}
|
q7919
|
LazyCrud.InstanceMethods.resource_proxy
|
train
|
# Returns the collection/class used to look up resources: the parent's
# association when nested, otherwise the bare resource class. When
# +with_deleted+ is true and the proxy supports it, soft-deleted rows
# are included.
#
# Fix: `&&` instead of the low-precedence `and`.
def resource_proxy(with_deleted = false)
  proxy = if parent_instance.present?
    parent_instance.send(resource_plural_name)
  else
    self.class.resource_class
  end
  proxy = proxy.with_deleted if with_deleted && proxy.respond_to?(:with_deleted)
  proxy
end
|
ruby
|
{
"resource": ""
}
|
q7920
|
PuppetDBQuery.Parser.read_maximal_term
|
train
|
# Reads the largest term possible at the given operator +priority+:
# a minimal term followed by any infix continuations. Returns nil when
# the token stream is exhausted.
def read_maximal_term(priority)
  return nil if empty?
  logger.debug "read maximal term (#{priority})"
  term = add_next_infix_terms(priority, read_minimal_term)
  logger.debug "read maximal term: #{term}"
  term
end
|
ruby
|
{
"resource": ""
}
|
q7921
|
Kublog.ApplicationHelper.error_messages_for
|
train
|
# Renders a Rails-2-style error summary <div> for the given records.
# Trailing options Hash supports :header_message and :message, both
# falling back to I18n activerecord.errors.* translations.
# Returns nil when none of the objects have errors.
def error_messages_for(*objects)
  options = objects.extract_options!
  options[:header_message] ||= I18n.t(:"activerecord.errors.header", :default => "Invalid Fields")
  options[:message] ||= I18n.t(:"activerecord.errors.message", :default => "Correct the following errors and try again.")
  messages = objects.compact.map { |o| o.errors.full_messages }.flatten
  unless messages.empty?
    content_tag(:div, :class => "error_messages") do
      # h2 header + explanatory paragraph + <ul> of individual messages
      list_items = messages.map { |msg| content_tag(:li, msg) }
      content_tag(:h2, options[:header_message]) + content_tag(:p, options[:message]) + content_tag(:ul, list_items.join.html_safe)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7922
|
ShinyThemes.RendersTheme.update_current_theme
|
train
|
# Switches the rendered theme, mirrors the choice into the Rails theme
# config, and persists it unless options[:dont_save] is set.
# Returns the current theme object.
def update_current_theme(name, options = {})
  self.class.renders_theme(name, options)
  theme_config = Rails.application.config.theme
  theme_config.name = current_theme_name
  theme_config.layout = current_theme_layout
  ShinyThemes::Engine.theme_config.save unless options[:dont_save]
  self.class.theme
end
|
ruby
|
{
"resource": ""
}
|
q7923
|
Revenant.MySQL.acquire_lock
|
train
|
# Attempts to take the named MySQL advisory lock. Returns true only when
# the server reports "1"; any failure (query error, lost connection)
# yields false.
#
# Fix: rescue StandardError rather than ::Exception so interrupts and
# exit requests are not swallowed while still treating DB errors as
# "lock not acquired".
def acquire_lock(lock_name)
  acquired = false
  sql = lock_query(lock_name)
  connection.query(sql) do |result|
    acquired = result.fetch_row.first == "1"
  end
  acquired
rescue StandardError
  false
end
|
ruby
|
{
"resource": ""
}
|
q7924
|
ShinyThemes.ThemeConfig.load
|
train
|
# Merges the environment-specific theme config over the defaults and
# installs the result into Rails.application.config.theme. Values from
# the config file win over defaults.
def load
  env_config = full_config[Rails.env].try(:deep_symbolize_keys!) || {}
  # Honor values in config file over defaults
  @defaults.reject! { |key, _| env_config.key?(key) }
  Rails.application.config.theme.merge!(@defaults.merge(env_config))
end
|
ruby
|
{
"resource": ""
}
|
q7925
|
ShinyThemes.ThemeConfig.save
|
train
|
# Writes the current theme settings (minus default values) back to the
# YAML config file under the current Rails environment.
def save
  non_defaults = Rails.application.config.theme.reject { |key, _| @defaults.key?(key) }
  full_config[Rails.env].merge!(non_defaults)
  File.open(config_pathname, 'w') { |file| file << full_config.to_yaml }
end
|
ruby
|
{
"resource": ""
}
|
q7926
|
Rattler::Parsers.Super.parse
|
train
|
# Delegates parsing to the rule this one overrides (super-rule lookup
# by this rule's name).
def parse(scanner, rules, scope = ParserScope.empty)
  inherited = rules.inherited_rule(rule_name)
  inherited.parse(scanner, rules, scope)
end
|
ruby
|
{
"resource": ""
}
|
q7927
|
BorrowDirect.FindItem.exact_search_request_hash
|
train
|
# Builds the FindItem ExactSearch request payload for one or more
# +value+s of the given search +type+ (e.g. :isbn). A scalar value is
# treated as a one-element list.
#
# Fix: the iteration variable no longer shadows the +value+ argument.
def exact_search_request_hash(type, value)
  hash = {
    "PartnershipId" => Defaults.partnership_id,
    "ExactSearch" => []
  }
  Array(value).each do |single_value|
    hash["ExactSearch"] << {
      "Type" => type.to_s.upcase,
      "Value" => single_value
    }
  end
  return hash
end
|
ruby
|
{
"resource": ""
}
|
q7928
|
Hardmock.Expectation.raises
|
train
|
# Configures the expectation to raise when invoked: an Exception
# instance is used directly, a String becomes a RuntimeError with that
# message, and no argument yields RuntimeError.new("An Error").
# Returns self for chaining.
def raises(err=nil)
  @options[:raises] =
    case err
    when Exception then err
    when String    then RuntimeError.new(err)
    else                RuntimeError.new("An Error")
    end
  self
end
|
ruby
|
{
"resource": ""
}
|
q7929
|
Hardmock.Expectation.yields
|
train
|
# Configures the expectation to yield when the mocked method is invoked.
# With no +items+ the block is yielded once with no arguments; otherwise
# each item is yielded in turn, splatting Arrays into the block's
# parameters when the arity matches. Raises ExpectationError on arity
# mismatches. Returns self for chaining.
def yields(*items)
  @options[:suppress_arguments_to_block] = true
  if items.empty?
    # Yield once
    @options[:block] = lambda do |block|
      # Only a zero-arg block (or any-args, arity -1) is acceptable here.
      if block.arity != 0 and block.arity != -1
        raise ExpectationError.new("The given block was expected to have no parameter count; instead, got #{block.arity} to <#{to_s}>")
      end
      block.call
    end
  else
    # Yield one or more specific items
    @options[:block] = lambda do |block|
      items.each do |item|
        if item.kind_of?(Array)
          if block.arity == item.size
            # Unfold the array into the block's arguments:
            block.call *item
          elsif block.arity == 1
            # Just pass the array in
            block.call item
          else
            # Size mismatch
            raise ExpectationError.new("Can't pass #{item.inspect} to block with arity #{block.arity} to <#{to_s}>")
          end
        else
          if block.arity != 1
            # Size mismatch
            raise ExpectationError.new("Can't pass #{item.inspect} to block with arity #{block.arity} to <#{to_s}>")
          end
          block.call item
        end
      end
    end
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q7930
|
Words.Wordnet.find
|
train
|
# Looks up +term+ in WordNet and wraps the result in a Homographs
# object; returns nil for unknown terms. Raises NoWordnetConnection when
# not connected.
def find(term)
  raise NoWordnetConnection, "There is presently no connection to wordnet. To attempt to reistablish a connection you should use the 'open!' command on the Wordnet object." unless connected?
  result = @wordnet_connection.homographs(term)
  return nil if result.nil?
  Homographs.new(result, @wordnet_connection)
end
|
ruby
|
{
"resource": ""
}
|
q7931
|
Griddle.Point.to_rectangle
|
train
|
# Builds the Rectangle spanning from this point to +point+; width and
# height are the column/row deltas plus one (inclusive bounds).
def to_rectangle(point)
  diff = delta(point)
  Rectangle.new(row, col, diff.col + 1, diff.row + 1)
end
|
ruby
|
{
"resource": ""
}
|
q7932
|
MakeVoteable.Voter.up_vote
|
train
|
# Registers an up vote by this voter on +voteable+.
# An existing up vote raises Exceptions::AlreadyVotedError(true); an
# existing down vote is flipped to an up vote (undoing its effect on the
# down-vote counters). Counter caches on both objects are updated and
# all records are saved inside one transaction. Returns true.
def up_vote(voteable)
  check_voteable(voteable)
  voting = fetch_voting(voteable)
  if voting
    if voting.up_vote
      raise Exceptions::AlreadyVotedError.new(true)
    else
      # Flip an existing down vote: reverse its counter effects first.
      voting.up_vote = true
      voteable.down_votes -= 1
      self.down_votes -= 1 if has_attribute?(:down_votes)
    end
  else
    voting = Voting.create(:voteable => voteable, :voter_id => self.id, :voter_type => self.class.to_s, :up_vote => true)
  end
  voteable.up_votes += 1
  self.up_votes += 1 if has_attribute?(:up_votes)
  Voting.transaction do
    save
    voteable.save
    voting.save
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q7933
|
MakeVoteable.Voter.up_vote!
|
train
|
# Like #up_vote, but returns false instead of raising when this voter
# has already up-voted the object.
def up_vote!(voteable)
  up_vote(voteable)
  true
rescue Exceptions::AlreadyVotedError
  false
end
|
ruby
|
{
"resource": ""
}
|
q7934
|
MakeVoteable.Voter.down_vote!
|
train
|
# Like #down_vote, but returns false instead of raising when this voter
# has already down-voted the object.
def down_vote!(voteable)
  down_vote(voteable)
  true
rescue Exceptions::AlreadyVotedError
  false
end
|
ruby
|
{
"resource": ""
}
|
q7935
|
MakeVoteable.Voter.unvote
|
train
|
# Removes this voter's existing vote on +voteable+, decrementing the
# matching up/down counter caches on both objects and destroying the
# Voting record inside one transaction. Raises
# Exceptions::NotVotedError when no vote exists. Returns true.
def unvote(voteable)
  check_voteable(voteable)
  voting = fetch_voting(voteable)
  raise Exceptions::NotVotedError unless voting
  if voting.up_vote
    voteable.up_votes -= 1
    self.up_votes -= 1 if has_attribute?(:up_votes)
  else
    voteable.down_votes -= 1
    self.down_votes -= 1 if has_attribute?(:down_votes)
  end
  Voting.transaction do
    save
    voteable.save
    voting.destroy
  end
  true
end
|
ruby
|
{
"resource": ""
}
|
q7936
|
MakeVoteable.Voter.unvote!
|
train
|
# Like #unvote, but returns false instead of raising when no vote
# exists.
def unvote!(voteable)
  unvote(voteable)
  true
rescue Exceptions::NotVotedError
  false
end
|
ruby
|
{
"resource": ""
}
|
q7937
|
MakeVoteable.Voter.down_voted?
|
train
|
# True when this voter has an existing down vote on +voteable+.
def down_voted?(voteable)
  check_voteable(voteable)
  voting = fetch_voting(voteable)
  return false unless voting
  voting.has_attribute?(:up_vote) && !voting.up_vote
end
|
ruby
|
{
"resource": ""
}
|
q7938
|
Mindset.LoopbackConnection.read_packet_buffer
|
train
|
# Simulates one read of the headset from canned data: returns the next
# 64 wave packets, plus a full set of eSense / signal-quality /
# attention / meditation / blink packets on every 8th call (mirroring
# the device's slower eSense rate). Both indexes wrap to 0 when their
# canned data is exhausted.
def read_packet_buffer
  packets = @data[:wave][@wave_idx, 64].map { |val|
    Packet.factory(:wave, val) }
  @wave_idx += 64
  @wave_idx = 0 if @wave_idx >= @data[:wave].count
  if @counter == 7
    packets << Packet.factory(:delta, @data[:delta][@esense_idx])
    packets << Packet.factory(:theta, @data[:theta][@esense_idx])
    packets << Packet.factory(:lo_alpha, @data[:lo_alpha][@esense_idx])
    packets << Packet.factory(:hi_alpha, @data[:hi_alpha][@esense_idx])
    packets << Packet.factory(:lo_beta, @data[:lo_beta][@esense_idx])
    packets << Packet.factory(:hi_beta, @data[:hi_beta][@esense_idx])
    packets << Packet.factory(:lo_gamma, @data[:lo_gamma][@esense_idx])
    packets << Packet.factory(:mid_gamma, @data[:mid_gamma][@esense_idx])
    packets << Packet.factory(:signal_quality,
                              @data[:signal_quality][@esense_idx])
    packets << Packet.factory(:attention, @data[:attention][@esense_idx])
    packets << Packet.factory(:meditation, @data[:meditation][@esense_idx])
    packets << Packet.factory(:blink, @data[:blink][@esense_idx])
    @esense_idx += 1
    @esense_idx = 0 if @esense_idx >= @data[:delta].count
  end
  # Cycle 0..7 so eSense packets appear once per 8 reads.
  @counter = (@counter + 1) % 8
  packets
end
|
ruby
|
{
"resource": ""
}
|
q7939
|
Danger.DangerPackageJsonLockdown.verify
|
train
|
# Emits a Danger warning, with file/line location, for every package in
# +package_json+ that is not pinned to a fixed version.
def verify(package_json)
  inspect(package_json).each do |finding|
    message = "`#{finding[:package]}` doesn't specify fixed version number"
    warn(message, file: package_json, line: finding[:line])
  end
end
|
ruby
|
{
"resource": ""
}
|
q7940
|
Danger.DangerPackageJsonLockdown.inspect
|
train
|
# Parses +package_json+ and collects suspicious (non-fixed) dependency
# entries across all configured dependency sections.
# NOTE: intentionally shadows Object#inspect with a one-argument method.
def inspect(package_json)
  manifest = JSON.parse(File.read(package_json))
  dependency_keys.each_with_object([]) do |section, findings|
    next unless manifest.key?(section)
    findings.push(*find_something_suspicious(manifest[section], package_json))
  end
end
|
ruby
|
{
"resource": ""
}
|
q7941
|
Bisques.QueueListener.listen
|
train
|
# Starts a background thread that polls the queue and hands each
# retrieved message to +block+. Does nothing when already listening.
def listen(&block)
  return if @listening
  @listening = true
  @thread = Thread.new do
    loop do
      break unless @listening
      message = @queue.retrieve(@poll_time)
      block.call(message) if message.present?
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7942
|
TokenField.FormBuilder.token_field
|
train
|
# Renders a jQuery TokenInput-backed text field for a *_id / *_ids
# attribute, prepopulated from the current association value(s), plus
# the inline <script> that initializes the widget.
#
# Options: :model (override inferred model name), :token_url,
# :token_url_is_function, :append_to_id, :token_method (default
# :to_token), :on_add, :on_delete.
def token_field(attribute_name, options = {})
  # A collection-valued attribute (responds to #each) means a has-many.
  association_type = @object.send(attribute_name).respond_to?(:each) ? :many : :one
  model_name = options.fetch(:model) { attribute_name.to_s.gsub(/_ids?/, "") }.to_s
  association = attribute_name.to_s.gsub(/_ids?/, "").to_sym
  token_url = options.fetch(:token_url) { "/#{model_name.pluralize}/token.json" }
  token_url_is_function = options.fetch(:token_url_is_function) { false }
  append_to_id = options[:append_to_id]
  token_method = options.fetch(:token_method) { :to_token }
  # Single associations are limited to one token.
  token_limit = nil
  token_limit = 1 if association_type == :one
  id = @object.send(:id)
  html_id = "#{@object_name}_#{attribute_name.to_s}"
  if append_to_id == :id && id
    html_id << "_#{id}"
  elsif append_to_id && append_to_id != :id
    html_id << "_#{append_to_id}"
  end
  html_id = html_id.parameterize.underscore
  # Collect the currently associated record(s) for prepopulation.
  results = []
  if association_type == :one && @object.public_send(association)
    results << @object.public_send(association)
  elsif association_type == :many && @object.public_send(association.to_s.pluralize).count > 0
    @object.public_send(association.to_s.pluralize).each { |record| results << record }
  end
  data_pre = results.map{ |result| result.public_send(token_method) }
  value = data_pre.map{ |row| row[:id] }.join(',')
  on_add = options[:on_add] ? "#{options[:on_add]}" : "false"
  on_delete = options[:on_delete] ? "#{options[:on_delete]}" : "false"
  # Quote the URL unless it is a JS function expression.
  token_url = "'#{token_url}'" unless token_url_is_function
  js_content = "
    jQuery.noConflict();
    jQuery(function() {
      jQuery('##{html_id}').tokenInput(#{token_url}, {
        crossDomain: false,
        tokenLimit: #{token_limit.nil? ? "null" : token_limit.to_i},
        preventDuplicates: true,
        prePopulate: jQuery('##{attribute_name}').data('pre'),
        theme: 'facebook',
        hintText: '"+t('helpers.token_field.hint_text')+"',
        searchingText: '"+t('helpers.token_field.searching_text')+"',
        noResultsText: '"+t('helpers.token_field.no_results_text')+"',
        onAdd: "+on_add+",
        onDelete: "+on_delete+"
      });
    });
  "
  script = content_tag(:script, js_content.html_safe, :type => Mime::JS)
  text_field("#{attribute_name}", "data-pre" => data_pre.to_json, :value => value, :id => html_id) + script
end
|
ruby
|
{
"resource": ""
}
|
q7943
|
TreeRb.DomWalker.process_node
|
train
|
# Depth-first traversal: enters the node, visits each leaf child,
# recurses into each non-leaf child, then exits the node.
#
# Fix: replaced the non-idiomatic `unless ... else` with a positive
# `if`/`else` (same branches, readable polarity).
def process_node(node, level=1)
  entries = node.children
  @visitor.enter_node(node)
  entries.each do |entry|
    if is_leaf?(entry)
      @visitor.visit_leaf(entry)
    else
      process_node(entry, level+1)
    end
  end
  @visitor.exit_node(node)
end
|
ruby
|
{
"resource": ""
}
|
q7944
|
Cxxproject.Linkable.handle_whole_archive
|
train
|
# Appends the linker's whole-archive +flag+ to +res+ when the building
# block requests whole-archive linking and the flag is a non-empty
# value.
#
# Fix: `&&` instead of the low-precedence `and`.
def handle_whole_archive(building_block, res, linker, flag)
  return unless is_whole_archive(building_block)
  res.push(flag) if flag && !flag.empty?
end
|
ruby
|
{
"resource": ""
}
|
q7945
|
Cxxproject.Linkable.convert_to_rake
|
train
|
# Builds the rake task that links this target: spawns the linker with
# output piped (or redirected to @mapfile), parses the result, runs the
# post-link hook, and wires up dependencies, grouping tasks and a
# library freshness checker. Returns the file task.
#
# Fix: File.exists? was removed in Ruby 3.2 — replaced with File.exist?.
def convert_to_rake()
  object_multitask = prepare_tasks_for_objects()
  res = typed_file_task get_rake_task_type(), get_task_name => object_multitask do
    cmd = calc_command_line
    Dir.chdir(@project_dir) do
      mapfileStr = @mapfile ? " >#{@mapfile}" : ""
      rd, wr = IO.pipe
      cmdLinePrint = cmd
      printCmd(cmdLinePrint, "Linking #{executable_name}", false) #OK
      cmd << {
        :out=> @mapfile ? "#{@mapfile}" : wr, # > xy.map
        :err=>wr
      }
      sp = spawn(*cmd)
      cmd.pop
      # for console print
      cmd << " >#{@mapfile}" if @mapfile
      consoleOutput = ProcessHelper.readOutput(sp, rd, wr)
      process_result(cmdLinePrint, consoleOutput, @tcs[:LINKER][:ERROR_PARSER], nil)
      check_config_file()
      post_link_hook(@tcs[:LINKER])
    end
  end
  res.tags = tags
  res.immediate_output = true
  res.enhance(@config_files)
  res.enhance([@project_dir + "/" + @linker_script]) if @linker_script
  add_output_dir_dependency(get_task_name, res, true)
  add_grouping_tasks(get_task_name)
  setup_rake_dependencies(res, object_multitask)
  # check that all source libs are checked even if they are not a real rake dependency (can happen if "build this project only")
  begin
    libChecker = task get_task_name+"LibChecker" do
      if File.exist?(get_task_name) # otherwise the task will be executed anyway
        all_dependencies.each do |bb|
          if bb and StaticLibrary === bb
            f = bb.get_task_name # = abs path of library
            # Relink when any source library is missing or newer than the output.
            if not File.exist?(f) or File.mtime(f) > File.mtime(get_task_name)
              def res.needed?
                true
              end
              break
            end
          end
        end
      end
    end
  rescue
    # When the check itself fails, err on the side of relinking.
    def res.needed?
      true
    end
  end
  libChecker.transparent_timestamp = true
  res.enhance([libChecker])
  return res
end
|
ruby
|
{
"resource": ""
}
|
q7946
|
Cxxproject.SharedLibrary.post_link_hook
|
train
|
# After linking a shared library, create the conventional symlinks for
# both its basic name and its soname.
def post_link_hook(linker)
  [get_basic_name(linker), get_soname(linker)].each do |link_name|
    symlink_lib_to link_name
  end
end
|
ruby
|
{
"resource": ""
}
|
q7947
|
DuckMap.Attributes.sitemap_attributes
|
train
|
# Returns the sitemap attribute Hash for +key+, falling back to
# :default when the key is blank or has no Hash entry. The class-level
# attributes Hash is expected to always contain :default.
def sitemap_attributes(key = :default)
  key = key.blank? ? :default : key.to_sym
  key = :default unless self.class.sitemap_attributes[key].kind_of?(Hash)
  self.class.sitemap_attributes[key]
end
|
ruby
|
{
"resource": ""
}
|
q7948
|
JBLAS.MatrixGeneralMixin.hcat
|
train
|
# Horizontal concatenation with +y+; both matrices must have the same
# number of rows.
def hcat(y)
  raise ArgumentError, "Matrices must have same number of rows" unless self.dims[0] == y.dims[0]
  DoubleMatrix.concat_horizontally(self, y)
end
|
ruby
|
{
"resource": ""
}
|
q7949
|
JBLAS.MatrixGeneralMixin.vcat
|
train
|
# Vertical concatenation with +y+; both matrices must have the same
# number of columns.
def vcat(y)
  raise ArgumentError, "Matrices must have same number of columns" unless self.dims[1] == y.dims[1]
  DoubleMatrix.concat_vertically(self, y)
end
|
ruby
|
{
"resource": ""
}
|
q7950
|
RProgram.Option.arguments
|
train
|
# Formats +value+ into the argument list for this option.
# true -> just the flag; false/nil -> no arguments. Any other value is
# normalized by super, then either formatted one argument at a time
# (@multiple) or joined with @separator and formatted as one argument.
def arguments(value)
  case value
  when true
    [@flag]
  when false, nil
    []
  else
    value = super(value)
    if @multiple
      # each element becomes its own formatted argument group
      args = []
      value.each do |arg|
        args += Array(@formatter.call(self,[arg]))
      end
      return args
    else
      value = [value.join(@separator)] if @separator
      return Array(@formatter.call(self,value))
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7951
|
Specify.Database.close
|
train
|
# Closes every open session and stops observing it. No-op when there
# are no sessions.
def close
  return if sessions.empty?
  sessions.each do |open_session|
    open_session.close
    open_session.delete_observer self
  end
  # TODO: should close database connection
end
|
ruby
|
{
"resource": ""
}
|
q7952
|
Mei.WebServiceBase.get_json
|
train
|
# Fetches +url+ and parses the response body as JSON.
def get_json(url)
  response = Mei::WebServiceBase.fetch(url)
  JSON.parse(response.body)
end
|
ruby
|
{
"resource": ""
}
|
q7953
|
Logan.Comment.creator=
|
train
|
# Assigns the comment's creator, converting a raw attribute Hash into a
# Logan::Person.
def creator=(creator)
  @creator = if creator.is_a?(Hash)
    Logan::Person.new(creator)
  else
    creator
  end
end
|
ruby
|
{
"resource": ""
}
|
q7954
|
Logan.Todo.assignee=
|
train
|
# Assigns the todo's assignee, converting a raw attribute Hash into a
# Logan::Person.
def assignee=(assignee)
  @assignee = if assignee.is_a?(Hash)
    Logan::Person.new(assignee)
  else
    assignee
  end
end
|
ruby
|
{
"resource": ""
}
|
q7955
|
Logan.Todo.create_comment
|
train
|
# POSTs +comment+ to this todo's comments endpoint and returns the
# created Logan::Comment.
def create_comment(comment)
  json_headers = Logan::Client.headers.merge({'Content-Type' => 'application/json'})
  response = Logan::Client.post "/projects/#{@project_id}/todos/#{@id}/comments.json",
                                { :body => comment.post_json, :headers => json_headers }
  Logan::Comment.new response
end
|
ruby
|
{
"resource": ""
}
|
q7956
|
Firering.Room.today_transcript
|
train
|
# Fetches today's transcript for the room and yields an Array of
# Firering::Message objects to +callback+ (if given).
def today_transcript(&callback)
  connection.http(:get, "/room/#{id}/transcript.json") do |data, http|
    next unless callback
    messages = data[:messages].map { |msg| Firering::Message.instantiate(connection, msg) }
    callback.call(messages)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7957
|
Firering.Room.transcript
|
train
|
# Fetches the room transcript for a specific date and yields an Array
# of Firering::Message objects to +callback+ (if given).
def transcript(year, month, day, &callback)
  connection.http(:get, "/room/#{id}/transcript/#{year}/#{month}/#{day}.json") do |data, http|
    next unless callback
    messages = data[:messages].map { |msg| Firering::Message.instantiate(connection, msg) }
    callback.call(messages)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7958
|
Firering.Room.speak
|
train
|
# Posts +data+ as a message to the room (the API replies 201 Created)
# and yields the created Firering::Message to +callback+.
#
# Fix: the response block parameter no longer shadows the +data+
# argument being sent.
def speak(data, &callback)
  connection.http(:post, "/room/#{id}/speak.json", "message" => data) do |response_data, http|
    callback.call(Firering::Message.instantiate(connection, response_data, "message")) if callback
  end
end
|
ruby
|
{
"resource": ""
}
|
q7959
|
Emotions.Emotion.ensure_valid_emotion_name
|
train
|
# Validation hook: adds an error on :emotion unless the emotion name is
# one of the registered Emotions.emotions symbols.
def ensure_valid_emotion_name
  return if Emotions.emotions.include?(emotion.try(:to_sym))
  errors.add :emotion, I18n.t(:invalid, scope: [:errors, :messages])
end
|
ruby
|
{
"resource": ""
}
|
q7960
|
Sjekksum.ISBN10.of
|
train
|
# ISBN-10 check digit for +number+: weighted sum (weights 2..10 from
# the last digit backwards) mod 11, with a result of 10 rendered as "X".
def of(number)
  raise_on_type_mismatch number
  digits = convert_number_to_digits(number)[0..9]
  weighted_sum = 0
  digits.reverse_each.with_index do |digit, idx|
    weighted_sum += digit * (idx + 2)
  end
  check = (11 - weighted_sum % 11) % 11
  check == 10 ? "X" : check
end
|
ruby
|
{
"resource": ""
}
|
q7961
|
Sjekksum.ISBN10.valid?
|
train
|
# True when +number+ is a well-formed ISBN-10 (nine base digits) whose
# check digit matches the computed one.
def valid?(number)
  raise_on_type_mismatch number
  base, check_digit = split_isbn_number(number)
  convert_number_to_digits(base).length == 9 && self.of(base) == check_digit
end
|
ruby
|
{
"resource": ""
}
|
q7962
|
Sjekksum.ISBN10.convert
|
train
|
# Appends the ISBN-10 check digit to +number+. String input (or an "X"
# check digit) yields a String result; otherwise an Integer.
#
# Fixes: the check digit is computed once instead of three times, string
# concatenation uses `+` instead of `<<` (which mutated the caller's
# String argument, since String#to_s returns the receiver), and `||`
# replaces the low-precedence `or`.
def convert(number)
  raise_on_type_mismatch number
  check = self.of(number)
  if number.is_a?(String) || check.is_a?(String)
    number.to_s + check.to_s
  else
    convert_to_int(number) * 10 + check
  end
end
|
ruby
|
{
"resource": ""
}
|
q7963
|
Calculated.Session.api_call
|
train
|
# Performs an API call, serving a cached response when caching is
# enabled and one exists for this path/params; otherwise delegates to
# api_call_without_logging, wrapped in logging when @logging is set.
def api_call(method, path, params ={}, &proc)
  # NOTE: assignment in the condition is deliberate — `cache` receives
  # the cached value (or false when caching is off / nothing is cached).
  if cache = caching? && (@cache[cache_key(path, params)])
    return cache
  else
    if @logging
      Calculated::Logging.log_calculated_api(method, path, params) do
        api_call_without_logging(method, path, params, &proc)
      end
    else
      api_call_without_logging(method, path, params, &proc)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q7964
|
IceCubeCron.ExpressionParser.split_parts_and_interval
|
train
|
# Splits a cron-like expression into its whitespace-separated parts and
# an optional "/interval" suffix. Blank or "*" parts become nil; the
# last non-blank interval seen wins.
def split_parts_and_interval(expression_str)
  interval = nil
  parts = expression_str.split(/ +/).map do |raw_part|
    field, slash_interval = raw_part.split('/')
    interval = slash_interval unless slash_interval.blank?
    (field.blank? || field == '*') ? nil : field
  end
  [parts, interval]
end
|
ruby
|
{
"resource": ""
}
|
q7965
|
IceCubeCron.ExpressionParser.string_to_expression_parts
|
train
|
# Converts a cron-like expression string into a Hash keyed by
# EXPRESSION_PART_KEYS, dropping absent parts and adding :interval when
# one was present. Returns {} for nil input.
def string_to_expression_parts(expression_str)
  return {} if expression_str.nil?
  parts, interval = split_parts_and_interval(expression_str)
  expression_parts = ::Hash[EXPRESSION_PART_KEYS.zip(parts)]
  expression_parts.reject! { |_key, value| value.nil? }
  expression_parts[:interval] = interval unless interval.nil?
  expression_parts
end
|
ruby
|
{
"resource": ""
}
|
q7966
|
Rattler::Parsers.Assert.parse
|
train
|
# Zero-width positive lookahead: reports whether the child matches at
# the current position, always restoring the scanner position.
def parse(scanner, rules, scope = ParserScope.empty)
  saved_pos = scanner.pos
  matched = child.parse(scanner, rules, scope) && true
  scanner.pos = saved_pos
  matched
end
|
ruby
|
{
"resource": ""
}
|
q7967
|
Cxxproject.OsxSharedLibs.post_link_hook
|
train
|
# After linking on OS X, symlink the shared library to its basic name.
def post_link_hook(linker, bb)
  symlink_lib_to(get_basic_name(linker, bb), bb)
end
|
ruby
|
{
"resource": ""
}
|
q7968
|
T2Airtime.API.transaction_list
|
train
|
# Lists transactions between +start+ and +stop+ (defaulting to the last
# 24 hours), optionally filtered by msisdn, destination msisdn and
# error code.
#
# Fix: replaced the `cond && @params[...] = x` assignment chains with
# explicit conditional assignments.
def transaction_list(start = (Time.now - 24.hours), stop = Time.now, msisdn = nil, destination = nil, code = nil)
  @params = {
    stop_date: to_yyyymmdd(stop),
    start_date: to_yyyymmdd(start)
  }
  @params[:code] = code if code && !code.empty?
  @params[:msisdn] = msisdn if msisdn && !msisdn.empty?
  @params[:destination_msisdn] = destination if destination && !destination.empty?
  run_action :trans_list
end
|
ruby
|
{
"resource": ""
}
|
q7969
|
ProfitBricks.Server.detach_volume
|
train
|
# Detaches the volume identified by +volume_id+ from this server.
def detach_volume(volume_id)
  ProfitBricks::Volume.get(datacenterId, nil, volume_id).detach(id)
end
|
ruby
|
{
"resource": ""
}
|
q7970
|
DuckMap.ControllerHelpers.sitemap_setup
|
train
|
# Resolves the sitemap handler configured for the requested action and
# invokes it, returning the rows it produces ([] when no usable handler
# is configured).
def sitemap_setup(options = {})
  DuckMap.logger.debug "sitemap_setup: action_name => #{options[:action_name]} source => #{options[:source]} model => #{options[:model]}"
  attributes = self.sitemap_attributes(options[:action_name])
  DuckMap.logger.debug "sitemap_setup: attributes => #{attributes}"
  handler = attributes.kind_of?(Hash) ? attributes[:handler] : nil
  return [] unless handler.kind_of?(Hash) && !handler[:action_name].blank?
  self.send(handler[:action_name], {handler: handler}.merge(options))
end
|
ruby
|
{
"resource": ""
}
|
q7971
|
Rattler::Util.ParserCLI.run
|
train
|
# Reads all input from ARGF, parses it with the configured parser class
# and displays the result; syntax errors are printed, not raised.
def run
  show_result @parser_class.parse!(ARGF.read)
rescue Rattler::Runtime::SyntaxError => e
  puts e
end
|
ruby
|
{
"resource": ""
}
|
q7972
|
PatternPatch.Methods.patch
|
train
|
# Loads the named patch from "<patch_dir>/<name>.yml". Raises
# ConfigurationError when patch_dir is unset or not a directory.
def patch(name)
  raise ConfigurationError, "patch_dir has not been set" if patch_dir.nil?
  raise ConfigurationError, "patch_dir is not a directory" unless Dir.exist?(patch_dir)
  yaml_path = File.join(patch_dir, "#{name}.yml")
  Patch.from_yaml yaml_path
end
|
ruby
|
{
"resource": ""
}
|
q7973
|
Rattler.Runner.run
|
train
|
# Runs analysis and, on success, code synthesis; on a parse failure
# prints the parser's failure report and exits with ERRNO_PARSE_ERROR.
def run
  # NOTE: assignment in the condition is intentional — `result` is
  # falsy when analysis fails.
  if result = analyze
    synthesize(result)
  else
    puts parser.failure
    exit ERRNO_PARSE_ERROR
  end
end
|
ruby
|
{
"resource": ""
}
|
q7974
|
Weather.Planner.get_dewpoints
|
train
|
# Returns the trip's high/low dewpoint statistics in both imperial (F)
# and metric (C) units, or the API error hash when the request failed.
#
# Fix: the four near-identical min/max/avg hashes are now built by
# helpers instead of being spelled out inline.
def get_dewpoints(start_date, end_date, location)
  response = get_planner_response(start_date, end_date, location)
  error = response['response']['error']
  return error unless error.nil?
  {
    high: dewpoint_units(response['trip']['dewpoint_high']),
    low: dewpoint_units(response['trip']['dewpoint_low'])
  }
end

# Internal: converts one raw dewpoint stats hash into
# {imperial: {...}, metric: {...}}.
def dewpoint_units(stats)
  {
    imperial: dewpoint_stats(stats, 'F'),
    metric: dewpoint_stats(stats, 'C')
  }
end

# Internal: extracts min/max/avg for a unit key ('F' or 'C') as
# integers.
def dewpoint_stats(stats, unit)
  {
    minimum: stats['min'][unit].to_i,
    maximum: stats['max'][unit].to_i,
    average: stats['avg'][unit].to_i
  }
end
|
ruby
|
{
"resource": ""
}
|
q7975
|
Weather.Planner.get_planner_response
|
train
|
# Requests the planner endpoint for the MMDD-formatted date range.
def get_planner_response(start_date, end_date, location)
  date_range = "#{start_date.strftime('%m%d')}#{end_date.strftime('%m%d')}"
  get("planner_#{date_range}", location)
end
|
ruby
|
{
"resource": ""
}
|
q7976
|
BorrowDirect.Util.hash_key_path
|
train
|
# Digs through nested indexable structures along +path+, returning nil
# as soon as a level does not respond to #[] (and nil for an empty
# path).
def hash_key_path(hash, *path)
  result = nil
  current = hash
  path.each do |key|
    return nil unless current.respond_to?(:"[]")
    result = current = current[key]
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q7977
|
Dirigible.Configuration.options
|
train
|
# Snapshot of every valid configuration option as a Hash of
# key => current value.
def options
  VALID_OPTION_KEYS.each_with_object({}) do |key, opts|
    opts[key] = send(key)
  end
end
|
ruby
|
{
"resource": ""
}
|
q7978
|
Logan.ProjectTemplate.create_project
|
train
|
# Creates a project from this template via the API and returns the
# resulting Logan::Project.
def create_project(name, description = nil)
  payload = {name: name, description: description}.to_json
  json_headers = Logan::Client.headers.merge({'Content-Type' => 'application/json'})
  response = Logan::Client.post "/project_templates/#{@id}/projects.json",
                                { :body => payload, :headers => json_headers }
  Logan::Project.new response
end
|
ruby
|
{
"resource": ""
}
|
q7979
|
DuckMap.ConfigHelpers.log_level
|
train
|
# Sets the DuckMap logger level; when options[:full] is given it also
# toggles full exception logging.
def log_level(value, options = {})
  DuckMap::Logger.log_level = value
  DuckMap.logger.full_exception = options[:full] if options.has_key?(:full)
end
|
ruby
|
{
"resource": ""
}
|
q7980
|
RProgram.Task.leading_non_options
|
train
|
# Collect the argument strings for all leading non-options.
#
# Gathers this task's own leading non-option arguments first, then
# appends the leading non-options of every subtask.
#
# Returns an Array of argument values.
def leading_non_options
  args = @options.each_with_object([]) do |(name, value), acc|
    non_opt = get_non_option(name)
    acc.concat(non_opt.arguments(value)) if non_opt && non_opt.leading?
  end
  @subtasks.each_value do |subtask|
    args.concat(subtask.leading_non_options)
  end
  args
end
|
ruby
|
{
"resource": ""
}
|
q7981
|
RProgram.Task.options
|
train
|
# Collect the argument strings for all options.
#
# Subtask arguments come first, followed by this task's own option
# arguments (skipping names with no matching option).
#
# Returns an Array of argument values.
def options
  args = []
  @subtasks.each_value do |subtask|
    args.concat(subtask.arguments)
  end
  @options.each do |name, value|
    opt = get_option(name)
    next unless opt
    args.concat(opt.arguments(value))
  end
  args
end
|
ruby
|
{
"resource": ""
}
|
q7982
|
RProgram.Task.tailing_non_options
|
train
|
# Collect the argument strings for all tailing non-options.
#
# Subtask tailing non-options come first, followed by this task's own
# tailing non-option arguments.
#
# Returns an Array of argument values.
def tailing_non_options
  args = []
  @subtasks.each_value do |subtask|
    args.concat(subtask.tailing_non_options)
  end
  @options.each do |name, value|
    non_opt = get_non_option(name)
    next unless non_opt && non_opt.tailing?
    args.concat(non_opt.arguments(value))
  end
  args
end
|
ruby
|
{
"resource": ""
}
|
q7983
|
RProgram.Task.arguments
|
train
|
# Assemble the full command-line argument list for the task.
#
# Layout: leading non-options, then options, then tailing non-options.
# If any tailing argument begins with '-', a literal '--' separator is
# inserted so it is not mistaken for an option.
#
# Returns an Array of arguments.
def arguments
  tail = tailing_non_options
  needs_separator = tail.any? { |arg| arg[0, 1] == '-' }
  tail.unshift('--') if needs_separator
  leading_non_options + options + tail
end
|
ruby
|
{
"resource": ""
}
|
q7984
|
Rattler::Compiler::Optimizer.OptimizationSequence.deep_apply
|
train
|
# Recursively optimize a parser tree.
#
# Applies the optimizations to +parser+ itself, then to each child
# (with a child context derived from the optimized parent), and finally
# re-applies the optimizations to the rebuilt node.
#
# Returns the fully optimized parser.
def deep_apply(parser, context)
  optimized = apply(parser, context)
  rebuilt = optimized.map_children do |child|
    deep_apply(child, child_context(optimized, context))
  end
  apply(rebuilt, context)
end
|
ruby
|
{
"resource": ""
}
|
q7985
|
Rattler::Parsers.Choice.parse
|
train
|
# Attempt each alternative child in order and return the first
# successful parse result.
#
# scanner - the scanner positioned at the input to parse
# rules   - the grammar rules available for rule references
# scope   - ParserScope of bindings visible to the children
#
# Returns the first truthy child parse result, or +false+ when no
# alternative matches.
def parse(scanner, rules, scope = ParserScope.empty)
  # `each` instead of `for`: `for` leaks its loop variable into the
  # enclosing scope and is non-idiomatic; semantics are otherwise
  # identical (first truthy result wins).
  children.each do |child|
    result = child.parse(scanner, rules, scope)
    return result if result
  end
  false
end
|
ruby
|
{
"resource": ""
}
|
q7986
|
Archive.Compress.compress
|
train
|
# Compress the given files into the archive.
#
# files   - Array of paths; every entry must be a regular file.
# verbose - whether to log progress (default false).
#
# Raises ArgumentError if any path is not a regular file (directories
# and dangling symlinks are rejected up front).
def compress(files, verbose = false)
  unless files.all? { |f| File.file?(f) }
    raise ArgumentError, "Files supplied must all be real, actual files -- not directories or symlinks."
  end
  configure_archive
  compress_files(files, verbose)
  free_archive
end
|
ruby
|
{
"resource": ""
}
|
q7987
|
CfWebserver.CfApiServlet.do_POST
|
train
|
# Handle an HTTP POST by dispatching to a class method on CfRestService.
#
# The request path is split on '/'; path[1] names the REST method, and
# when more segments follow they are forwarded as an extra Array argument.
#
# req  - the WEBrick request being serviced.
# resp - the WEBrick response to populate.
#
# Raises HTTPStatus::NotFound for missing/unknown methods, and
# HTTPStatus::OK on success (WEBrick treats the raised status as the
# way to finish the response).
def do_POST(req, resp)
  # Split the path into pieces
  path = req.path[1..-1].split('/')
  # We don't want to cache catflap login page so set response headers.
  # Chrome and FF respect the no-store, while IE respects no-cache.
  resp['Cache-Control'] = 'no-cache, no-store'
  resp['Pragma'] = 'no-cache' # Legacy
  resp['Expires'] = '-1' # Microsoft advises this for older IE browsers.
  response_class = CfRestService.const_get 'CfRestService'
  raise "#{response_class} not a Class" unless response_class.is_a?(Class)
  raise HTTPStatus::NotFound unless path[1]
  response_method = path[1].to_sym
  # Make sure the method exists in the class
  raise HTTPStatus::NotFound unless response_class
                                    .respond_to? response_method
  # NOTE(review): the :sync and :knock special cases below do NOT return
  # or skip the generic dispatch that follows, so for these two methods
  # the handler is invoked TWICE (once here, once below) and the second
  # result overwrites resp.body. Confirm whether the double invocation
  # is intentional before changing it.
  if :sync == response_method
    resp.body = response_class.send response_method, req, resp, @cf
  end
  if :knock == response_method
    resp.body = response_class.send response_method, req, resp, @cf
  end
  # Remaining path segments get passed in as arguments to the method
  if path.length > 2
    resp.body = response_class.send response_method, req, resp,
                                    @cf, path[1..-1]
  else
    resp.body = response_class.send response_method, req, resp, @cf
  end
  raise HTTPStatus::OK
end
|
ruby
|
{
"resource": ""
}
|
q7988
|
Aptly.Snapshot.update!
|
train
|
# Push attribute changes for this snapshot to the Aptly API.
#
# kwords - attribute keywords; keys are capitalized to match the API's
#          field naming before being serialized.
#
# Returns self after reloading the returned attributes, or nil when the
# server response matches the current state (nothing changed).
def update!(**kwords)
  payload = kwords.map { |key, value| [key.to_s.capitalize, value] }.to_h
  response = @connection.send(:put,
                              "/snapshots/#{self.Name}",
                              body: JSON.generate(payload))
  attributes = JSON.parse(response.body, symbolize_names: true)
  return nil if attributes == marshal_dump
  marshal_load(attributes)
  self
end
|
ruby
|
{
"resource": ""
}
|
q7989
|
Aptly.Snapshot.diff
|
train
|
# Compare this snapshot against another via the Aptly API.
#
# other_snapshot - the snapshot to diff against.
#
# Returns the parsed JSON diff from the server.
def diff(other_snapshot)
  path = "/snapshots/#{self.Name}/diff/#{other_snapshot.Name}"
  response = @connection.send(:get, path)
  JSON.parse(response.body)
end
|
ruby
|
{
"resource": ""
}
|
q7990
|
Codeqa.Configuration.git_root_till_home
|
train
|
# Search upward from the current directory for a git repository root.
#
# Walks each ancestor of Dir.pwd; stops (returning nil) once the home
# directory is reached without finding a `.git` directory.
#
# Returns the Pathname of the first ancestor containing `.git`, or nil.
def git_root_till_home
  Pathname.new(Dir.pwd).ascend do |dir|
    return dir if File.directory?("#{dir}/.git")
    return nil if dir.to_s == home_dir
  end
end
|
ruby
|
{
"resource": ""
}
|
q7991
|
PuppetDBQuery.MongoDB.node_properties
|
train
|
# Read all node property documents from MongoDB.
#
# Returns a Hash mapping each document's _id to its remaining fields
# (the _id key is stripped from each value).
def node_properties
  cursor = connection[node_properties_collection].find.batch_size(999)
  cursor.each_with_object({}) do |document, result|
    result[document.delete('_id')] = document
  end
end
|
ruby
|
{
"resource": ""
}
|
q7992
|
PuppetDBQuery.MongoDB.all_nodes
|
train
|
# List the identifiers of every node document.
#
# Returns an Array of _id values.
def all_nodes
  cursor = connection[nodes_collection].find.batch_size(999)
  cursor.projection(_id: 1).map { |doc| doc[:_id] }
end
|
ruby
|
{
"resource": ""
}
|
q7993
|
PuppetDBQuery.MongoDB.query_nodes
|
train
|
# List the identifiers of node documents matching +query+.
#
# query - a MongoDB filter document.
#
# Returns an Array of _id values.
def query_nodes(query)
  cursor = connection[nodes_collection].find(query).batch_size(999)
  cursor.projection(_id: 1).map { |doc| doc[:_id] }
end
|
ruby
|
{
"resource": ""
}
|
q7994
|
PuppetDBQuery.MongoDB.query_facts
|
train
|
# Fetch the requested facts for every node matching +query+.
#
# query - a MongoDB filter document.
# facts - fact names to project (empty means all fields).
#
# Returns a Hash mapping node _id to a Hash of its projected facts.
def query_facts(query, facts = [])
  projection = facts.map { |fact| [fact.to_sym, 1] }.to_h
  cursor = connection[nodes_collection].find(query)
                                       .batch_size(999)
                                       .projection(projection)
  cursor.each_with_object({}) do |document, result|
    result[document.delete('_id')] = document
  end
end
|
ruby
|
{
"resource": ""
}
|
q7995
|
PuppetDBQuery.MongoDB.query_facts_exist
|
train
|
# Like #query_facts, but drops nodes where none of the requested facts
# has a value.
#
# query - a MongoDB filter document.
# facts - fact names to project; when empty, no filtering is applied.
#
# Returns a Hash mapping node _id to its facts (filtered in place).
def query_facts_exist(query, facts = [])
  matches = query_facts(query, facts)
  return matches if facts.empty?
  matches.keep_if do |_node, values|
    facts.any? { |fact| !values[fact].nil? }
  end
end
|
ruby
|
{
"resource": ""
}
|
q7996
|
PuppetDBQuery.MongoDB.search_facts
|
train
|
# Search matching nodes for fact values (and optionally fact names)
# matching +pattern+.
#
# query       - a MongoDB filter document selecting the nodes to scan.
# pattern     - Regexp matched against each fact value (and, when
#               check_names is true, each fact name).
# facts       - extra fact names whose values are always included for
#               any node that matched.
# facts_found - Array MUTATED in place: names of matching facts are
#               appended (deduplicated).
# check_names - also match the pattern against fact names.
#
# Returns a Hash mapping node _id to a Hash of matched (plus requested)
# facts; nodes with no match are omitted.
def search_facts(query, pattern, facts = [], facts_found = [], check_names = false)
  result = {}
  connection[nodes_collection].find(query).batch_size(999).each do |document|
    node = document.delete('_id')
    matched = {}
    document.each do |fact, value|
      matched[fact] = value if value =~ pattern || (check_names && fact =~ pattern)
    end
    next if matched.empty?
    # record which fact names matched before merging in the extras
    facts_found.concat(matched.keys).uniq!
    facts.each { |fact| matched[fact] = document[fact] }
    result[node] = matched
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q7997
|
PuppetDBQuery.MongoDB.single_node_facts
|
train
|
# Fetch the requested facts for one node.
#
# node  - the node identifier (used as the Mongo _id).
# facts - fact names to project.
#
# Returns a Hash of facts (without the _id key), or nil when the node
# does not exist.
def single_node_facts(node, facts)
  projection = facts.map { |fact| [fact.to_sym, 1] }.to_h
  document = connection[nodes_collection].find(_id: node)
                                         .limit(1)
                                         .batch_size(1)
                                         .projection(projection)
                                         .to_a.first
  document.delete('_id') if document
  document
end
|
ruby
|
{
"resource": ""
}
|
q7998
|
PuppetDBQuery.MongoDB.meta
|
train
|
# Return the stored metadata document, without its Mongo _id.
#
# Returns a Hash of metadata, or nil when the meta collection is empty.
# (Previously an empty collection raised NoMethodError on nil.delete.)
def meta
  collection = connection[meta_collection]
  result = collection.find.first
  # guard: #first yields nil for an empty collection
  result.delete(:_id) if result
  result
end
|
ruby
|
{
"resource": ""
}
|
q7999
|
PuppetDBQuery.MongoDB.node_update
|
train
|
# Insert or replace (upsert) the fact document for +node+.
#
# node  - node identifier used as the Mongo _id.
# facts - Hash of facts to store.
#
# MongoDB rejects field names containing '.', so on such failures we
# retry once with dots in keys replaced by underscores.
def node_update(node, facts)
  logger.debug " updating #{node}"
  replace_node_document(node, facts)
rescue ::Mongo::Error::OperationFailure => e
  logger.warn " updating #{node} failed with: #{e.message}"
  # mongodb doesn't support keys with a dot
  # see https://docs.mongodb.com/manual/reference/limits/#Restrictions-on-Field-Names
  # as a dirty workaround we delete the document and insert it ;-)
  # The dotted field .. in .. is not valid for storage. (57)
  # .. is an illegal key in MongoDB. Keys may not start with '$' or contain a '.'.
  # (BSON::String::IllegalKey)
  raise e unless e.message =~ /The dotted field / || e.message =~ /is an illegal key/
  logger.warn " we transform the dots into underline characters"
  begin
    sanitized = Hash[facts.map { |k, v| [k.tr('.', '_'), v] }]
    replace_node_document(node, sanitized)
  rescue StandardError => retry_error
    # BUG FIX: this previously logged the ORIGINAL exception's message
    # (e.message), hiding why the retry itself failed.
    logger.error " inserting node #{node} failed again with: #{retry_error.message}"
  end
end

# Upsert +document+ as the node document with _id == node.
# Shared by the initial attempt and the dot-sanitized retry above.
def replace_node_document(node, document)
  connection[nodes_collection].find(_id: node).replace_one(document,
                                                           upsert: true,
                                                           bypass_document_validation: true,
                                                           check_keys: false,
                                                           validating_keys: false)
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.