_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q8100
|
BitMagic.BitsGenerator.each_value
|
train
|
# Yields every attainable value for the given bit positions: 0 first, then the
# configured default (when non-zero), then the bitwise-OR of every non-empty
# combination of bits. Returns the number of values yielded.
# NOTE(review): assumes @options[:default] and self.bits are set by the
# enclosing class -- not visible here.
def each_value(each_bits = nil, &block)
# Warning! This has exponential complexity (time and space)
# 2**n to be precise, use sparingly
yield 0
count = 1
# Emit the configured default exactly once when it differs from 0.
if @options[:default] != 0
yield @options[:default]
count += 1
end
each_bits = self.bits if each_bits == nil
# Each subset of bit positions (size 1..n) ORs into one distinct value.
1.upto(each_bits.length).each do |i|
each_bits.combination(i).each do |bits_list|
num = bits_list.reduce(0) { |m, j| m |= (1 << j) }
yield num
count += 1
end
end
count
end
|
ruby
|
{
"resource": ""
}
|
q8101
|
BitMagic.BitsGenerator.all_values
|
train
|
# Collects every value produced by #each_value into an array.
# Warns when the bit count exceeds opts[:warn_threshold] (default 12),
# since the result holds 2**n entries.
def all_values(each_bits = nil, opts = {warn_threshold: 12})
# Shuffle things around so that people can call #all_values(warn_threshold: false)
if each_bits.is_a?(Hash)
opts = each_bits
each_bits = nil
end
each_bits = self.bits if each_bits == nil
if opts[:warn_threshold] and each_bits.length > opts[:warn_threshold]
warn "There are #{each_bits.length} bits. You will have #{2**(each_bits.length)} values in the result. Please carefully benchmark the execution time and memory usage of your use-case."
warn "You can disable this warning by using #all_values(warn_threshold: false)"
end
values = []
self.each_value(each_bits) {|num| values << num }
values
end
|
ruby
|
{
"resource": ""
}
|
q8102
|
BitMagic.BitsGenerator.equal_to
|
train
|
# Returns every value (from #each_value) whose set bits include all of
# all_num's bits and none of none_num's bits, as derived from field_values
# by #equal_to_numbers. Exponential in the bit count, like #each_value.
def equal_to(field_values = {})
all_num, none_num = self.equal_to_numbers(field_values)
[].tap do |list|
self.each_value { |num| list << num if (num & all_num) == all_num and (num & none_num) == 0 }
end
end
|
ruby
|
{
"resource": ""
}
|
q8103
|
BitMagic.BitsGenerator.equal_to_numbers
|
train
|
# Translates a {field_name => value(s)} hash into a pair of bit masks:
# all_num has a bit set for every field bit whose value casts truthy,
# none_num for every field bit whose value casts falsy.
# Returns [all_num, none_num].
def equal_to_numbers(field_values = {})
fields = {}
# Map each known field's bit positions to the requested value.
field_values.each_pair do |field_name, v|
bits = self.bits_for(field_name)
fields[bits] = v if bits.length > 0
end
all_num = 0
none_num = 0
fields.each_pair { |field_bits, val|
# val is indexed per bit -- presumably array-like; verify against callers.
field_bits.each_with_index do |bit, i|
if @options[:bool_caster].call(val[i])
all_num |= (1 << bit)
else
none_num |= (1 << bit)
end
end
}
[all_num, none_num]
end
|
ruby
|
{
"resource": ""
}
|
q8104
|
LatoBlog.Category::EntityHelpers.get_all_category_children
|
train
|
# Recursively collects every descendant category, depth-first:
# each direct child is followed immediately by its own descendants.
def get_all_category_children
  self.category_children.flat_map do |child|
    [child] + child.get_all_category_children
  end
end
|
ruby
|
{
"resource": ""
}
|
q8105
|
ChineseLunar.Lunar.lunar_date
|
train
|
# Formats the lunar equivalent of @date as "year-month-day", keeping only
# the leading digits of the day component returned by #convert.
def lunar_date()
l = convert(@date.year, @date.month, @date.day)
l[0].to_s + "-" + l[1].to_s + "-" + (/^\d+/.match(l[2].to_s)).to_s
end
|
ruby
|
{
"resource": ""
}
|
q8106
|
ChineseLunar.Lunar.days_in_lunar_date
|
train
|
# Total days in lunar year y: 12 months of 29 days (348) plus one day for
# each big-month flag in the @@lunar_info bitmap, plus any leap-month days.
def days_in_lunar_date(y)
sum = 348
# Scan the 12 month flags from bit 0x8000 down to 0x10.
i = 0x8000
while i > 0x8
if ((@@lunar_info[y - 1900] & i) != 0)
sum += 1
end
i >>= 1
end
sum + leap_days(y)
end
|
ruby
|
{
"resource": ""
}
|
q8107
|
::Sequel::Plugins::Crushyform.ClassMethods.to_dropdown
|
train
|
# Builds an HTML <option> list from dropdown_cache, marking the row whose
# first element equals +selection+ as selected. The nil entry uses nil_name.
# NOTE(review): rows are presumably [id, option-open-tag, option-close-tag]
# fragments -- confirm against dropdown_cache's builder.
def to_dropdown(selection=nil, nil_name='** UNDEFINED **')
dropdown_cache.inject("<option value=''>#{nil_name}</option>\n") do |out, row|
selected = 'selected' if row[0]==selection
"%s%s%s%s" % [out, row[1], selected, row[2]]
end
end
|
ruby
|
{
"resource": ""
}
|
q8108
|
::Sequel::Plugins::Crushyform.InstanceMethods.crushyfield
|
train
|
# Renders a labelled form field (with inline error list) for column +col+.
# Returns '' when the field type is :none. Label falls back to a humanized
# column name with a trailing _id stripped.
def crushyfield(col, o={})
return '' if (o[:type]==:none || model.crushyform_schema[col][:type]==:none)
field_name = o[:name] || model.crushyform_schema[col][:name] || col.to_s.sub(/_id$/, '').tr('_', ' ').capitalize
error_list = errors.on(col).map{|e|" - #{e}"} if !errors.on(col).nil?
# error_list doubles as the CSS-class toggle: nil suppresses 'crushyfield-error'.
"<p class='crushyfield %s'><label for='%s'>%s</label><span class='crushyfield-error-list'>%s</span><br />\n%s</p>\n" % [error_list&&'crushyfield-error', crushyid_for(col), field_name, error_list, crushyinput(col, o)]
end
|
ruby
|
{
"resource": ""
}
|
q8109
|
::Sequel::Plugins::Crushyform.InstanceMethods.to_thumb
|
train
|
# Renders a thumbnail <img> for column +c+. Stash-managed attachments get a
# generated thumb URL (only for image/* types); plain URL columns embed the
# value directly. The Time.now query string busts browser caching.
def to_thumb(c)
current = self.__send__(c)
if model.respond_to?(:stash_reflection) && model.stash_reflection.key?(c)
!current.nil? && current[:type][/^image\//] ? "<img src='#{file_url(c, 'stash_thumb.gif')}?#{::Time.now.to_i.to_s}' /><br />\n" : ''
else
"<img src='#{current}?#{::Time.now.to_i.to_s}' width='100' onerror=\"this.style.display='none'\" />\n"
end
end
|
ruby
|
{
"resource": ""
}
|
q8110
|
Ponder.Thaum.parse
|
train
|
# Handles one raw IRC line: answers server PINGs with PONG (optionally
# without logging), otherwise logs the line and dispatches the parsed event.
def parse(message)
message.chomp!
if message =~ /^PING \S+$/
if @config.hide_ping_pongs
send_data message.sub(/PING/, 'PONG')
else
@loggers.info "<< #{message}"
raw message.sub(/PING/, 'PONG')
end
else
@loggers.info "<< #{message}"
event_data = IRC::Events::Parser.parse(message, @isupport['CHANTYPES'])
parse_event_data(event_data) unless event_data.empty?
end
end
|
ruby
|
{
"resource": ""
}
|
q8111
|
Ponder.Thaum.setup_default_callbacks
|
train
|
# Registers the stock CTCP responders (PING, VERSION, TIME) and the
# RPL_ISUPPORT (005) handler.
def setup_default_callbacks
on :query, /^\001PING \d+\001$/ do |event_data|
time = event_data[:message].scan(/\d+/)[0]
notice event_data[:nick], "\001PING #{time}\001"
end
on :query, /^\001VERSION\001$/ do |event_data|
notice event_data[:nick], "\001VERSION Ponder #{Ponder::VERSION} (https://github.com/tbuehlmann/ponder)\001"
end
on :query, /^\001TIME\001$/ do |event_data|
notice event_data[:nick], "\001TIME #{Time.now.strftime('%a %b %d %H:%M:%S %Y')}\001"
end
# NOTE(review): 005 is an octal literal (== 5); works here because the
# numeric is small, but a decimal 5 would be clearer.
on 005 do |event_data|
@isupport.parse event_data[:params]
end
end
|
ruby
|
{
"resource": ""
}
|
q8112
|
LookUpTable.ClassMethods.lut_write_to_cache
|
train
|
# Writes the lookup table for lut_key to the cache, choosing the SQL or
# non-SQL batch writer per configuration, then writes a terminator entry.
def lut_write_to_cache(lut_key)
if lut_options(lut_key)[:sql_mode]
count = lut_write_to_cache_sql_mode(lut_key)
else
count = lut_write_to_cache_no_sql_mode(lut_key)
end
# HACK: Writing a \0 to terminate batch_items
lut_write_cache_item(lut_key, count, nil)
end
|
ruby
|
{
"resource": ""
}
|
q8113
|
Charmkit.Helpers.template
|
train
|
# Renders the template file at +src+ with the given keyword context and
# writes the rendered result to +dst+.
def template(src, dst, **context)
  source_text = File.read(src)
  File.write(dst, TemplateRenderer.render(source_text, context))
end
|
ruby
|
{
"resource": ""
}
|
q8114
|
Charmkit.Helpers.inline_template
|
train
|
# Renders the named inline template found after __END__ in the calling
# file and writes the result to +dst+. Templates are delimited by
# "@@ name" marker lines, Sinatra-style.
#
# Fixes over the previous version:
# - data.strip! was called before the nil check, so a missing caller file
#   (rescue branch) raised NoMethodError on nil.
# - the bare `elsif` accidentally used `template << line` as its condition,
#   which crashed with NoMethodError whenever a line preceded the first
#   "@@" marker (template still nil). Lines before the first marker are
#   now explicitly ignored.
def inline_template(name, dst, **context)
  templates = {}
  begin
    _app, data = File.read(caller.first.split(":").first).split("__END__", 2)
  rescue Errno::ENOENT
    data = nil
  end
  if data
    data.strip!
    template = nil
    data.each_line do |line|
      if line =~ /^@@\s*(.*\S)\s*$/
        # Start collecting a new named template.
        template = String.new
        templates[$1.to_s] = template
      elsif template
        template << line
      end
    end
    begin
      rendered = TemplateRenderer.render(templates[name], context)
    rescue
      puts "Unable to load inline template #{name}"
      exit 1
    end
    File.write(dst, rendered)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8115
|
Raca.Container.delete
|
train
|
# Deletes the object stored under +key+ in this container.
# Returns true when the API responds with a 2xx status.
def delete(key)
log "deleting #{key} from #{container_path}"
object_path = File.join(container_path, Raca::Util.url_encode(key))
response = storage_client.delete(object_path)
(200..299).cover?(response.code.to_i)
end
|
ruby
|
{
"resource": ""
}
|
q8116
|
Raca.Container.object_metadata
|
train
|
# HEADs the object at +key+ and returns its content type and size in bytes
# as a hash with :content_type and :bytes keys.
def object_metadata(key)
object_path = File.join(container_path, Raca::Util.url_encode(key))
log "Requesting metadata from #{object_path}"
response = storage_client.head(object_path)
{
:content_type => response["Content-Type"],
:bytes => response["Content-Length"].to_i
}
end
|
ruby
|
{
"resource": ""
}
|
q8117
|
Raca.Container.download
|
train
|
# Streams the object at +key+ to the local file at +filepath+, writing in
# chunks so large objects are not buffered in memory.
# Returns the downloaded size in bytes (from Content-Length).
def download(key, filepath)
log "downloading #{key} from #{container_path}"
object_path = File.join(container_path, Raca::Util.url_encode(key))
outer_response = storage_client.get(object_path) do |response|
File.open(filepath, 'wb') do |io|
response.read_body do |chunk|
io.write(chunk)
end
end
end
outer_response["Content-Length"].to_i
end
|
ruby
|
{
"resource": ""
}
|
q8118
|
Raca.Container.list
|
train
|
# Lists object names (or detail hashes when :details) in the container,
# paginating recursively until :max items are collected or the server runs
# out. Supports :marker (resume point) and :prefix filters.
def list(options = {})
max = options.fetch(:max, 100_000_000)
marker = options.fetch(:marker, nil)
prefix = options.fetch(:prefix, nil)
details = options.fetch(:details, nil)
# The API caps each request, so clamp the per-request page size.
limit = [max, MAX_ITEMS_PER_LIST].min
log "retrieving up to #{max} items from #{container_path}"
request_path = list_request_path(marker, prefix, details, limit)
result = storage_client.get(request_path).body || ""
if details
result = JSON.parse(result)
else
result = result.split("\n")
end
result.tap {|items|
if max <= limit
log "Got #{items.length} items; we don't need any more."
elsif items.length < limit
log "Got #{items.length} items; there can't be any more."
else
log "Got #{items.length} items; requesting #{limit} more."
# Detail rows are hashes keyed by "name"; plain listings are strings.
details ? marker = items.last["name"] : marker = items.last
items.concat list(max: max-items.length, marker: marker, prefix: prefix, details: details)
end
}
end
|
ruby
|
{
"resource": ""
}
|
q8119
|
Raca.Container.metadata
|
train
|
# HEADs the container and returns object count, byte usage, and any custom
# X-Container-Meta-* headers under :custom.
def metadata
log "retrieving container metadata from #{container_path}"
response = storage_client.head(container_path)
custom = {}
response.each_capitalized_name { |name|
custom[name] = response[name] if name[/\AX-Container-Meta-/]
}
{
:objects => response["X-Container-Object-Count"].to_i,
:bytes => response["X-Container-Bytes-Used"].to_i,
:custom => custom,
}
end
|
ruby
|
{
"resource": ""
}
|
q8120
|
Raca.Container.set_metadata
|
train
|
# POSTs the given headers to the container (used to set container metadata).
# Returns true when the API responds with a 2xx status.
def set_metadata(headers)
log "setting headers for container #{container_path}"
response = storage_client.post(container_path, '', headers)
(200..299).cover?(response.code.to_i)
end
|
ruby
|
{
"resource": ""
}
|
q8121
|
Raca.Container.cdn_metadata
|
train
|
# HEADs the CDN endpoint for this container and returns its CDN settings
# (enabled flag, hosts, TTL, log retention) as a symbol-keyed hash.
def cdn_metadata
log "retrieving container CDN metadata from #{container_path}"
response = cdn_client.head(container_path)
{
:cdn_enabled => response["X-CDN-Enabled"] == "True",
:host => response["X-CDN-URI"],
:ssl_host => response["X-CDN-SSL-URI"],
:streaming_host => response["X-CDN-STREAMING-URI"],
:ttl => response["X-TTL"].to_i,
:log_retention => response["X-Log-Retention"] == "True"
}
end
|
ruby
|
{
"resource": ""
}
|
q8122
|
Raca.Container.cdn_enable
|
train
|
# Enables CDN access for this container with the given cache TTL in seconds
# (default 259200 = 72 hours). Returns true on a 2xx response.
def cdn_enable(ttl = 259200)
log "enabling CDN access to #{container_path} with a cache expiry of #{ttl / 60} minutes"
response = cdn_client.put(container_path, "X-TTL" => ttl.to_i.to_s)
(200..299).cover?(response.code.to_i)
end
|
ruby
|
{
"resource": ""
}
|
q8123
|
Raca.Container.temp_url
|
train
|
# Builds a temporary, signed GET URL for +object_key+, valid until
# +expires_at+ (unix timestamp; default one minute from now).
def temp_url(object_key, temp_url_key, expires_at = Time.now.to_i + 60)
private_url("GET", object_key, temp_url_key, expires_at)
end
|
ruby
|
{
"resource": ""
}
|
q8124
|
Raca.Container.temp_upload_url
|
train
|
# Builds a temporary, signed PUT URL for +object_key+, valid until
# +expires_at+ (unix timestamp; default one minute from now).
def temp_upload_url(object_key, temp_url_key, expires_at = Time.now.to_i + 60)
private_url("PUT", object_key, temp_url_key, expires_at)
end
|
ruby
|
{
"resource": ""
}
|
q8125
|
Raca.Container.list_request_path
|
train
|
# Builds the container listing request path, appending marker/prefix/format
# query parameters only when they are provided.
def list_request_path(marker, prefix, details, limit)
  params = ["limit=#{limit}"]
  params << "marker=#{Raca::Util.url_encode(marker)}" if marker
  params << "prefix=#{Raca::Util.url_encode(prefix)}" if prefix
  params << "format=json" if details
  "#{container_path}?#{params.join('&')}"
end
|
ruby
|
{
"resource": ""
}
|
q8126
|
SycTimeleap.TimeLeap.method_missing
|
train
|
# DSL entry point: interprets unknown method names as time-leap expressions
# and dispatches to the matching helper, e.g. "in_2_days", "yesterday",
# "next_monday", "monday_in_2_weeks". Falls back to super when no pattern
# matches.
def method_missing(name, *args)
# "in"/"back" + count + unit (d/w/m/y), with optional separators.
add_regex = %r{
^([ib])(?:n|ack)?
(?:\.|_|-| )?
(\d+)
(?:\.|_|-| )?
([dwmy])
(?:ays?|eeks?|onths?|ears?)?$
}x
# today / tomorrow / yesterday, abbreviated freely.
weekday_regex = %r{
^(tod|tom|y)(?:a?y?|o?r?r?o?w?|e?s?t?e?r?d?a?y?)?$
}xi
# next/previous + weekday name, abbreviated freely.
next_weekday_regex = %r{
^(n|p)(?:e?x?t|r?e?v?i?o?u?s?)?
(?:\.|_| |-)?
(mo|tu|we|th|fr|sa|su)
(?:n?|e?s?|d?n?e?s?|u?r?s?|i?|t?u?r?|n?)(?:d?a?y?)$
}xi
# weekday + in/back + count + unit, e.g. "monday_in_2_weeks".
next_weekday_in_regex = %r{
^(mo|tu|we|th|fr|sa|su)
(?:n?|e?s?|d?n?e?s?|u?r?s?|i?|t?u?r?|n?)(?:d?a?y?)(?:_?)
(i|b)
(?:n?|a?c?k?)(?:_?)
(\d+)(?:_?)([dwmy])(?:a?y?s?|e?e?k?s?|o?n?t?h?s?|e?a?r?s?)$
}xi
return add($1, $2, $3) if name =~ add_regex
return weekday($1) if name =~ weekday_regex
return next_weekday($1, $2) if name =~ next_weekday_regex
return next_weekday_in($1, $2, $3, $4) if name =~ next_weekday_in_regex
super
end
|
ruby
|
{
"resource": ""
}
|
q8127
|
RUser.Person.convert
|
train
|
# Normalizes a raw attribute hash onto this person: remaps known keys via
# KEYS, stringifies zip codes, and collapses any national-ID key (NIDT)
# into @nidt/'nidn'. Each resulting pair is stored through #var_set.
def convert(data)
data.each do |k, v|
k = KEYS[k] if KEYS.include?(k)
v = v.to_s if k.eql? 'zip'
if NIDT.include?(k)
# Remember which ID type this was, then store it under the generic key.
instance_variable_set('@nidt', k)
k = 'nidn'
v = v.to_s
end
var_set(k, v)
end
end
|
ruby
|
{
"resource": ""
}
|
q8128
|
RUser.Person.var_set
|
train
|
# Stores value +v+ in instance variable @k and defines reader/writer
# methods for it on the class. Hash values are recursively flattened via
# #convert instead of being stored directly.
# NOTE(review): define_method on self.class affects all instances.
def var_set(k, v)
varget = proc { instance_variable_get("@#{k}") }
varset = proc { |y| instance_variable_set("@#{k}", y) }
v.is_a?(Hash) ? convert(v) : instance_variable_set("@#{k}", v)
self.class.send(:define_method, k, varget)
self.class.send(:define_method, "#{k}=", varset)
end
|
ruby
|
{
"resource": ""
}
|
q8129
|
HasEnumeration.AggregateConditionsOverride.expand_hash_conditions_for_aggregates
|
train
|
# ActiveRecord hook override: before the default expansion runs, convert
# symbol values for enumerated attributes into their enumeration objects
# so they can be matched in SQL conditions.
def expand_hash_conditions_for_aggregates(attrs)
expanded_attrs = attrs.dup
attr_enumeration_mapping_classes.each do |attr, klass|
if expanded_attrs[attr].is_a?(Symbol)
expanded_attrs[attr] = klass.from_sym(expanded_attrs[attr])
end
end
super(expanded_attrs)
end
|
ruby
|
{
"resource": ""
}
|
q8130
|
Petra.ValueComparisonError.ignore!
|
train
|
# Records a read-integrity override for this conflict in the current
# transaction section, optionally updating the stored value as well.
def ignore!(update_value: false)
Petra.current_transaction.current_section.log_read_integrity_override(object,
attribute: attribute,
external_value: external_value,
update_value: update_value)
end
|
ruby
|
{
"resource": ""
}
|
q8131
|
Petra.WriteClashError.undo_changes!
|
train
|
# Vetoes the transactional change to this attribute, keeping the external
# value that clashed with it.
def undo_changes!
Petra.current_transaction.current_section.log_attribute_change_veto(object,
attribute: attribute,
external_value: external_value)
end
|
ruby
|
{
"resource": ""
}
|
q8132
|
Sem4rCli.CommandReport.download
|
train
|
# 'download' subcommand: fetches the report with the given id into
# test_report.xml. Returns false (with a message) when the id is missing,
# unknown, or the report is not yet Completed; true on success.
def download(args)
if args.length != 1
puts "missing report id for 'download' subcommand"
return false
end
report_id = args[0].to_i
report = @common_args.account.reports.find { |r| r.id == report_id }
if report.nil?
puts "report '#{report_id}' not found"
return false
end
if report.status != 'Completed'
puts "cannot download report with status '#{report.status}'"
return false
end
# NOTE(review): output path is hard-coded; consider accepting it as an arg.
path_name = "test_report.xml"
puts "Download report #{report.id} in #{path_name}"
report.download(path_name)
true
end
|
ruby
|
{
"resource": ""
}
|
q8133
|
Sem4rCli.CommandReport.schedule
|
train
|
# 'schedule' subcommand: defines a hard-coded daily URL report for
# 2010-01, validates it, schedules it, waits for completion (polling every
# 10s), then downloads the result to test_report.xml. Always returns true
# unless validation exits the process.
def schedule(argv)
report = @account.report do
name 'boh'
type 'Url'
aggregation 'Daily'
cross_client true
zero_impression true
start_day '2010-01-01'
end_day '2010-01-30'
column "CustomerName"
column "ExternalCustomerId"
column "CampaignStatus"
column "Campaign"
column "CampaignId"
column "AdGroup"
column "AdGroupId"
column "AdGroupStatus"
column "QualityScore"
column "FirstPageCpc"
column "Keyword"
column "KeywordId"
column "KeywordTypeDisplay"
column "DestinationURL"
column "Impressions"
column "Clicks"
column "CTR"
column "CPC"
column "MaximumCPC"
column "Cost"
column "AveragePosition"
end
unless report.validate
puts "report not valid"
exit
end
puts "scheduled job"
job = report.schedule
job.wait(10) { |report, status| puts "status #{status}" }
report.download("test_report.xml")
true
end
|
ruby
|
{
"resource": ""
}
|
q8134
|
TartanCloth::Matchers.TransformMatcher.make_patch
|
train
|
# Produces a human-readable side-by-side diff of two multi-line strings
# using Diff::LCS context diffs, column-aligned to the widest changed
# element. Used in matcher failure messages.
def make_patch( expected, actual )
diffs = Diff::LCS.sdiff( expected.split("\n"), actual.split("\n"),
Diff::LCS::ContextDiffCallbacks )
# Widest old/new element determines the column width (min 4 padding).
maxcol = diffs.flatten.
collect {|d| [d.old_element.to_s.length, d.new_element.to_s.length ] }.
flatten.max || 0
maxcol += 4
patch = " %#{maxcol}s | %s\n" % [ "Expected", "Actual" ]
patch << diffs.collect do |changeset|
changeset.collect do |change|
"%s [%03d, %03d]: %#{maxcol}s | %-#{maxcol}s" % [
change.action,
change.old_position,
change.new_position,
change.old_element.inspect,
change.new_element.inspect,
]
end.join("\n")
end.join("\n---\n")
end
|
ruby
|
{
"resource": ""
}
|
q8135
|
Polypaperclip.ClassMethods.initialize_polypaperclip
|
train
|
# One-time model setup: wires attachment save/destroy callbacks, the
# attachments association, and mixes in the instance method modules.
# Idempotent -- only runs when polypaperclip_definitions is still nil.
def initialize_polypaperclip
if polypaperclip_definitions.nil?
after_save :save_attached_files
before_destroy :destroy_attached_files
has_many_attachments_association
write_inheritable_attribute(:polypaperclip_definitions, {})
#sequence is important here - we have to override some paperclip stuff
include Paperclip::InstanceMethods
include InstanceMethods
end
end
|
ruby
|
{
"resource": ""
}
|
q8136
|
Statefully.State.method_missing
|
train
|
# Dynamic state access: `foo` fetches the key, `foo?` reports whether the
# key exists, `foo!` fetches and raises Errors::StateMissing when absent.
# Anything else defers to super.
def method_missing(name, *args, &block)
  key = name.to_sym
  return fetch(key) if key?(key)
  as_string = name.to_s
  suffix = as_string[-1]
  return super unless ['?', '!'].include?(suffix)
  base_key = as_string[0...-1].to_sym
  exists = key?(base_key)
  return exists if suffix == '?'
  return fetch(base_key) if exists
  raise Errors::StateMissing, base_key
end
|
ruby
|
{
"resource": ""
}
|
q8137
|
Statefully.State.respond_to_missing?
|
train
|
# Mirrors #method_missing: we respond to existing keys and to any name
# ending in '?' or '!'.
def respond_to_missing?(name, _include_private = false)
  as_string = name.to_s
  key?(name.to_sym) || ['?', '!'].any? { |suffix| as_string.end_with?(suffix) } || super
end
|
ruby
|
{
"resource": ""
}
|
q8138
|
Anvil.Versioner.bump!
|
train
|
# Returns a new version with +term+ (e.g. :major, :minor) incremented and
# all lower-precedence terms reset. Raises NotSupportedTerm for unknown
# terms. Does not mutate the receiver -- it operates on a clone.
def bump!(term)
fail NotSupportedTerm.new(term) unless TERMS.include?(term.to_sym)
new_version = clone
new_value = increment send(term)
new_version.send("#{term}=", new_value)
# reset_terms_for returns self (the clone), which becomes our return value.
new_version.reset_terms_for(term)
end
|
ruby
|
{
"resource": ""
}
|
q8139
|
Anvil.Versioner.reset_terms_for
|
train
|
# Resets every term of lower precedence than the bumped +term+:
# bumping major zeroes minor and patch; any numeric bump clears pre;
# any term in the cascade clears build. Returns self.
def reset_terms_for(term)
  cascade = [:major, :minor, :patch, :pre]
  position = cascade.index(term)
  if position
    self.minor = 0 if position < 1
    self.patch = 0 if position < 2
    self.pre = nil if position < 3
    self.build = nil
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q8140
|
Telstra.SMS.send_sms
|
train
|
# Sends an SMS through the Telstra v1 messaging API and returns the parsed
# JSON response body.
# Fix: removed the dead `[to, body]` statement -- it built an array and
# discarded it, doing nothing.
def send_sms(to: sms_to, body: sms_body)
  # Refresh the OAuth token used in the Authorization header below.
  generate_token
  options = { body: {
                body: body,
                to: to
              }.to_json,
              headers: { "Content-Type" => "application/json", "Authorization" => "Bearer #{@token}" } }
  response = HTTParty.post("https://api.telstra.com/v1/sms/messages", options)
  JSON.parse(response.body)
end
|
ruby
|
{
"resource": ""
}
|
q8141
|
Atlas.BoxProvider.save
|
train
|
# Persists this provider to Atlas: tries a PUT update first and falls back
# to POST-create when the provider does not exist yet. Updates local state
# from whichever response succeeds.
def save
body = { provider: to_hash }
begin
response = Atlas.client.put(url_builder.box_provider_url, body: body)
rescue Atlas::Errors::NotFoundError
response = Atlas.client.post("#{url_builder.box_version_url}/providers",
body: body)
end
update_with_response(response)
end
|
ruby
|
{
"resource": ""
}
|
q8142
|
Atlas.BoxProvider.upload
|
train
|
# Uploads the provider binary: asks Atlas for a one-time upload path, then
# PUTs the file body there directly via Excon.
def upload(file)
# get the path for upload
response = Atlas.client.get("#{url_builder.box_provider_url}/upload")
# upload the file
upload_url = response['upload_path']
Excon.put(upload_url, body: file)
end
|
ruby
|
{
"resource": ""
}
|
q8143
|
CronR.CronJob.runnable?
|
train
|
# Decides whether this cron job should run at +time+. Each of the five cron
# fields is either `true` (wildcard), a number, or an array-like of numbers;
# a field passes when the corresponding component of +time+ matches.
# Returns [all_fields_passed, per_field_results].
def runnable? time
result = [:minute,:hour,:day,:dow,:month].map{|ct|
if self[ct] == true then
true
else
# Pull the matching component out of the Time object.
case ct
when :month,:day,:hour
val = time.send(ct)
when :dow
val = time.wday
when :minute
val = time.min
end
case self[ct]
when Numeric # Should be Fixnum
self[ct] == val
else # Assume array-like thing...
self[ct].include?(val)
end
end
}
# Everything should be true to make us eligible for running:
[result.inject(true){|s,v| s && v},result]
end
|
ruby
|
{
"resource": ""
}
|
q8144
|
Permit.PermitRules.allow
|
train
|
# Registers an allow rule for +roles+ on the actions given via options[:to].
# The remaining options configure the PermitRule itself. Returns the rule.
def allow(roles, options = {})
actions = options.delete(:to)
rule = PermitRule.new(roles, options)
index_rule_by_actions @action_allow_rules, actions, rule
return rule
end
|
ruby
|
{
"resource": ""
}
|
q8145
|
Tinia.ActiveRecord.indexed_with_cloud_search
|
train
|
# Mixes the Tinia connection/index/search modules into this model (skipping
# any already included), yields self for configuration, and raises when no
# cloud_search_domain was configured.
def indexed_with_cloud_search(&block)
mods = [
Tinia::Connection,
Tinia::Index,
Tinia::Search
]
mods.each do |mod|
unless self.included_modules.include?(mod)
self.send(:include, mod)
end
end
# config block
yield(self) if block_given?
# ensure config is all set
unless self.cloud_search_domain.present?
raise Tinia::MissingSearchDomain.new(self)
end
end
|
ruby
|
{
"resource": ""
}
|
q8146
|
Smsified.Subscriptions.create_inbound_subscription
|
train
|
# Creates an inbound SMS subscription for +destination_address+, merging
# any extra options into the camelCased request body. Returns a wrapped
# Response.
def create_inbound_subscription(destination_address, options)
query = options.merge({ :destination_address => destination_address })
Response.new self.class.post("/smsmessaging/inbound/subscriptions",
:basic_auth => @auth,
:body => camelcase_keys(query),
:headers => SMSIFIED_HTTP_HEADERS
)
end
|
ruby
|
{
"resource": ""
}
|
q8147
|
YamledAcl.ControllerExtension.authorize_action
|
train
|
# Before-filter: initializes the ACL for the current user group and
# controller, then raises YamledAcl::AccessDenied unless the current
# action is permitted.
def authorize_action
YamledAcl.init(current_user_group_name, params[:controller])
allowed_to?(params[:action]) or raise(YamledAcl::AccessDenied)
end
|
ruby
|
{
"resource": ""
}
|
q8148
|
Mysticonfig.Loader.load
|
train
|
# Loads configuration via format auto-detection, merging it over the
# defaults; returns the defaults alone when the loaded config is empty.
def load
config_file = find_file @filenames
config = Utils.load_auto config_file
config.empty? ? @default_config : @default_config.merge(config)
end
|
ruby
|
{
"resource": ""
}
|
q8149
|
Mysticonfig.Loader.load_json
|
train
|
# Loads the JSON config file, merging it over the defaults; returns the
# defaults alone when the loaded config is empty.
def load_json
json_config_file = Utils.lookup_file @filenames[:json]
config = Utils.load_json json_config_file
config.empty? ? @default_config : @default_config.merge(config)
end
|
ruby
|
{
"resource": ""
}
|
q8150
|
Mysticonfig.Loader.load_yaml
|
train
|
# Tries each candidate YAML filename in order and returns the defaults
# merged with the first one that exists; falls back to the defaults when
# no candidate file is found.
def load_yaml
  @filenames[:yaml].each do |candidate|
    found = Utils.lookup_file candidate
    next if found.nil?
    config = Utils.load_yaml(found)
    return config.empty? ? @default_config : @default_config.merge(config)
  end
  @default_config # Return default config when can't load config file
end
|
ruby
|
{
"resource": ""
}
|
q8151
|
VCSToolkit.Diff.new_content
|
train
|
# Produces the merged file content from this diff: kept/added elements pass
# through, deletions drop out, and conflicts expand into both versions
# wrapped in the given conflict marker lines.
def new_content(conflict_start='<<<', conflict_switch='>>>', conflict_end='===')
flat_map do |change|
if change.conflict?
# Recursively render both sides of the conflict.
version_one = change.diff_one.new_content(conflict_start, conflict_switch, conflict_end)
version_two = change.diff_two.new_content(conflict_start, conflict_switch, conflict_end)
[conflict_start] + version_one + [conflict_switch] + version_two + [conflict_end]
elsif change.deleting?
[]
else
[change.new_element]
end
end
end
|
ruby
|
{
"resource": ""
}
|
q8152
|
RubyEdit.SourceFile.populate
|
train
|
# Writes +content+ to the configured source file location, overwriting any
# existing file quietly; extra options are forwarded to the generator.
def populate(content, **options)
generator.create_file(RubyEdit::SOURCE_FILE_LOCATION,
content,
force: true,
verbose: false,
**options)
end
|
ruby
|
{
"resource": ""
}
|
q8153
|
RImageAnalysisTools.Skeletonizer.compute_n
|
train
|
# Counts the in-bounds 8-neighbors of pixel +ic+ whose image value equals
# the center pixel's value. Starts at -1 so the (0,0) self-comparison,
# which always matches, is cancelled out.
def compute_n(ic)
temp_ic = ImageCoordinate.cloneCoord(ic)
n = -1 # compensate for 0,0 case
x_off = [-1,0,1]
y_off = [-1,0,1]
x_off.each do |x|
y_off.each do |y|
temp_ic[:x] = ic[:x] + x
temp_ic[:y] = ic[:y] + y
if @im.inBounds(temp_ic) and @im[temp_ic] == @im[ic] then
n += 1
end
end
end
# Return the scratch coordinate to the pool before returning the count.
temp_ic.recycle
n
end
|
ruby
|
{
"resource": ""
}
|
q8154
|
BoardGameGrid.SquareSet.where
|
train
|
# Filters the squares by every attribute/value pair in +hash+, returning a
# new SquareSet containing only squares that match all pairs.
def where(hash)
  filtered = squares
  hash.each do |attribute, value|
    filtered = filtered.select { |square| square.attribute_match?(attribute, value) }
  end
  self.class.new(squares: filtered)
end
|
ruby
|
{
"resource": ""
}
|
q8155
|
BoardGameGrid.SquareSet.find_by_x_and_y
|
train
|
# Returns the square at the given coordinates, or nil when none exists.
# Uses #find so the scan stops at the first hit instead of building a full
# filtered array with select(...).first.
def find_by_x_and_y(x, y)
  find { |square| square.x == x && square.y == y }
end
|
ruby
|
{
"resource": ""
}
|
q8156
|
BoardGameGrid.SquareSet.in_range
|
train
|
# Returns the squares whose distance from +origin+ is at most +distance+.
def in_range(origin, distance)
  select do |candidate|
    Vector.new(origin, candidate).magnitude <= distance
  end
end
|
ruby
|
{
"resource": ""
}
|
q8157
|
BoardGameGrid.SquareSet.at_range
|
train
|
# Returns the squares whose distance from +origin+ is exactly +distance+.
def at_range(origin, distance)
  select do |candidate|
    Vector.new(origin, candidate).magnitude == distance
  end
end
|
ruby
|
{
"resource": ""
}
|
q8158
|
BoardGameGrid.SquareSet.unblocked
|
train
|
# Returns the squares reachable from +origin+ with no occupied square on
# the straight path between them (per square_set.between).
def unblocked(origin, square_set)
select { |destination| square_set.between(origin, destination).all?(&:unoccupied?) }
end
|
ruby
|
{
"resource": ""
}
|
q8159
|
BoardGameGrid.SquareSet.between
|
train
|
# Returns the squares strictly between +origin+ and +destination+ when they
# share a diagonal or orthogonal line; otherwise an empty set. Endpoints
# are excluded.
def between(origin, destination)
vector = Vector.new(origin, destination)
if vector.diagonal? || vector.orthogonal?
# Step one unit at a time from origin toward destination.
point_counter = origin.point
direction = vector.direction
_squares = []
while point_counter != destination.point
point_counter = point_counter + direction
square = find_by_x_and_y(point_counter.x, point_counter.y)
if square && square.point != destination.point
_squares.push(square)
end
end
else
_squares = []
end
self.class.new(squares: _squares)
end
|
ruby
|
{
"resource": ""
}
|
q8160
|
Derelict.Parser::Version.version
|
train
|
# Extracts the version string from the captured command output using the
# PARSE_VERSION_FROM_OUTPUT regex; raises InvalidFormat when the output
# does not match.
def version
logger.debug "Parsing version from output using #{description}"
matches = output.match PARSE_VERSION_FROM_OUTPUT
raise InvalidFormat.new output if matches.nil?
matches.captures[0]
end
|
ruby
|
{
"resource": ""
}
|
q8161
|
Valr.Repo.full_changelog
|
train
|
# Builds the full changelog text: a header describing the selection
# (range, branch+ancestor, or whole history) followed by the changelog
# entries, joined by a newline.
def full_changelog(first_parent: false, range: nil, branch: nil, from_ancestor_with: nil)
changelog_list = changelog first_parent: first_parent, range: range, branch: branch, from_ancestor_with: from_ancestor_with
if !range.nil?
header = full_changelog_header_range range
elsif !branch.nil?
header = full_changelog_header_branch branch, from_ancestor_with
else
header = full_changelog_header_no_range
end
[header, changelog_list].join "\n"
end
|
ruby
|
{
"resource": ""
}
|
q8162
|
Valr.Repo.log_messages
|
train
|
# Walks the repository history and returns the commit messages, selected
# by (in priority order): an explicit range, a branch (optionally limited
# to commits since its merge-base with another branch), or HEAD.
# first_parent restricts the walk to first-parent history.
# Raises NotValidRangeError / NotValidBranchError on bad input.
def log_messages(first_parent = false, range = nil, branch = nil, from_ancestor_with = nil)
walker = Rugged::Walker.new @repo
if !range.nil?
begin
walker.push_range range
rescue Rugged::ReferenceError
raise Valr::NotValidRangeError.new range
end
elsif !branch.nil?
b = @repo.references["refs/heads/#{branch}"]
raise Valr::NotValidBranchError.new branch if b.nil?
if !from_ancestor_with.nil?
a = @repo.references["refs/heads/#{from_ancestor_with}"]
raise Valr::NotValidBranchError.new from_ancestor_with if a.nil?
# Limit to commits after the merge-base of the two branches.
base = @repo.merge_base b.target_id, a.target_id
walker.push_range "#{base}..#{b.target_id}"
else
walker.push b.target_id
end
else
walker.push @repo.head.target_id
end
walker.simplify_first_parent if first_parent
message_list = walker.inject([]) { |messages, c| messages << c.message }
walker.reset
message_list
end
|
ruby
|
{
"resource": ""
}
|
q8163
|
Valr.Repo.full_changelog_header_range
|
train
|
# Renders the changelog header for an explicit "from..to" range, resolving
# both endpoints to their commit oids.
def full_changelog_header_range(range)
from, to = range.split '..'
from_commit, to_commit = [from, to].map { |ref| rev_parse ref }
Koios::Doc.write {
[pre(["from: #{from} <#{from_commit.oid}>",
"to: #{to} <#{to_commit.oid}>"])]
}
end
|
ruby
|
{
"resource": ""
}
|
q8164
|
Valr.Repo.full_changelog_header_branch
|
train
|
# Renders the changelog header for a branch selection, including the
# ancestor line only when an ancestor branch was given.
def full_changelog_header_branch(branch, ancestor)
h = ["branch: #{branch} <#{@repo.references["refs/heads/#{branch}"].target_id}>"]
h << "from ancestor with: #{ancestor} <#{@repo.references["refs/heads/#{ancestor}"].target_id}>" unless ancestor.nil?
Koios::Doc.write {[pre(h)]}
end
|
ruby
|
{
"resource": ""
}
|
q8165
|
Grayskull.Validator.match_node
|
train
|
# Validates +node+ against the +expected+ schema entry (its 'type',
# 'ok_empty' flag, and 'accepts' list), pushing messages onto @errors and
# returning false on the first failure.
#
# Fixes over the previous version:
# - Hash and Array children went through two duplicated loops; they now
#   share one loop over [label, value] pairs.
# - A child that failed some accepted types before matching one was pushed
#   onto `unmatched` repeatedly, and the cleanup used the non-mutating
#   Array#slice (instead of slice!/delete), so spurious "not of an accepted
#   type" errors were reported for children that actually matched. Each
#   child now lands in exactly one of matched/unmatched.
# - The inner block parameter no longer shadows the outer `node`.
def match_node(node, expected, label)
  # Check the node's own type first.
  if !check_type(node, expected['type'], label, expected['ok_empty'])
    @errors.push('Error: node ' + label + ' is not of an accepted type. Should be one of ' + expected['accepts'].join(', '))
    return false
  end
  if node.kind_of?(Hash) || node.kind_of?(Array)
    if node.empty? && !expected['ok_empty']
      @errors.push('Error: node ' + label + ' cannot be empty')
      return false
    elsif !node.empty? && expected.has_key?('accepts')
      # Normalize children to [child_label, value] pairs so hashes and
      # arrays validate identically. Array children are labelled "label[i]".
      children =
        if node.kind_of?(Hash)
          node.map { |key, value| [key, value] }
        else
          node.each_with_index.map { |value, i| [label + '[' + i.to_s + ']', value] }
        end
      matched = []
      unmatched = []
      children.each do |child_label, value|
        # A child is valid when it satisfies any accepted type.
        if expected['accepts'].any? { |accepts| check_type(value, accepts, child_label) }
          matched.push(child_label)
        else
          unmatched.push(child_label)
        end
      end
      if matched.count != node.count
        unmatched.each do |child_label|
          @errors.push('Error: node ' + child_label + ' is not of an accepted type. Should be one of ' + expected['accepts'].join(', '))
        end
        @errors.push('Error: node ' + label + ' contains an unaccepted type.')
        return false
      end
    end
  end
  return true
end
|
ruby
|
{
"resource": ""
}
|
q8166
|
Grayskull.Validator.check_type
|
train
|
# Returns true when +node+ matches +expected_type+: named schema types
# recurse into #match_node; otherwise the node's class name must equal the
# expected type, with nil allowed for 'empty' or when accept_nil is set.
def check_type(node,expected_type,label,accept_nil = false)
valid_type = true;
if(@types.has_key?(expected_type))
# Composite type defined in the schema -- validate recursively.
valid_type = match_node(node,@types[expected_type],label)
elsif node.class.to_s != expected_type && !(node.kind_of?(NilClass) && (expected_type=='empty' || accept_nil))
valid_type = false
end
return valid_type
end
|
ruby
|
{
"resource": ""
}
|
q8167
|
MicroservicePrecompiler.Builder.cleanup
|
train
|
# Resets the build output directory: removes the previous dist path, cleans
# the Compass project, and recreates empty directories for each sprocket
# asset type plus any mustache output directories.
# Fix: File.exists? is deprecated (removed in Ruby 3.2) -- use File.exist?.
def cleanup(sprocket_assets = [:javascripts, :stylesheets])
  # Remove previous dist path
  FileUtils.rm_r build_path if File.exist?(build_path)
  # Clean compass project
  Compass::Exec::SubCommandUI.new(["clean", project_root]).run!
  # Don't initialize Compass assets, the config will take care of it
  sprocket_assets.each do |asset|
    FileUtils.mkdir_p File.join(build_path, asset.to_s)
  end
  if mustaches_config_file_exists?
    mustaches_yaml.each_key do |dir|
      FileUtils.mkdir_p File.join(build_path, dir.to_s)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8168
|
MicroservicePrecompiler.Builder.sprockets_build
|
train
|
# Compiles each sprocket asset directory: every file is resolved through
# the sprockets environment, minified by extension, and written under the
# build path at its logical path.
# Fix: File.exists? is deprecated (removed in Ruby 3.2) -- use File.exist?.
def sprockets_build(sprocket_assets = [:javascripts, :stylesheets])
  sprocket_assets.each do |asset_type|
    load_path = File.join(@project_root, asset_type.to_s)
    next unless File.exist?(load_path)
    sprockets_env.append_path load_path
    Dir.new(load_path).each do |filename|
      file = File.join(load_path, filename)
      if File.file?(file)
        asset = sprockets_env[filename]
        attributes = sprockets_env.find_asset(asset.pathname)
        # logical_path is the filename
        build_file = File.join(build_path, asset_type.to_s, attributes.logical_path)
        File.open(build_file, 'w') do |f|
          extension = attributes.logical_path.split(".").last
          f.write(minify(asset, extension))
        end
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8169
|
MicroservicePrecompiler.Builder.mustache_template_build
|
train
|
# Renders one mustache template: requires the logic file, instantiates its
# class, points it at the .html.mustache template, and writes the rendered
# HTML into the build path.
def mustache_template_build(dir, template_file, logic_file)
# Get the class name from an underscore-named file
logic_class_name = underscore_to_camelcase(logic_file)
# Output file should match the syntax of the mustaches config
output_file = logic_file
# Now we can name the logic_file to underscored version
logic_file = camelcase_to_underscore(logic_file)
# Require logic file, used to generate content from template
require File.join(project_root, camelcase_to_underscore(dir), logic_file)
# Create relevant directory path
FileUtils.mkdir_p File.join(build_path, dir.to_s)
# Instantiate class from required file
mustache = Kernel.const_get(logic_class_name).new
# Set the template file
mustache.template_file = File.join(project_root, camelcase_to_underscore(dir), template_file) + ".html.mustache"
# Get the name of the file we will write to after it's template is processed
build_file = File.join(build_path, dir, "#{output_file}.html")
File.open(build_file, 'w') do |f|
f.write(mustache.render)
end
end
|
ruby
|
{
"resource": ""
}
|
q8170
|
MicroservicePrecompiler.Builder.underscore_to_camelcase
|
train
|
# Converts an underscored name to CamelCase ("foo_bar" -> "FooBar").
# Strings without underscores are returned unchanged (NOT capitalized).
# Fixes over the previous version: the second line was a no-op
# self-assignment, and the gsub-to-spaces/split/capitalize! round trip is
# replaced by a direct split on '_' (empty segments from consecutive or
# leading underscores capitalize to "" and vanish in the join, matching
# the old behavior).
def underscore_to_camelcase(underscore_string)
  return underscore_string unless underscore_string.include?('_')
  underscore_string.split('_').map(&:capitalize).join
end
|
ruby
|
{
"resource": ""
}
|
q8171
|
MicroservicePrecompiler.Builder.sprockets_env
|
train
|
# Memoized sprockets environment rooted at the project, logging to STDOUT.
def sprockets_env
@sprockets_env ||= Sprockets::Environment.new(project_root) { |env| env.logger = Logger.new(STDOUT) }
end
|
ruby
|
{
"resource": ""
}
|
q8172
|
MicroservicePrecompiler.Builder.minify
|
train
|
# Minifies the asset's string form according to its file extension:
# Uglifier for "js", YUI CssCompressor for "css", and a plain string
# passthrough for everything else.
def minify(asset, format)
  content = asset.to_s
  case format
  when "js"
    # Minify JS
    Uglifier.compile(content)
  when "css"
    # Minify CSS
    YUI::CssCompressor.new.compress(content)
  else
    # Return string representation if not minimizing
    content
  end
end
|
ruby
|
{
"resource": ""
}
|
q8173
|
VirtualMonkey.DeploymentRunner.launch_all
|
train
|
# Starts every server in the deployment, tolerating servers that are
# already launched (AlreadyLaunchedError is swallowed; anything else
# re-raises).
def launch_all
@servers.each { |s|
begin
object_behavior(s, :start)
rescue Exception => e
# NOTE(review): rescuing Exception is broad; only AlreadyLaunchedError
# is intentionally ignored here.
raise e unless e.message =~ /AlreadyLaunchedError/
end
}
end
|
ruby
|
{
"resource": ""
}
|
q8174
|
VirtualMonkey.DeploymentRunner.check_monitoring
|
train
|
# Verifies monitoring is live for every server: polls the monitoring
# endpoint (up to 21 attempts, 10s apart), waits for data to accumulate,
# then asserts that cpu-idle sketchy data exists and is non-zero.
# Raises on any failure.
def check_monitoring
@servers.each do |server|
server.settings
response = nil
count = 0
until response || count > 20 do
begin
response = server.monitoring
rescue
response = nil
count += 1
sleep 10
end
end
raise "Fatal: Failed to verify that monitoring is operational" unless response
#TODO: pass in some list of plugin info to check multiple values. For now just
# hardcoding the cpu check
sleep 60 # This is to allow monitoring data to accumulate
monitor=server.get_sketchy_data({'start'=>-60,'end'=>-20,'plugin_name'=>"cpu-0",'plugin_type'=>"cpu-idle"})
idle_values = monitor['data']['value']
raise "No cpu idle data" unless idle_values.length > 0
raise "No idle time" unless idle_values[0] > 0
puts "Monitoring is OK for #{server.nickname}"
end
end
|
ruby
|
{
"resource": ""
}
|
q8175
|
RedisRecord.Model.add_attributes
|
train
|
# Registers each key/value pair of +hash+ as an attribute on this
# instance, caching values in @cached_attrs and defining an accessor
# pair on the instance's singleton class. Keys that would shadow an
# existing method are skipped (except :id, which always wins). Writers
# cache a model's id when given a RedisRecord::Model, and remove the
# key from @stored_attrs so it is persisted again.
#
# @param hash [Hash] attribute names (string or symbol) => values
# @return [Hash] the hash that was passed in
def add_attributes(hash)
  hash.each_pair do |name, value|
    attr_name = name.to_sym
    # Don't clobber existing methods, except the canonical :id accessor.
    next unless attr_name == :id || !respond_to?(attr_name)
    @cached_attrs[attr_name] = value
    define_singleton_method(attr_name) { @cached_attrs[attr_name] }
    define_singleton_method("#{attr_name}=") do |new_value|
      @cached_attrs[attr_name] =
        new_value.is_a?(RedisRecord::Model) ? new_value.id : new_value
      @stored_attrs.delete(attr_name)
    end
  end
  hash
end
|
ruby
|
{
"resource": ""
}
|
q8176
|
RedisRecord.Model.add_foreign_keys_as_attributes
|
train
|
# Declares one attribute per belongs_to association registered for this
# class, named after the association's foreign key (e.g. :user_id).
def add_foreign_keys_as_attributes
  associations = @@reflections[self.class.name.to_sym][:belongs_to]
  associations.each do |association|
    add_attribute(association.to_s.foreign_key.to_sym)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8177
|
Dctl.Main.image_tag
|
train
|
# Builds the fully qualified docker tag for +image+, e.g.
# "org/project-env-app:3". A nil +version+ yields an unversioned tag;
# a negative version is resolved relative to the image's current
# version (-1 => previous build).
def image_tag(image, version: current_version_for_image(image))
  org = settings.org
  project = settings.project
  tag = "#{org}/#{project}-#{env}-#{image}"
  return tag if version.nil?
  version = version.to_i
  suffix =
    if version.negative?
      ":#{current_version_for_image(image).to_i + version}"
    else
      ":#{version}"
    end
  tag + suffix
end
|
ruby
|
{
"resource": ""
}
|
q8178
|
Dctl.Main.config_path
|
train
|
# Absolute path to the project's .dctl.yml config file. Prints an error
# and exits the process when the file does not exist.
def config_path
  path = File.expand_path(".dctl.yml", Dir.pwd)
  return path if File.exist?(path)
  puts Rainbow("Could not find config file at #{path}").red
  exit 1
end
|
ruby
|
{
"resource": ""
}
|
q8179
|
Dctl.Main.define_custom_commands
|
train
|
# Defines one Thor command on +klass+ for every entry in the settings'
# custom_commands section. Each entry maps a command name to one or
# more shell snippets, joined into a single `&&` chain.
#
# @param klass [Class] the Thor CLI class being extended
def define_custom_commands(klass)
  Array(settings.custom_commands).each do |command, args|
    klass.send(:desc, command, "[Custom Command] #{command}")
    # Concat with string so we can use exec rather than executing multiple
    # subshells. Exec allows us to reuse the shell in which dctl is being
    # executed, so we get to do things like reuse sudo authorizations
    # rather than always having to prompt.
    concatenated = Array(args).join(" && ").strip
    klass.send(:define_method, command, -> do
      stream_output(concatenated, exec: true)
    end)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8180
|
Dctl.Main.check_settings!
|
train
|
# Validates that the loaded Settings contain every required key,
# printing an error and exiting the process when one is missing.
def check_settings!
  %w(org project).each do |key|
    next if Settings.send(key)
    message = "Config is missing required key '#{key}'. Please add it " \
      "to #{config_path} and try again."
    message += "\n\nFor more info, see https://github.com/jutonz/dctl_rb#required-keys"
    puts Rainbow(message).red
    exit 1
  end
end
|
ruby
|
{
"resource": ""
}
|
q8181
|
Sinatra.Helpers.select_options
|
train
|
# Renders an <option> tag per [label, value] pair, newline-joined, with
# the pair whose value equals +current+ marked selected. When +prompt+
# is given it is prepended (note: mutates +pairs+) as a blank-valued
# option.
def select_options(pairs, current = nil, prompt = nil)
  pairs.unshift([prompt, '']) if prompt
  rendered = pairs.map do |label, value|
    tag(:option, label, :value => value, :selected => (current == value))
  end
  rendered.join("\n")
end
|
ruby
|
{
"resource": ""
}
|
q8182
|
Sinatra.Helpers.errors_on
|
train
|
# Haml helper rendering an object's validation errors as a <ul> inside
# a <div> (CSS class "errors" by default). Emits nothing when there are
# no errors. ActiveModel-style error collections use #full_messages;
# anything else is presented via HamlErrorPresenter with the block.
#
# NOTE(review): the default options hash is a shared mutable default —
# assumed never mutated downstream; confirm haml_tag treats it read-only.
def errors_on(object, options = { :class => 'errors' }, &block)
  return if object.errors.empty?
  lines = if object.errors.respond_to?(:full_messages)
    object.errors.full_messages
  else
    HamlErrorPresenter.new(object.errors).present(self, &block)
  end
  haml_tag(:div, options) do
    haml_tag(:ul) do
      lines.each do |error|
        haml_tag(:li, error)
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8183
|
Sinatra.Helpers.percentage
|
train
|
# Formats +number+ as a percentage string with the given precision,
# trimming an all-zero fractional part ("12.00%" -> "12%"). Returns nil
# for nil or empty input.
#
# @param number [Numeric, String]
# @param precision [Integer] digits after the decimal point
# @return [String, nil]
def percentage(number, precision = 2)
  return if number.to_s.empty?
  # "%%" properly escapes the literal percent sign; the previous bare
  # trailing "%" was an incomplete format specifier that Ruby only
  # tolerates outside debug mode (it raises under `ruby -d`).
  formatted = "%02.#{precision}f%%" % number
  formatted.gsub(/\.0*%$/, '%')
end
|
ruby
|
{
"resource": ""
}
|
q8184
|
BuoyData.NoaaBuoyObservation.google_chart_url
|
train
|
# Builds a Google Chart URL plotting up to 120 recent readings (column
# index 5 of each data row) for this buoy. Returns nil when no response
# or no data rows are available.
#
# @return [String, nil]
def google_chart_url
  max_rows = 120
  response = get_all
  return unless response
  historical_data = []
  response.each_with_index do |row, index|
    break if index >= max_rows
    next if row.match(/^#/) # skip NOAA header/comment rows
    # Split on runs of whitespace to get the row's columns. The previous
    # pattern / ?/ also matched the empty string, which split the row
    # into individual characters rather than columns, so row[5] picked a
    # single character instead of the intended column.
    columns = row.split(/\s+/)
    historical_data << columns[5]
  end
  return if historical_data.blank?
  [self.class.google_chart_base(@buoy_id), '&chd=t:', historical_data.join(',')].join
end
|
ruby
|
{
"resource": ""
}
|
q8185
|
Octo.ContactUs.send_email
|
train
|
# Emails an acknowledgement to the requester, then forwards the request
# details to every configured internal recipient.
def send_email
  subject = 'Thanks for contacting us - Octo.ai'
  # Thank-you mail to the person who contacted us.
  Octo::Email.send(self.email, subject, {
    text: 'Hey we will get in touch with you shortly. Thanks :)',
    name: self.firstname + ' ' + self.lastname
  })
  # Forward the request body to each configured internal recipient.
  Octo.get_config(:email_to).each do |recipient|
    Octo::Email.send(recipient.fetch('email'), subject, {
      text: self.email + ' \n\r ' + self.typeofrequest + '\n\r' + self.message,
      name: recipient.fetch('name')
    })
  end
end
|
ruby
|
{
"resource": ""
}
|
q8186
|
Raca.Servers.create
|
train
|
# Provisions a new cloud server and returns a Raca::Server handle.
#
# server_name - name for the new server
# flavor_name - human flavor name, resolved to a flavor id
# image_name  - human image name, resolved to an image id
# files       - optional map of remote path => file contents to inject
#               (sent base64-encoded as server "personality")
def create(server_name, flavor_name, image_name, files = {})
  payload = {
    "server" => {
      "name" => server_name,
      "imageRef" => image_name_to_id(image_name),
      "flavorRef" => flavor_name_to_id(flavor_name),
    }
  }
  unless files.empty?
    payload["server"]["personality"] = files.map do |path, blob|
      { 'path' => path, 'contents' => Base64.encode64(blob) }
    end
  end
  response = servers_client.post(servers_path, JSON.dump(payload), json_headers)
  server_data = JSON.parse(response.body)['server']
  Raca::Server.new(@account, @region, server_data['id'])
end
|
ruby
|
{
"resource": ""
}
|
q8187
|
Codependency.Graph.require
|
train
|
# Adds +file+ and, recursively, everything it depends on to the graph.
# Files already present are skipped, which also terminates dependency
# cycles.
def require(file)
  return if key?(file)
  dependencies = deps(file)
  self[file] = dependencies
  dependencies.each { |dependency| self.require(dependency) }
end
|
ruby
|
{
"resource": ""
}
|
q8188
|
Codependency.Graph.scan
|
train
|
# Seeds the graph from every file matching +glob+: each distinct
# dependency of those files is added (recursively) via #require.
def scan(glob)
  dependencies = Dir[glob].flat_map { |file| deps(file) }
  dependencies.uniq.each { |dependency| self.require(dependency) }
end
|
ruby
|
{
"resource": ""
}
|
q8189
|
Codependency.Graph.deps
|
train
|
# Resolved dependency paths declared inside +file+.
def deps(file)
  parser.parse(file).map do |name|
    path_to(path[name])
  end
end
|
ruby
|
{
"resource": ""
}
|
q8190
|
Rescuetime.Loop.run
|
train
|
# Main tracking loop: marks the tracker as running, captures the
# currently focused application, then polls once a second, recording a
# focus change whenever the current capture finishes or a backup is due.
# Never returns.
def run
  running!
  @current_app = Application.create(:debug => debug?)
  loop do
    sleep 1 # TODO: move to config
    focus_changed if @current_app.finished? || backup?
  end
end
|
ruby
|
{
"resource": ""
}
|
q8191
|
FlexibleAccessibility.ControllerMethods.has_access?
|
train
|
# True when +user+ holds +permission+, per AccessProvider.
# Raises UnknownUserException when no user is given.
def has_access?(permission, user)
  if user.nil?
    raise UnknownUserException
  end
  AccessProvider.action_permitted_for_user?(permission, user)
end
|
ruby
|
{
"resource": ""
}
|
q8192
|
PuppetBox.PuppetBox.run_puppet
|
train
|
# Runs each puppet testcase in +puppet_tests+ (test name => puppet code)
# against the node managed by +driver_instance+, recording every result
# in @result_set and logging a per-test report.
#
# @param driver_instance [Object] driver wrapping the target node
# @param puppet_tests [Hash] test name => puppet code
# @param logger [Logger, nil] preferred logger; defaults to @logger
# @param reset_after_run [Boolean] purge/reboot the VM between tests
#   once at least one class result exists for this node
# @raise [RuntimeError] if the node fails to start or fails its self test
def run_puppet(driver_instance, puppet_tests, logger: nil, reset_after_run: true)
  # use supplied logger in preference to the default puppetbox logger instance
  logger ||= @logger
  logger.debug("#{driver_instance.node_name} running #{puppet_tests.size} tests")
  raise "#{driver_instance.node_name} failed to start, unable to continue" unless driver_instance.open
  logger.debug("#{driver_instance.node_name} started")
  raise "#{driver_instance.node_name} self test failed, unable to continue" unless driver_instance.self_test
  logger.debug("#{driver_instance.node_name} self_test OK, running puppet")
  puppet_tests.each do |test_name, puppet_code|
    if @result_set.class_size(driver_instance.node_name) > 0 && reset_after_run
      # purge and reboot the vm - this will save approximately 1 second
      # per class on the self-test which we now know will succeed
      driver_instance.reset
    end
    setup_test(driver_instance, test_name)
    logger.info("running test #{driver_instance.node_name} - #{test_name}")
    # write out the local test file
    relative_puppet_file = commit_testcase(
      puppet_tests, driver_instance.node_name, test_name
    )
    driver_instance.sync_testcase(driver_instance.node_name, test_name)
    puppet_file_remote = File.join(PUPPET_TESTCASE_DIR, relative_puppet_file)
    driver_instance.run_puppet_x2(puppet_file_remote)
    # Fix: use the (possibly caller-supplied) logger consistently — the
    # original fell back to @logger for these calls, silently bypassing
    # the logger: argument.
    logger.debug("Saved result #{driver_instance.node_name} #{test_name} #{driver_instance.result.passed?}")
    @result_set.save(driver_instance.node_name, test_name, driver_instance.result)
    Report::log_test_result_or_errors(
      logger,
      driver_instance.node_name,
      test_name,
      driver_instance.result,
    )
  end
  logger.debug("#{driver_instance.node_name} test completed, closing instance")
  driver_instance.close
end
|
ruby
|
{
"resource": ""
}
|
q8193
|
Shapewear::Request.RequestHandler.extract_parameters
|
train
|
# Extracts operation parameter values from the request +node+, in the
# order declared by op_options[:parameters] (pairs of [name, type]).
# Missing elements yield nil; Integer and DateTime typed parameters are
# coerced, anything else is returned as element text.
#
# @param op_options [Hash] operation metadata, with :parameters pairs
# @param node [Nokogiri::XML::Node] operation element of the request
# @return [Array] parameter values, positionally ordered
def extract_parameters(op_options, node)
  logger.debug "Operation node: #{node.inspect}"
  r = []
  op_options[:parameters].each do |p|
    logger.debug " Looking for: tns:#{p.first.camelize_if_symbol(:lower)}"
    v = node.xpath("tns:#{p.first.camelize_if_symbol(:lower)}", namespaces).first
    if v.nil?
      # does nothing
    elsif p.last == Integer
      # Fixnum was removed in Ruby 3.2; Integer matches the same declarations.
      v = v.text.to_i
    elsif p.last == DateTime
      v = DateTime.parse(v.text) # TODO: add tests
    else
      v = v.text
    end
    logger.debug " Found: #{v.inspect}"
    r << v
  end
  r
end
|
ruby
|
{
"resource": ""
}
|
q8194
|
Shapewear::Request.RequestHandler.serialize_soap_result
|
train
|
# Serializes an operation result +r+ into a SOAP response envelope.
# A nil result becomes an xsi:nil Result element; a scalar result (or a
# NilClass/Class :returns declaration) is emitted directly; a Hash
# :returns declaration maps each declared field out of +r+ via
# extract_and_serialize_value.
#
# @param op_options [Hash] operation metadata (:public_name, :returns)
# @param r [Object, nil] the operation's return value
# @return [String] the serialized XML document
def serialize_soap_result(op_options, r)
  xb = Builder::XmlMarkup.new
  xb.instruct!
  xb.Envelope :xmlns => soap_env_ns, 'xmlns:xsi' => namespaces['xsi'] do |xenv|
    xenv.Body do |xbody|
      xbody.tag! "#{op_options[:public_name]}Response", :xmlns => namespaces['tns'] do |xresp|
        if r.nil?
          # Explicit nil marker so clients can distinguish nil from "".
          xresp.tag! "#{op_options[:public_name]}Result", 'xsi:nil' => 'true'
        else
          ret = op_options[:returns] rescue nil
          case ret
          when NilClass, Class
            xresp.tag! "#{op_options[:public_name]}Result", r
          when Hash
            # Declared field map: serialize each field of the result.
            xresp.tag! "#{op_options[:public_name]}Result" do |xres|
              ret.each do |k, v|
                extract_and_serialize_value(xres, r, k, v)
              end
            end
          else
            raise "Unsupported return type: #{ret.inspect}"
          end
        end
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8195
|
Shapewear::Request.RequestHandler.extract_and_serialize_value
|
train
|
# Looks up +field+ on +obj+ — hash keys in several spellings first,
# then method calls (exact name, then underscored) — and appends it to
# the XML +builder+, emitting an xsi:nil element when the value is nil.
# +type+ is part of the declared field map and is currently unused here.
#
# @raise [RuntimeError] when the field cannot be extracted at all
def extract_and_serialize_value(builder, obj, field, type)
  v = if obj.is_a?(Hash)
    # `||` instead of `or`: identical result in this expression, but
    # `or` has surprisingly low precedence and is conventionally
    # reserved for control flow, not value selection.
    obj[field] || obj[field.to_sym] || obj[field.to_s.underscore] || obj[field.to_s.underscore.to_sym]
  elsif obj.respond_to?(field)
    obj.send(field)
  elsif obj.respond_to?(field.underscore)
    obj.send(field.underscore)
  else
    raise "Could not extract #{field.inspect} from object: #{obj.inspect}"
  end
  if v.nil?
    # Explicit nil marker so consumers can distinguish nil from "".
    builder.tag! field.camelize_if_symbol, 'xsi:nil' => 'true'
  else
    builder.tag! field.camelize_if_symbol, v
  end
end
|
ruby
|
{
"resource": ""
}
|
q8196
|
Shapewear::Request.RequestHandler.serialize_soap_fault
|
train
|
# Serializes exception +ex+ as a SOAP Fault envelope, using the fault
# structure appropriate to the configured SOAP version (1.1 uses
# faultcode/faultstring; 1.2 uses Code/Subcode/Reason).
#
# @param ex [Exception] the error to report to the client
# @return [String] the serialized XML fault document
# @raise [RuntimeError] when soap_version is neither :soap11 nor :soap12
def serialize_soap_fault(ex)
  logger.debug "Serializing SOAP Fault: #{ex.inspect}"
  xb = Builder::XmlMarkup.new
  xb.instruct!
  xb.tag! 'e:Envelope', 'xmlns:e' => soap_env_ns do |xenv|
    xenv.tag! 'e:Body' do |xbody|
      xbody.tag! 'e:Fault' do |xf|
        case soap_version
        when :soap11
          # SOAP 1.1: flat faultcode/faultstring pair; exception class
          # name is folded into the server-side fault code.
          xf.faultcode "e:Server.#{ex.class.name}"
          xf.faultstring ex.message
        when :soap12
          # SOAP 1.2: structured Code/Subcode plus a Reason element.
          xf.tag! 'e:Code' do |xcode|
            xcode.tag! 'e:Value', 'e:Receiver'
            xcode.tag! 'e:Subcode' do |xsubcode|
              xsubcode.tag! 'e:Value', ex.class.name
            end
          end
          xf.tag! 'e:Reason', ex.message
        else
          raise "Unsupported SOAP version: #{soap_version}"
        end
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8197
|
Sinatra.EasyBreadcrumbs.view_variables
|
train
|
# Values of every instance variable considered view-relevant (as
# decided by #additional_var?), in instance_variables order.
def view_variables
  instance_variables.each_with_object([]) do |ivar, values|
    values << fetch_ivar_value(ivar) if additional_var?(ivar)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8198
|
Gricer.CaptureController.index
|
train
|
# Capture endpoint hit by the Gricer client-side JavaScript. Updates
# the visitor's session record with browser capability data from the
# query string, then (when the request record belongs to that session)
# records the window dimensions on the request. Responds with a plain
# text status; 500 signals that the session/request could not be
# updated so the client can report failure.
def index
  gricer_request = ::Gricer.config.request_model.first_by_id(params[:id])
  gricer_session = ::Gricer.config.session_model.first_by_id(session[:gricer_session])
  if gricer_session
    # Reaching this action at all proves JS is enabled.
    gricer_session.javascript = true
    gricer_session.java = params[:j]
    # 'false' string means the plugin is absent; leave the field unset.
    gricer_session.flash_version = params[:f] unless params[:f] == 'false'
    gricer_session.silverlight_version = params[:sl] unless params[:sl] == 'false'
    gricer_session.screen_width = params[:sx]
    gricer_session.screen_height = params[:sy]
    gricer_session.screen_size = "#{params[:sx]}x#{params[:sy]}" unless params[:sx].blank? or params[:sy].blank?
    gricer_session.screen_depth = params[:sd]
    gricer_session.save
    if gricer_request and gricer_request.session == gricer_session
      gricer_request.javascript = true
      gricer_request.window_width = params[:wx]
      gricer_request.window_height = params[:wy]
      if gricer_request.save
        render text: 'ok'
      else
        # Session was updated but the request record failed to save.
        render text: 'session only', status: 500
      end
      return
    else
      # No matching request record; session data alone was captured.
      render text: 'session only'
      return
    end
  end
  render text: 'failed', status: 500
end
|
ruby
|
{
"resource": ""
}
|
q8199
|
Optser.OptSet.get
|
train
|
# Fetches the option stored under +key+. A nil stored value falls back
# to +default+; when that is also nil and a block is given, the block
# supplies the value. Note: nil (not false) marks a missing value, so a
# stored false is returned as-is.
def get(key, default = nil, &block)
  stored = options[key]
  return stored unless stored.nil?
  return default unless default.nil?
  block ? block.call : nil
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.