| _id (string, 2–6 chars) | title (string, 9–130 chars) | partition (string, 3 classes) | text (string, 66–10.5k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q5800
|
FalkorLib.Git.delete_branch
|
train
|
def delete_branch(branch, path = Dir.pwd, opts = { :force => false })
g = MiniGit.new(path)
error "'#{branch}' is not a valid existing branch" unless list_branch(path).include?( branch )
g.branch ((opts[:force]) ? :D : :d) => branch.to_s
end
|
ruby
|
{
"resource": ""
}
|
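A minimal usage sketch for the `delete_branch` snippet above (hypothetical repository path; assumes the FalkorLib gem and its MiniGit dependency are available and that the branch exists locally):

require 'falkorlib'

# Force-delete the local branch 'feature/x' inside the given working copy;
# without :force => true this maps to 'git branch -d', which only deletes
# fully merged branches.
FalkorLib::Git.delete_branch('feature/x', '/path/to/repo', :force => true)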
q5801
|
FalkorLib.Git.grab
|
train
|
def grab(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
if branches.include? "remotes/#{remote}/#{branch}"
info "Grab the branch '#{remote}/#{branch}'"
exit_status = execute_in_dir(FalkorLib::Git.rootdir( path ), "git branch --track #{branch} #{remote}/#{branch}")
else
warning "the remote branch '#{remote}/#{branch}' cannot be found"
end
exit_status
end
|
ruby
|
{
"resource": ""
}
|
q5802
|
FalkorLib.Git.publish
|
train
|
def publish(branch, path = Dir.pwd, remote = 'origin')
exit_status = 1
error "no branch provided" if branch.nil?
#remotes = FalkorLib::Git.remotes(path)
branches = FalkorLib::Git.list_branch(path)
Dir.chdir(FalkorLib::Git.rootdir( path ) ) do
if branches.include? "remotes/#{remote}/#{branch}"
warning "the remote branch '#{remote}/#{branch}' already exists"
else
info "Publish the branch '#{branch}' on the remote '#{remote}'"
exit_status = run %(
git push #{remote} #{branch}:refs/heads/#{branch}
git fetch #{remote}
git branch -u #{remote}/#{branch} #{branch}
)
end
end
exit_status
end
|
ruby
|
{
"resource": ""
}
|
q5803
|
FalkorLib.Git.list_files
|
train
|
def list_files(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.ls_files.split
end
|
ruby
|
{
"resource": ""
}
|
q5804
|
FalkorLib.Git.last_tag_commit
|
train
|
def last_tag_commit(path = Dir.pwd)
res = ""
g = MiniGit.new(path)
unless (g.capturing.tag :list => true).empty?
# git rev-list --tags --max-count=1
res = (g.capturing.rev_list :tags => true, :max_count => 1).chomp
end
res
end
|
ruby
|
{
"resource": ""
}
|
q5805
|
FalkorLib.Git.remotes
|
train
|
def remotes(path = Dir.pwd)
g = MiniGit.new(path)
g.capturing.remote.split
end
|
ruby
|
{
"resource": ""
}
|
q5806
|
FalkorLib.Git.subtree_init?
|
train
|
def subtree_init?(path = Dir.pwd)
res = true
FalkorLib.config.git[:subtrees].keys.each do |dir|
res &&= File.directory?(File.join(path, dir))
end
res
end
|
ruby
|
{
"resource": ""
}
|
q5807
|
FalkorLib.Git.subtree_up
|
train
|
def subtree_up(path = Dir.pwd)
error "Unable to pull subtree(s): Dirty Git repository" if FalkorLib::Git.dirty?( path )
exit_status = 0
git_root_dir = rootdir(path)
Dir.chdir(git_root_dir) do
FalkorLib.config.git[:subtrees].each do |dir, conf|
next if conf[:url].nil?
#url = conf[:url]
remote = dir.gsub(/\//, '-')
branch = (conf[:branch].nil?) ? 'master' : conf[:branch]
remotes = FalkorLib::Git.remotes
info "Pulling changes into subtree '#{dir}' using remote '#{remote}/#{branch}'"
raise IOError, "The git remote '#{remote}' is not configured" unless remotes.include?( remote )
info "\t\\__ fetching remote '#{remotes.join(',')}'"
FalkorLib::Git.fetch( git_root_dir )
raise IOError, "The git subtree directory '#{dir}' does not exists" unless File.directory?( File.join(git_root_dir, dir) )
info "\t\\__ pulling changes"
exit_status = execute "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
#exit_status = puts "git subtree pull --prefix #{dir} --squash #{remote} #{branch}"
end
end
exit_status
end
|
ruby
|
{
"resource": ""
}
|
q5808
|
DEVS.CalendarQueue.local_init
|
train
|
def local_init(bucket_count, bucket_width, start_priority)
@width = bucket_width
old = @buckets
  @buckets = if @cached_buckets.nil?
Array.new(bucket_count) { [] }
else
n = @cached_buckets.size
if bucket_count < n
# shrink the array
@cached_buckets.slice!(bucket_count, n)
else
# expand the array
@cached_buckets.fill(n, bucket_count - n) { [] }
end
@cached_buckets
end
@cached_buckets = old
@last_priority = start_priority
i = start_priority / bucket_width # virtual bucket
@last_bucket = (i % bucket_count).to_i
@bucket_top = (i+1) * bucket_width + 0.5 * bucket_width
# set up queue size change thresholds
@shrink_threshold = bucket_count / 2 - 2
@expand_threshold = 2 * bucket_count
end
|
ruby
|
{
"resource": ""
}
|
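The virtual-bucket arithmetic above maps a priority onto a circular array of buckets. A small worked example with illustrative values (not taken from the source):

# bucket_count = 8, bucket_width = 0.5, start_priority = 3.2
i = 3.2 / 0.5                           # => 6.4   virtual bucket
last_bucket = (i % 8).to_i              # => 6     index into the circular array
bucket_top = (i + 1) * 0.5 + 0.5 * 0.5  # => 3.95  upper priority bound of that bucket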
q5809
|
DEVS.CalendarQueue.resize
|
train
|
def resize(new_size)
return unless @resize_enabled
bucket_width = new_width # find new bucket width
local_init(new_size, bucket_width, @last_priority)
i = 0
while i < @cached_buckets.size
bucket = @cached_buckets[i]
@size -= bucket.size
while obj = bucket.pop
self << obj
end
i += 1
end
end
|
ruby
|
{
"resource": ""
}
|
q5810
|
DEVS.CalendarQueue.new_width
|
train
|
def new_width
# decides how many queue elements to sample
return 1.0 if @size < 2
n = if @size <= 5
@size
else
5 + (@size / 10).to_i
end
n = 25 if n > 25
# record variables
tmp_last_bucket = @last_bucket
tmp_last_priority = @last_priority
tmp_bucket_top = @bucket_top
# dequeue n events from the queue and record their priorities with
# resize_enabled set to false.
@resize_enabled = false
tmp = Array.new(n)
average = 0.0
i = 0
while i < n
# dequeue events to get a test sample
tmp[i] = self.pop
# and sum up the differences in time
average += tmp[i].time_next - tmp[i-1].time_next if i > 0
i += 1
end
# calculate average separation of sampled events
average = average / (n-1).to_f
# put the first sample back onto the queue
self << tmp[0]
# recalculate average using only separations smaller than twice the
# original average
new_average = 0.0
j = 0
i = 1
while i < n
sub = tmp[i].time_next - tmp[i-1].time_next
if sub < average * 2.0
new_average += sub
j += 1
end
# put the remaining samples back onto the queue
self << tmp[i]
i += 1
end
new_average = new_average / j.to_f
# restore variables
@resize_enabled = true
@last_bucket = tmp_last_bucket
@last_priority = tmp_last_priority
@bucket_top = tmp_bucket_top
# this is the new width
if new_average > 0.0
new_average * 3.0
elsif average > 0.0
average * 2.0
else
1.0
end
end
|
ruby
|
{
"resource": ""
}
|
q5811
|
DEVS.Simulation.transition_stats
|
train
|
def transition_stats
if done?
@transition_stats ||= (
stats = {}
hierarchy = @processor.children.dup
i = 0
while i < hierarchy.size
child = hierarchy[i]
if child.model.coupled?
hierarchy.concat(child.children)
else
stats[child.model.name] = child.transition_stats
end
i+=1
end
total = Hash.new(0)
stats.values.each { |h| h.each { |k, v| total[k] += v }}
stats[:TOTAL] = total
stats
)
end
end
|
ruby
|
{
"resource": ""
}
|
q5812
|
RepoManager.BaseView.partial
|
train
|
def partial(filename)
filename = partial_path(filename)
raise "unable to find partial file: #{filename}" unless File.exists?(filename)
contents = File.open(filename, "rb") {|f| f.read}
# TODO: detect template EOL and match it to the partial's EOL
# force unix eol
contents.gsub!(/\r\n/, "\n") if contents.match("\r\n")
contents
end
|
ruby
|
{
"resource": ""
}
|
q5813
|
RepoManager.BaseView.partial_path
|
train
|
def partial_path(filename)
return filename if filename.nil? || Pathname.new(filename).absolute?
# try relative to template
if template
base_folder = File.dirname(template)
filename = File.expand_path(File.join(base_folder, filename))
    return filename if File.exist?(filename)
end
# try relative to PWD
filename = File.expand_path(File.join(FileUtils.pwd, filename))
  return filename if File.exist?(filename)
# try built in template folder
filename = File.expand_path(File.join('../templates', filename), __FILE__)
end
|
ruby
|
{
"resource": ""
}
|
q5814
|
Network.Client.set_logger
|
train
|
def set_logger
@logger = if block_given?
yield
elsif defined?(Rails)
Rails.logger
else
logger = Logger.new(STDOUT)
logger.level = Logger::DEBUG
logger
end
end
|
ruby
|
{
"resource": ""
}
|
q5815
|
Snapi.Validator.valid_input?
|
train
|
def valid_input?(key,string)
raise InvalidFormatError unless valid_regex_format?(key)
boolarray = validation_regex[key].map do |regxp|
(string =~ regxp) == 0 ? true : false
end
return true if boolarray.include?(true)
false
end
|
ruby
|
{
"resource": ""
}
|
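A usage sketch for `valid_input?` (assuming module-level access as the title suggests; note that `(string =~ regxp) == 0` requires each match to start at offset 0):

Snapi::Validator.valid_input?(:ipv4, '192.168.0.1')  # => true, IP_V4_REGEX matches from the start
Snapi::Validator.valid_input?(:port, 'abc')          # => false, no regex in the :port list matches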
q5816
|
Snapi.Validator.validation_regex
|
train
|
def validation_regex
{
:address => [HOSTNAME_REGEX, DOMAIN_REGEX, IP_V4_REGEX, IP_V6_REGEX],
:anything => [/.*/],
:bool => [TRUEFALSE_REGEX],
:command => [SIMPLE_COMMAND_REGEX],
:gsm_adapter => [ADAPTER_REGEX],
:hostname => [HOSTNAME_REGEX],
:interface => [INTERFACE_REGEX],
:ip => [IP_V4_REGEX, IP_V6_REGEX],
:ipv6 => [IP_V6_REGEX],
:ipv4 => [IP_V4_REGEX],
:json => [JsonValidator],
:mac => [MAC_REGEX],
:snapi_function_name => [SNAPI_FUNCTION_NAME],
:on_off => [ON_OFF_REGEX],
:port => [PORT_REGEX],
:uri => [URI_REGEX],
}
end
|
ruby
|
{
"resource": ""
}
|
q5817
|
RepoManager.TaskManager.load_tasks
|
train
|
def load_tasks
return if @loaded
# By convention, the '*_helper.rb' files are helpers and need to be loaded first. Load
# them into the Thor::Sandbox namespace
Dir.glob( File.join(File.dirname(__FILE__), '**', '*.rb') ).each do |task|
if task.match(/_helper\.rb$/)
#logger.debug "load_thorfile helper: #{task}"
::Thor::Util.load_thorfile task
end
end
# Now load the thor files
Dir.glob( File.join(File.dirname(__FILE__), '**', '*.rb') ).each do |task|
unless task.match(/_helper\.rb$/)
#logger.debug "load_thorfile: #{task}"
::Thor::Util.load_thorfile task
end
end
# load user tasks
if user_tasks_folder
Dir.glob( File.join([user_tasks_folder, '**', '*.{rb,thor}']) ).each { |task| ::Thor::Util.load_thorfile task if task.match(/_helper\.rb$/) }
Dir.glob( File.join([user_tasks_folder, '**', '*.{rb,thor}']) ).each { |task| ::Thor::Util.load_thorfile task unless task.match(/_helper\.rb$/) }
end
@loaded = true
end
|
ruby
|
{
"resource": ""
}
|
q5818
|
RepoManager.TaskManager.task_help
|
train
|
def task_help(name)
load_tasks
klass, task = find_by_namespace(name)
# set '$thor_runner' to true to display full namespace
$thor_runner = true
klass.task_help(shell , task)
end
|
ruby
|
{
"resource": ""
}
|
q5819
|
RepoManager.TaskManager.list_tasks
|
train
|
def list_tasks
load_tasks
# set '$thor_runner' to true to display full namespace
$thor_runner = true
list = [] #Thor.printable_tasks(all = true, subcommand = true)
Thor::Base.subclasses.each do |klass|
list += klass.printable_tasks(false) unless klass == Thor
end
list.sort!{ |a,b| a[0] <=> b[0] }
title = "repo_manager tasks"
shell.say shell.set_color(title, :blue, bold=true)
shell.say "-" * title.size
  shell.print_table(list, :indent => 2, :truncate => true)
end
|
ruby
|
{
"resource": ""
}
|
q5820
|
RepoManager.TaskManager.list_bare_tasks
|
train
|
def list_bare_tasks
load_tasks
Thor::Base.subclasses.each do |klass|
unless klass == Thor
klass.tasks.each do |t|
puts "#{klass.namespace}:#{t[0]}"
end
end
end
end
|
ruby
|
{
"resource": ""
}
|
q5821
|
GuideboxWrapper.GuideboxTv.search_for_by_provider
|
train
|
def search_for_by_provider(name, provider)
url = build_query(name)
url += '/fuzzy/' + provider + "/web"
data = @client.query(url)
data["results"]
end
|
ruby
|
{
"resource": ""
}
|
q5822
|
GuideboxWrapper.GuideboxTv.search_by_db_id
|
train
|
def search_by_db_id(id, type)
url = @base_url
url += "/search/id/"
case type
when "tvdb"
url += "tvdb/"
url += id.to_s
when "themoviedb"
url += "themoviedb/"
url += id.to_s
when "imdb"
url += "imdb/"
url += id
else
puts "That id type does not exist"
return
end
@client.query(url)
end
|
ruby
|
{
"resource": ""
}
|
q5823
|
GuideboxWrapper.GuideboxTv.show_information
|
train
|
def show_information(name)
id = self.search_for(name).first["id"]
url = @base_url
url += "/show/" + id.to_s
@client.query(url)
end
|
ruby
|
{
"resource": ""
}
|
q5824
|
DEVS.Coordinator.min_time_next
|
train
|
def min_time_next
tn = DEVS::INFINITY
if (obj = @scheduler.peek)
tn = obj.time_next
end
tn
end
|
ruby
|
{
"resource": ""
}
|
q5825
|
DEVS.Coordinator.max_time_last
|
train
|
def max_time_last
max = 0
i = 0
while i < @children.size
tl = @children[i].time_last
max = tl if tl > max
i += 1
end
max
end
|
ruby
|
{
"resource": ""
}
|
q5826
|
Rack.AcceptMediaTypes.order
|
train
|
def order(types) #:nodoc:
  types.map { |type| AcceptMediaType.new(type) }.
    reverse.sort.reverse.
    select { |type| type.valid? }.
    map { |type| type.range }
end
|
ruby
|
{
"resource": ""
}
|
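The reverse/sort/reverse chain above appears intended to sort in descending quality while biasing ties toward types listed later in the Accept header; invalid entries are then dropped and only the media ranges are returned. Conceptually (illustrative values):

# order(["text/html;q=0.8", "application/json"])
# => ["application/json", "text/html"]   (implicit q=1.0 outranks q=0.8)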
q5827
|
Lev.BetterActiveModelErrors.set
|
train
|
def set(key, value)
types[key.to_sym] = (value == [] ? [] : (value.is_a?(Symbol) ? value : nil))
messages[key.to_sym] = value
end
|
ruby
|
{
"resource": ""
}
|
q5828
|
Lev.BetterActiveModelErrors.delete
|
train
|
def delete(key)
key = key.to_sym
types.delete(key)
messages.delete(key)
end
|
ruby
|
{
"resource": ""
}
|
q5829
|
Lev.BetterActiveModelErrors.empty?
|
train
|
def empty?
all? { |k, v| v && v.empty? && !v.is_a?(String) }
end
|
ruby
|
{
"resource": ""
}
|
q5830
|
Lev.BetterActiveModelErrors.add_on_empty
|
train
|
def add_on_empty(attributes, options = {})
[attributes].flatten.each do |attribute|
value = @base.send(:read_attribute_for_validation, attribute)
is_empty = value.respond_to?(:empty?) ? value.empty? : false
add(attribute, :empty, options) if value.nil? || is_empty
end
end
|
ruby
|
{
"resource": ""
}
|
q5831
|
Guard.Inch.start
|
train
|
def start
message = 'Guard::Inch is running'
message << ' in pedantic mode' if options[:pedantic]
message << ' and inspecting private fields' if options[:private]
::Guard::UI.info message
run_all if options[:all_on_start]
end
|
ruby
|
{
"resource": ""
}
|
q5832
|
ConstructorPages.Page.update_fields_values
|
train
|
def update_fields_values(params)
  params || return
  # For each field: fetch (or create) its typed value object for this page,
  # assign the submitted param value when present, then persist it.
  fields.each {|f| f.find_or_create_type_object(self).tap {|t| t || next
    params[f.code_name.to_sym].tap {|v| v && t.value = v}
    t.save }}
end
|
ruby
|
{
"resource": ""
}
|
q5833
|
ConstructorPages.Page.find_page_in_branch
|
train
|
def find_page_in_branch(cname)
  # Resolve the template by the singularized code name; return nil if absent.
  # For a plural code name, collect matching descendants; when none match
  # (or the name is singular), fall back to the nearest matching ancestor.
  Template.find_by(code_name: cname.singularize).tap {|t| t || return
    (descendants.where(template_id: t.id) if cname == cname.pluralize).tap {|r| r ||= []
      return r.empty? ? ancestors.find_by(template_id: t.id) : r}}
end
|
ruby
|
{
"resource": ""
}
|
q5834
|
ConstructorPages.Page.as_json
|
train
|
def as_json(options = {})
{name: self.name, title: self.title}.merge(options).tap do |options|
fields.each {|f| options.merge!({f.code_name.to_sym => f.get_value_for(self)})}
end
end
|
ruby
|
{
"resource": ""
}
|
q5835
|
TopHat.MetaHelper.meta_tag
|
train
|
def meta_tag(options, open=false, escape=true)
tag(:meta, options, open, escape)
end
|
ruby
|
{
"resource": ""
}
|
q5836
|
FFI.BitMasks.bit_mask
|
train
|
def bit_mask(name,flags,type=:uint)
bit_mask = BitMask.new(flags,type)
typedef(bit_mask,name)
return bit_mask
end
|
ruby
|
{
"resource": ""
}
|
q5837
|
Cassie::Schema.Migrator.build_up_commands
|
train
|
def build_up_commands
local_versions.select{ |v| v > current_version && v <= target_version }
.map{ |v| ApplyCommand.new(v) }
end
|
ruby
|
{
"resource": ""
}
|
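The `select` above keeps exactly the local versions that are strictly newer than the current schema version and no newer than the target; each is then wrapped in an ApplyCommand. An illustrative run (hypothetical version values):

# current_version = 2, target_version = 5
# local_versions  = [1, 2, 3, 4, 5, 6]
# build_up_commands => [ApplyCommand(3), ApplyCommand(4), ApplyCommand(5)]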
q5838
|
Cassie::Schema.Migrator.build_down_commands
|
train
|
def build_down_commands
rollbacks = rollback_versions.map{ |v| RollbackCommand.new(v) }
missing = missing_versions_before(rollbacks.last.version).map{ |v| ApplyCommand.new(v) }
rollbacks + missing
end
|
ruby
|
{
"resource": ""
}
|
q5839
|
Cassie::Schema.Migrator.missing_versions_before
|
train
|
def missing_versions_before(last_rollback)
return [] unless last_rollback
rollback_index = applied_versions.index(last_rollback)
stop = if rollback_index == applied_versions.length - 1
# rolled back to oldest version, a rollback
# would put us in a versionless state.
# Any versions up to target should be applied
Version.new('0')
else
applied_versions[rollback_index + 1]
end
return [] if stop == target_version
local_versions.select{ |v| v > stop && v <= target_version }
end
|
ruby
|
{
"resource": ""
}
|
q5840
|
Ciesta.ClassMethods.field
|
train
|
def field(name, **options)
name = name.to_sym
definitions[name] = options
proxy.instance_eval do
define_method(name) { fields[name] }
define_method("#{name}=") { |value| fields[name] = value }
end
end
|
ruby
|
{
"resource": ""
}
|
q5841
|
Resync.BaseChangeList.changes
|
train
|
def changes(of_type: nil, in_range: nil)
resources.select do |r|
is_of_type = of_type ? r.change == of_type : true
is_in_range = in_range ? in_range.cover?(r.modified_time) : true
is_of_type && is_in_range
end
end
|
ruby
|
{
"resource": ""
}
|
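A usage sketch for `changes` (hypothetical `list` object; both filters are optional and default to matching everything):

range = (Time.now - 86_400)..Time.now
list.changes(of_type: 'created', in_range: range)  # resources created in the last day
list.changes                                       # all resources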
q5842
|
Opee.Actor.on_idle
|
train
|
def on_idle(op, *args)
@idle_mutex.synchronize {
@idle.insert(0, Act.new(op, args))
}
@loop.wakeup() if RUNNING == @state
end
|
ruby
|
{
"resource": ""
}
|
q5843
|
Opee.Actor.method_missing
|
train
|
def method_missing(m, *args, &blk)
raise NoMethodError.new("undefined method '#{m}' for #{self.class}", m, args) unless respond_to?(m, true)
ask(m, *args)
end
|
ruby
|
{
"resource": ""
}
|
q5844
|
StatModule.Stat.findings=
|
train
|
def findings=(findings)
raise TypeException unless findings.is_a?(Array)
findings.each { |item|
raise TypeException unless item.is_a?(StatModule::Finding)
raise DuplicateElementException if @findings.include?(item)
@findings.push(item)
}
end
|
ruby
|
{
"resource": ""
}
|
q5845
|
StatModule.Stat.print_header
|
train
|
def print_header
@finding_print_index = 0
hash = {}
hash['statVersion'] = @statVersion
hash['process'] = @process
hash['findings'] = []
result = hash.to_json
result = result[0..result.length - 3]
puts(result)
puts
$stdout.flush
end
|
ruby
|
{
"resource": ""
}
|
q5846
|
StatModule.Stat.print_finding
|
train
|
def print_finding
if @finding_print_index < @findings.length
result = @findings[@finding_print_index].to_json
result += ',' unless @finding_print_index >= @findings.length - 1
puts result
puts
$stdout.flush
@finding_print_index += 1
else
raise IndexOutOfBoundException
end
end
|
ruby
|
{
"resource": ""
}
|
q5847
|
StatModule.Stat.summary_print
|
train
|
def summary_print(formatted = false)
errors = 0
warnings = 0
findings.each { |finding|
if finding.failure
errors += 1
else
warnings += 1
end
}
if errors == 0 && warnings == 0
result = "#{FORMATTING_CHECKMARK} PASSED with no warning".colorize(:green)
elsif errors == 0
result = "#{FORMATTING_WARNING} PASSED with #{warnings} warning".colorize(:yellow)
elsif warnings == 0
result = "#{FORMATTING_BALL} FAILED with #{errors} error".colorize(:red)
else
result = "#{FORMATTING_BALL} FAILED with #{errors} error and #{warnings} warning".colorize(:red)
end
if formatted
result
else
result[result.index(' ') + 1..result.length]
end
end
|
ruby
|
{
"resource": ""
}
|
q5848
|
Wanikani.User.gravatar_url
|
train
|
def gravatar_url(options = {})
raise ArgumentError, "The size parameter must be an integer" if options[:size] && !options[:size].is_a?(Integer)
response = api_response("user-information")
hash = response["user_information"]["gravatar"]
return nil if hash.nil?
return build_gravatar_url(hash, options)
end
|
ruby
|
{
"resource": ""
}
|
q5849
|
HtmlSlicer.ActionViewExtension.slice
|
train
|
def slice(object, options = {}, &block)
slicer = HtmlSlicer::Helpers::Slicer.new self, object.options.reverse_merge(options).reverse_merge(:current_slice => object.current_slice, :slice_number => object.slice_number, :remote => false)
slicer.to_s
end
|
ruby
|
{
"resource": ""
}
|
q5850
|
YamlTranslator.Locale.save
|
train
|
def save(dir = Dir.pwd, options = {})
prefix = options[:prefix] if options.key?(:prefix)
write_file(File.join(dir, "#{prefix}#{lang}.yml"), options)
end
|
ruby
|
{
"resource": ""
}
|
q5851
|
YamlTranslator.Locale.compact_of
|
train
|
def compact_of(values = {}, path = KeyPath.new)
result = {}
  values.each do |k, v|
    path.move_to(k)
if v.is_a?(Hash)
result.merge!(compact_of(v, path))
else
result[path.to_s] = v
end
path.leave
end
result
end
|
ruby
|
{
"resource": ""
}
|
q5852
|
YamlTranslator.Locale.tree_of
|
train
|
def tree_of(values)
result = {}
current = result
values.each do |k, v|
keys = k.to_s.split('.')
last_key = keys.pop
keys.each do |ks|
current = if current.key?(ks)
current[ks]
else
current[ks] = {}
current[ks]
end
end
current[last_key] = v
current = result
end
result
end
|
ruby
|
{
"resource": ""
}
|
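`tree_of` is the inverse of `compact_of` above: it expands dot-separated keys back into nested hashes. A worked example:

tree_of('a.b' => 1, 'a.c' => 2, 'd' => 3)
# => { 'a' => { 'b' => 1, 'c' => 2 }, 'd' => 3 }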
q5853
|
Cmtool.ApplicationHelper.collapsible_content
|
train
|
def collapsible_content(options = {}, &blk)
options = {title: options} if options.is_a?(String) # Single argument is title
content = capture(&blk) if blk.present?
content ||= options[:content]
options[:collapsed] = true unless options.has_key?(:collapsed)
classes = Array.wrap(options[:class]) | ["collapsible-container", options[:collapsed] ? 'collapsed' : nil]
title_tag = content_tag(:div, "<span></span>#{options[:title]}".html_safe, class: 'collapsible-title')
content_tag(:div, title_tag + content_tag(:div, content, class: 'collapsible-content'), class: classes)
end
|
ruby
|
{
"resource": ""
}
|
q5854
|
Praxis.View.attribute
|
train
|
def attribute(name, **opts, &block)
raise AttributorException, "Attribute names must be symbols, got: #{name.inspect}" unless name.is_a? ::Symbol
attribute = schema.attributes.fetch(name) do
raise "Displaying :#{name} is not allowed in view :#{self.name} of #{schema}. This attribute does not exist in the mediatype"
end
if block_given?
type = attribute.type
@contents[name] = if type < Attributor::Collection
CollectionView.new(name, type.member_attribute.type, &block)
else
View.new(name, attribute, &block)
end
else
type = attribute.type
if type < Attributor::Collection
is_collection = true
type = type.member_attribute.type
end
if type < Praxis::Blueprint
view_name = opts[:view] || :default
view = type.views.fetch(view_name) do
raise "view with name '#{view_name.inspect}' is not defined in #{type}"
end
@contents[name] = if is_collection
Praxis::CollectionView.new(view_name, type, view)
else
view
end
else
@contents[name] = attribute # , opts]
end
end
end
|
ruby
|
{
"resource": ""
}
|
q5855
|
RepoManager.AssetConfiguration.save
|
train
|
def save(attrs=nil)
raise "a Hash of attributes to save must be specified" unless attrs && attrs.is_a?(Hash)
raise "folder must be set prior to saving attributes" unless folder
# merge attributes to asset that contains parent attributes
@asset.attributes.merge!(attrs)
# load contents of the user folder and merge in attributes passed to save
# so that we don't save parent attributes
contents = {}
  if File.exist?(folder)
contents = load_contents(folder)
raise "expected contents to be a hash" unless contents.is_a?(Hash)
end
contents = contents.merge!(attrs)
write_contents(folder, contents)
end
|
ruby
|
{
"resource": ""
}
|
q5856
|
RepoManager.AssetConfiguration.load
|
train
|
def load(ds=nil)
@folder ||= ds
contents = load_contents(folder)
# if a global parent folder is defined, load it first
parent = contents.delete(:parent) || parent
if parent
parent_folder = File.join(parent)
unless Pathname.new(parent_folder).absolute?
base_folder = File.dirname(folder)
parent_folder = File.join(base_folder, parent_folder)
end
logger.debug "AssetConfiguration loading parent: #{parent_folder}"
parent_configuration = RepoManager::AssetConfiguration.new(asset)
begin
parent_configuration.load(parent_folder)
rescue Exception => e
logger.warn "AssetConfiguration parent configuration load failed on: '#{parent_folder}' with: '#{e.message}'"
end
end
# Load all attributes as hash 'attributes' so that merging
# and adding new attributes doesn't require code changes. Note
# that the 'parent' setting is not merged to attributes
@asset.attributes.merge!(contents)
@asset.create_accessors(@asset.attributes[:user_attributes])
@asset
end
|
ruby
|
{
"resource": ""
}
|
q5857
|
RepoManager.AssetConfiguration.load_contents
|
train
|
def load_contents(asset_folder)
file = File.join(asset_folder, 'asset.conf')
  if File.exist?(file)
contents = YAML.load(ERB.new(File.open(file, "rb").read).result(@asset.get_binding))
if contents && contents.is_a?(Hash)
contents.recursively_symbolize_keys!
else
{}
end
else
{}
end
end
|
ruby
|
{
"resource": ""
}
|
q5858
|
RepoManager.AssetConfiguration.write_contents
|
train
|
def write_contents(asset_folder, contents)
contents.recursively_stringify_keys!
  FileUtils.mkdir(asset_folder) unless File.exist?(asset_folder)
filename = File.join(asset_folder, 'asset.conf')
#TODO, use "wb" and write CRLF on Windows
File.open(filename, "w") do |f|
f.write(contents.to_conf)
end
end
|
ruby
|
{
"resource": ""
}
|
q5859
|
Inaho.Entry.to_xml
|
train
|
def to_xml
return nil if title.nil? || body.nil?
xml = ""
xml << "<d:entry id=\"#{self.id}\" d:title=\"#{self.title}\">\n"
@index.each do |i|
xml << "\t<d:index d:value=\"#{i}\" d:title=\"#{title}\" "
xml << "d:yomi=\"#{yomi}\"" if !self.yomi.nil?
xml << "/>\n"
end
xml << "\t<div>\n"
xml << "\t\t#{@body}\n"
xml << "\t</div>\n"
xml << "</d:entry>\n"
return xml
end
|
ruby
|
{
"resource": ""
}
|
q5860
|
InstagramPublicApi.Client.location_media
|
train
|
def location_media(location_id, request_parameters: {limit: 1000}, limit: 10)
location = extract_location_media(location_id, request_parameters: request_parameters)
# check if we should get more data
paging_info = location.paging_info
# poll more data
while location.total_media_count < limit && paging_info[:has_next_page] do
request_opts = {}.merge(request_parameters)
if paging_info && paging_info[:end_cursor]
request_opts[:max_id] = paging_info[:end_cursor]
end
next_page_location = extract_location_media(location_id, request_parameters: request_opts)
location.add_media(next_page_location.media)
paging_info = next_page_location.paging_info
end
location
end
|
ruby
|
{
"resource": ""
}
|
q5861
|
InstagramPublicApi.Client.extract_location_media
|
train
|
def extract_location_media(location_id, request_parameters: {})
uri = "explore/locations/#{location_id}/"
data = request(uri: uri, parameters: request_parameters)
body = data.body[:location]
location = Entities::Location.new
attrs = %i[name lat lng id]
attrs.each do |attribute|
location.send("#{attribute}=", body[attribute])
end
media = {}
body[:media].fetch(:nodes, []).each do |medium|
media[medium[:id]] = Entities::MediumNode.new(medium)
end
location.media = media.values
location.top_posts = body[:top_posts].fetch(:nodes, []).map {|d| Entities::MediumNode.new(d)}
location.paging_info = body[:media].fetch(:page_info, {})
location
end
|
ruby
|
{
"resource": ""
}
|
q5862
|
InstagramPublicApi.Client.request
|
train
|
def request(uri:, request_options: {}, parameters: {})
opts = {
uri: uri,
request_options: request_options,
parameters: @default_parameters.merge(parameters)
}
parse_response(http_service.perform_request(opts))
end
|
ruby
|
{
"resource": ""
}
|
q5863
|
InstagramPublicApi.Client.parse_response
|
train
|
def parse_response(response)
OpenStruct.new(
raw_response: response,
body: JSON.parse(response.body, symbolize_names: true)
)
end
|
ruby
|
{
"resource": ""
}
|
q5864
|
Saddle.Options.default_options
|
train
|
def default_options
{
:host => host,
:port => port,
:path_prefix => path_prefix,
:use_ssl => use_ssl,
:request_style => request_style,
:num_retries => num_retries,
:timeout => timeout,
:extra_env => extra_env,
:http_adapter => http_adapter,
:stubs => stubs,
:return_full_response => return_full_response,
:additional_middlewares => self.additional_middlewares
}
end
|
ruby
|
{
"resource": ""
}
|
q5865
|
HeimdallApm.Probe.instrument
|
train
|
def instrument(type, name, opts = {})
txn = ::HeimdallApm::TransactionManager.current
segment = ::HeimdallApm::Segment.new(type, name)
txn.start_segment(segment)
# TODO: maybe yield the segment here to have the block pass additional
# informations
yield
ensure
txn.stop_segment
end
|
ruby
|
{
"resource": ""
}
|
q5866
|
RubiGen.Source.names
|
train
|
def names(filter = nil)
inject([]) do |mem, spec|
case filter
when :visible
mem << spec.name if spec.visible?
end
mem
end.sort
end
|
ruby
|
{
"resource": ""
}
|
q5867
|
RubiGen.PathSource.each
|
train
|
def each
Dir["#{path}/[a-z]*"].each do |dir|
if File.directory?(dir)
yield Spec.new(File.basename(dir), dir, label)
end
end
end
|
ruby
|
{
"resource": ""
}
|
q5868
|
RubiGen.GemPathSource.each
|
train
|
def each
generator_full_paths.each do |generator|
yield Spec.new(File.basename(generator).sub(/_generator.rb$/, ''), File.dirname(generator), label)
end
end
|
ruby
|
{
"resource": ""
}
|
q5869
|
MinimalistAuthentication.VerifiableToken.secure_match?
|
train
|
def secure_match?(token)
ActiveSupport::SecurityUtils.secure_compare(
::Digest::SHA256.hexdigest(token),
::Digest::SHA256.hexdigest(verification_token)
)
end
|
ruby
|
{
"resource": ""
}
|
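Digesting both tokens through SHA-256 before `secure_compare` yields fixed-length inputs, so the constant-time comparison cannot leak the token's length. A usage sketch (hypothetical record exposing a stored `verification_token`):

user.secure_match?(params[:token])  # => true only on an exact token match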
q5870
|
MxitRails.Page.render
|
train
|
def render *arguments
if @_mxit_emulator
output = render_to_string *arguments
output = MxitRails::Styles.add_emoticons output
super :inline => output
else
super *arguments
end
end
|
ruby
|
{
"resource": ""
}
|
q5871
|
MxitApi.Client.user_code_request_uri
|
train
|
def user_code_request_uri(redirect_uri, state, scopes)
if scopes.empty?
raise MxitApi::Exception.new("No scopes were provided.")
end
# build parameters
parameters = {
:response_type => "code",
:client_id => @client_id,
:redirect_uri => redirect_uri,
:state => state,
:scope => scopes.join(' ')
}
  MXIT_AUTH_CODE_URI + "?#{URI.encode_www_form(parameters)}"
end
|
ruby
|
{
"resource": ""
}
|
q5872
|
GameOverseer.MessageManager.message
|
train
|
def message(client_id, string, reliable = false, channel = ChannelManager::CHAT)
GameOverseer::ENetServer.instance.transmit(client_id, string, reliable, channel)
end
|
ruby
|
{
"resource": ""
}
|
q5873
|
GameOverseer.MessageManager.broadcast
|
train
|
def broadcast(string, reliable = false, channel = ChannelManager::CHAT)
GameOverseer::ENetServer.instance.broadcast(string, reliable, channel)
end
|
ruby
|
{
"resource": ""
}
|
q5874
|
ClarkKent.Report.row_class
|
train
|
def row_class
report_columns = viable_report_columns
@row_class ||= Class.new do
report_columns.each do |report_column|
attr_accessor report_column.column_name.to_sym
end
def initialize params = {}
params.each { |key, value| send "#{key}=", value }
end
def [](key)
self.send key
end
end
end
|
ruby
|
{
"resource": ""
}
|
q5875
|
ClarkKent.Report.report_filter_params
|
train
|
def report_filter_params
Hash[*viable_report_filters.map{|filter| filter.filter_match_params}.flatten].
merge(order: self.sorter)
end
|
ruby
|
{
"resource": ""
}
|
q5876
|
ClarkKent.Report.custom_filters
|
train
|
def custom_filters
self.resource_class.report_filter_options.select{|filter| viable_report_filters.map(&:filter_name).exclude? filter.param}
end
|
ruby
|
{
"resource": ""
}
|
q5877
|
ClarkKent.Report.available_columns
|
train
|
def available_columns
column_options.reject{|column| viable_report_columns.map(&:column_name).include? column.name.to_s}
end
|
ruby
|
{
"resource": ""
}
|
q5878
|
PropLogic.Term.each_sat
|
train
|
def each_sat
return to_enum(:each_sat) unless block_given?
sat_loop(self) do |sat, solver|
yield sat
negated_vars = sat.terms.map do |t|
t.is_a?(NotTerm) ? t.terms[0] : ~t
end
solver << PropLogic.all_or(*negated_vars)
end
end
|
ruby
|
{
"resource": ""
}
|
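`each_sat` enumerates satisfying assignments by repeatedly solving and then adding a blocking clause: the disjunction of each variable's negation rules out the assignment just yielded. A usage sketch (assuming `a` and `b` are PropLogic variables):

(a | b).each_sat do |assignment|
  puts assignment  # each satisfying assignment is yielded exactly once
end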
q5879
|
AppRepo.Analyser.fetch_app_version
|
train
|
def fetch_app_version(options)
metadata = AppRepo::Uploader.new(options).download_manifest_only
FastlaneCore::UI.command_output('TODO: Parse version out from metadata')
puts JSON.pretty_generate(metadata) unless metadata.nil?
FastlaneCore::UI.important('TODO: parse out the bundle-version')
metadata['bundle-version']
end
|
ruby
|
{
"resource": ""
}
|
q5880
|
Todonotes.FixmeFormatter.format
|
train
|
def format(event)
#@@basicformat "%*s %s"
#~ buff = sprintf("%-*s %-5s", Log4r::MaxLevelLength, Log4r::LNAMES[event.level],
#~ event.data.is_a?(Array) ? event.data.first : event.name)
buff = "%5s" % (event.data.is_a?(Array) ? event.data.first : event.name)
#~ buff += (event.tracer.nil? ? "" : "(#{event.tracer[2]})") + ": "
buff << ": "
buff << format_object(event.data.is_a?(Array) ? event.data.last : event.data)
buff << (event.tracer.nil? ? "" : " (#{event.tracer.join('/')})")
buff << "\n"
buff
end
|
ruby
|
{
"resource": ""
}
|
q5881
|
Wanikani.Client.valid_api_key?
|
train
|
def valid_api_key?(api_key = nil)
api_key ||= @api_key
  return false if api_key.nil? || api_key.empty?
res = client.get("/api/#{@api_version}/user/#{api_key}/user-information")
return false if !res.success? || res.body.has_key?("error")
return true
end
|
ruby
|
{
"resource": ""
}
|
q5882
|
Wanikani.Client.client
|
train
|
def client
Faraday.new(url: Wanikani::API_ENDPOINT) do |conn|
conn.response :json, :content_type => /\bjson$/
conn.adapter Faraday.default_adapter
end
end
|
ruby
|
{
"resource": ""
}
|
q5883
|
Wanikani.Client.api_response
|
train
|
def api_response(resource, optional_arg = nil)
raise ArgumentError, "You must define a resource to query WaniKani" if resource.nil? || resource.empty?
begin
res = client.get("/api/#{@api_version}/user/#{@api_key}/#{resource}/#{optional_arg}")
if !res.success? || res.body.has_key?("error")
raise_exception(res)
else
return res.body
end
rescue => error
raise Exception, "There was an error: #{error.message}"
end
end
|
ruby
|
{
"resource": ""
}
|
q5884
|
Wanikani.Client.raise_exception
|
train
|
def raise_exception(response)
  raise Wanikani::InvalidKey, "The API key used for this request is invalid." if response.status == 401
message = if response.body.is_a?(Hash) and response.body.has_key?("error")
response.body["error"]["message"]
else
"Status code: #{response.status}"
end
raise Wanikani::Exception, "There was an error fetching the data from WaniKani (#{message})"
end
|
ruby
|
{
"resource": ""
}
|
q5885
|
LazyJson.Object.[]
|
train
|
def [](key)
if ! @fields.has_key?(key) && ! @fseq.empty?
while true
@fseq = @fseq.skip_whitespace
if @fseq.first == 125 # '}'.ord
@fseq = @fseq.skip_byte(125).skip_whitespace # '}'.ord
break
end
new_key, new_value = read_field_and_consume
@fields[new_key] = new_value
break if new_key == key
end
end
@fields[key]
end
|
ruby
|
{
"resource": ""
}
|
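The accessor above parses the underlying byte sequence lazily: fields are consumed one at a time, each parsed field is cached in `@fields`, and parsing stops as soon as the requested key has been read (or the object ends). A sketch of the access pattern (hypothetical construction; the real entry point is not shown in this snippet):

obj = LazyJson::Object.new(byte_seq)  # hypothetical: wraps an unconsumed '{...}' byte sequence
obj["name"]   # parses and caches fields up to and including "name"
obj["name"]   # second lookup is a pure cache hit, no further parsing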
q5886
|
LazyJson.Array.[]
|
train
|
def [](i)
if @elements.size <= i && ! @eseq.empty?
while true
@eseq = @eseq.skip_whitespace
if @eseq.first == 93 # ']'.ord
@eseq = @eseq.skip_byte(93).skip_whitespace # ']'.ord
break
end
new_value = read_value_and_consume
@elements << new_value
break if @elements.size > i
end
end
@elements[i]
end
|
ruby
|
{
"resource": ""
}
|
q5887
|
RepoManager.ThorHelper.configuration
|
train
|
def configuration(configuration_file=nil)
return @configuration if @configuration
logger.debug "getting repo_manager configuration"
app_options = {}
app_options[:config] = configuration_file || options[:config]
@configuration = ::RepoManager::Settings.new(nil, app_options)
end
|
ruby
|
{
"resource": ""
}
|
q5888
|
Saddle.BaseEndpoint.request
|
train
|
def request(method, action, params={}, options={})
# Augment in interesting options
options[:call_chain] = _path_array
options[:action] = action
@requester.send(method, _path(action), params, options)
end
|
ruby
|
{
"resource": ""
}
|
q5889
|
Saddle.BaseEndpoint._build_and_attach_node
|
train
|
def _build_and_attach_node(endpoint_class, method_name=nil)
# Create the new endpoint
endpoint_instance = endpoint_class.new(@requester, method_name, self)
# Attach the endpoint as an instance variable and method
method_name ||= endpoint_class.name.demodulize.underscore
self.instance_variable_set("@#{method_name}", endpoint_instance)
self.define_singleton_method(method_name.to_s) { endpoint_instance }
endpoint_instance
end
|
ruby
|
{
"resource": ""
}
|
q5890
|
Saddle.BaseEndpoint._endpoint_chain
|
train
|
def _endpoint_chain
chain = []
node = self
while node.is_a?(BaseEndpoint)
chain << node
node = node.parent
end
chain.reverse
end
|
ruby
|
{
"resource": ""
}
|
q5891
|
InstagramPublicApi.HTTPService.perform_request
|
train
|
def perform_request(request_options: {}, parameters: {}, uri:)
args = parameters
request_options = request_options.merge(faraday_options)
# figure out our options for this request
# set up our Faraday connection
connection = Faraday.new(faraday_options) do |faraday|
faraday.adapter Faraday.default_adapter
end
connection.get(uri, args)
end
|
ruby
|
{
"resource": ""
}
|
q5892
|
D3MPQ.Analyzer.write_game_balance
|
train
|
def write_game_balance
write_single_file("analyze")
dir = File.join("analyze", parser_name)
dir = File.join(dir, @field.to_s) if @field
write_analyzed(dir)
end
|
ruby
|
{
"resource": ""
}
|
q5893
|
D3MPQ.Analyzer.write_analyzed
|
train
|
def write_analyzed(dir)
FileUtils.mkdir_p(dir)
attributes.each do |a, v|
path = File.join(dir, a.to_s)
s = "Count|Value\n" + v.map { |e| "#{e[:count]}|#{e[:value]}" }.join("\n")
File.open("#{path}.csv", 'w') { |f| f.write(s) }
end
end
|
ruby
|
{
"resource": ""
}
|
q5894
|
D3MPQ.Analyzer.attributes
|
train
|
def attributes
return @attributes if @attributes
unsorted = Hash.new { |h,k| h[k] = Hash.new(0) }
snapshots.each do |attributes|
attributes = attributes[@field] if @field
attributes.each do |h|
h.each { |attribute, value| unsorted[attribute][value] += 1 }
end
end
@attributes = Hash.new { |h,k| h[k] = [] }
unsorted.each do |name, h|
h.each do |value, count|
@attributes[name] << { :value => value, :count => count }
end
@attributes[name].sort! { |x,y| y[:count] <=> x[:count] }
end
return @attributes
end
|
ruby
|
{
"resource": ""
}
|
q5895
|
Memcached.Packet.to_s
|
train
|
def to_s
extras_s = extras_to_s
key_s = self[:key].to_s
value_s = self[:value].to_s
self[:extras_length] = extras_s.length
self[:key_length] = key_s.length
self[:total_body_length] = extras_s.length + key_s.length + value_s.length
header_to_s + extras_s + key_s + value_s
end
|
ruby
|
{
"resource": ""
}
|
q5896
|
MongoidCart.ViewHelpers.remove_from_cart_link
|
train
|
def remove_from_cart_link(item)
link_to(mongoid_cart.remove_item_path(item: {type: item.class.to_s, id: item._id}), {class: "btn btn-default"}) do
(tag :i, class: 'fa fa-cart-plus').concat('Remove from cart')
end
end
|
ruby
|
{
"resource": ""
}
|
q5897
|
MongoidCart.ViewHelpers.add_to_cart_link
|
train
|
def add_to_cart_link(item)
link_to(mongoid_cart.add_item_path(item: {type: item.class.to_s, id: item._id}), {class: "btn btn-default"}) do
(tag :i, class: 'fa fa-cart-plus').concat('Add to cart')
end
end
|
ruby
|
{
"resource": ""
}
|
q5898
|
Memcached.Client.stats
|
train
|
def stats(contents={}, &callback)
send_request Request::Stats.new(contents) do |result|
callback.call result
if result[:status] == Errors::NO_ERROR && result[:key] != ''
:proceed
end
end
end
|
ruby
|
{
"resource": ""
}
|
q5899
|
Numerals.Numeral.to_base
|
train
|
def to_base(other_base)
if other_base == @radix
dup
else
normalization = exact? ? :exact : :approximate
Numeral.from_quotient to_quotient, base: other_base, normalize: normalization
end
end
|
ruby
|
{
"resource": ""
}
|
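A usage sketch for `to_base` (assuming `n` is an existing Numerals::Numeral in radix 10):

n.to_base(2)   # equivalent numeral in radix 2, exact or approximate to match n
n.to_base(10)  # same radix: returns a copy via dup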