_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
# Fetches a rendered block image from the fishbans block service.
#
# @param id [String, Integer] block id
# @param metadata [Integer, nil] optional metadata value, appended as "-N"
# @param size [Integer] image size; omitted from the URL at the default of 42
# @return [ChunkyPNG::Image] the decoded PNG image
def get_block(id, metadata = nil, size = 42)
  segments = "#{id}"
  segments += "-#{metadata}" unless metadata.nil?
  segments += "/#{size}" if size != 42
  response = get("http://blocks.fishbans.com/#{segments}", false)
  ChunkyPNG::Image.from_blob(response.body)
end
"resource": ""
} |
# Fetches a rendered monster image from the fishbans block service.
#
# @param id [#to_s] monster identifier, with or without the leading "m"
# @param three [Boolean] request the 3D rendering when true
# @param size [Integer] image size; omitted from the URL at the default of 42
# @return [ChunkyPNG::Image] the decoded PNG image
def get_monster(id, three = false, size = 42)
  monster_id = id.to_s
  # Ensure exactly one leading "m" prefix on the identifier.
  monster_id = "m#{monster_id}" if monster_id !~ /^m/
  url = "http://blocks.fishbans.com/#{monster_id}"
  url += '-3d' if three
  url += "/#{size}" if size != 42
  response = get(url, false)
  ChunkyPNG::Image.from_blob(response.body)
end
"resource": ""
} |
# Zips the contents of inputDir (recursively, via writeEntries) into outputFile.
#
# @param inputDir [String] directory whose entries will be archived
# @param outputFile [String] path of the zip archive to create
def write(inputDir, outputFile)
  entries = Dir.entries(inputDir) - %w[. ..]
  io = Zip::File.open(outputFile, Zip::File::CREATE)
  writeEntries(entries, '', io, inputDir)
  io.close
end
"resource": ""
} |
# Returns a fixture-ready blob id for the given fixture file: the
# URL-safe Base64 of the file's SHA-256 digest, quoted via #inspect so
# it can be embedded directly in a YAML fixture.
#
# @param path [String] path relative to test/fixtures
# @return [String] the quoted blob id
def file_blob_id(path)
  file_path = Rails.root.join('test/fixtures'.freeze).join(path)
  blob_contents = File.binread file_path
  # This needs to be kept in sync with blob_model.rb.
  Base64.urlsafe_encode64(Digest::SHA256.digest(blob_contents)).inspect
end
"resource": ""
} |
# Returns the given fixture file's contents as a YAML !!binary literal,
# base64-encoded and re-indented to nest under the current fixture entry.
#
# @param path [String] path relative to test/fixtures
# @param options [Hash] :indent => current indentation width (default 2)
# @return [String] a "!!binary |" YAML scalar with indented base64 lines
def file_blob_data(path, options = {})
  # The line with base64 data must be indented further than the current line.
  indent = ' ' * ((options[:indent] || 2) + 2)
  file_path = Rails.root.join('test/fixtures'.freeze).join(path)
  blob_contents = File.binread file_path
  base64_data = Base64.encode64 blob_contents
  # Push every base64 line to the required indent, then trim the edges.
  base64_data.gsub! "\n", "\n#{indent}"
  base64_data.strip!
  "!!binary |\n#{indent}#{base64_data}"
end
"resource": ""
} |
# Returns the size in bytes of the given fixture file.
#
# @param path [String] path relative to test/fixtures
# @return [Integer] file size in bytes
def file_blob_size(path)
  file_path = Rails.root.join('test/fixtures'.freeze).join(path)
  # File.size is the direct API; File.stat(...).size allocated an
  # intermediate File::Stat object for no benefit.
  File.size(file_path)
end
"resource": ""
} |
# Populates filename, content_type and base64-encoded data from a local file.
# Leaves the object untouched when path is blank.
#
# @param path [String, nil] path to the file to read
def set_with_filename(path)
  return if path.blank?
  self.filename = ::File.basename(path)
  # BUGFIX: MIME::Types.type_for returns [] for unknown extensions, so
  # calling .first.content_type raised NoMethodError on nil. Leave
  # content_type nil when no MIME type matches.
  mime_type = ::MIME::Types.type_for(path).first
  self.content_type = mime_type && mime_type.content_type
  self.data = ::Base64.encode64(::File.read(path))
end
"resource": ""
} |
q9207 | StorageRoom.File.download_to_directory | train | def download_to_directory(path)
Dir.mkdir(path) unless ::File.directory?(path)
download_file(self[:@url], ::File.join(path, local_filename))
true
end | ruby | {
"resource": ""
} |
# Renders this player as a line of SwissPerfect text.
#
# @param rounds [Integer] number of rounds to emit result cells for
# @param columns [Array<Symbol>] accessor names whose values form the leading cells
# @param formats [String] format string applied (via %) to all collected values
# @return [String] the formatted player line
def to_sp_text(rounds, columns, formats)
  values = columns.inject([]) do |vals, col|
    val = send(col).to_s
    # Strip only a *trailing* ".0" from whole-number scores ("3.0" -> "3").
    # The anchor fixes the old unanchored /\.0/ pattern, which would have
    # corrupted any value containing ".0" elsewhere (e.g. "10.05" -> "105").
    val.sub!(/\.0\z/, '') if col == :points
    vals << val
  end
  (1..rounds).each do |r|
    result = find_result(r)
    values << (result ? result.to_sp_text : " : ")
  end
  formats % values
end
"resource": ""
} |
# Population (or sample) variance of the numeric values in this array.
#
# @param sample [Boolean] when true, divide by n - 1 (Bessel's correction)
# @return [Float] the variance
def variance(sample = false)
  values = numerify
  mean = values.average
  squared_error = values.inject(0) { |acc, v| acc + (v - mean)**2 }
  divisor = values.length.to_f - (sample ? 1 : 0)
  (1 / divisor) * squared_error
end
"resource": ""
} |
q9210 | GemFootprintAnalyzer.CLI.run | train | def run(args = ARGV)
opts.parse!(args)
if !analyze_gemfile? && args.empty?
puts opts.parser
exit 1
end
print_requires(options, args)
end | ruby | {
"resource": ""
} |
q9211 | StreamBot.Tracker.start | train | def start
keywords = self.params["keywords"]
@thread = Thread.new do
@client.filter_by_keywords(keywords) do |status|
if retweet?(status)
before_retweet.trigger(status)
@retweet.retweet(status["id"])
after_retweet.trigger(status)
end
end
end
@thread.join
end | ruby | {
"resource": ""
} |
# Loads the retweet filter definitions from the configured YAML file.
#
# @return [Hash, nil] parsed filters, or nil when no config path is set,
#   the file is missing, or parsing fails (errors are reported via on_error)
def load_filters
  filters_config = self.params["filters_config"]
  # File.exist? replaces File.exists?, which is deprecated (removed in Ruby 3.2).
  if !filters_config.nil? && File.exist?(filters_config)
    begin
      YAML::load_file(filters_config)
    rescue
      on_error.trigger($!, $@)
    end
  end
end
"resource": ""
} |
# Decides whether a status should be retweeted by checking it against the
# configured filters. The status is rejected as soon as any filter value
# matches the value found at that filter's path inside the status hash.
#
# @param status [Hash] the decoded tweet payload
# @return [Boolean] true when no filter matched (safe to retweet)
def retweet?(status)
  filters = load_filters
  retweet = true
  if !filters.nil?
    filters.each_pair do |path, value|
      # Normalize scalar filter values to an array so both a single value
      # and a list of values can be configured per path.
      array = []
      array << value
      array.flatten.each do |filter_value|
        path_value = StreamBot::ArrayPath.get_path(status, path)
        # Reject on exact match or substring/element containment.
        if path_value.eql?(filter_value) || path_value.include?(filter_value)
          on_match.trigger(status, path, filter_value)
          retweet = false
        end
      end
    end
  end
  retweet
end
"resource": ""
} |
# Renders an HTML tag (possibly nested) as a string.
#
# @param params [Hash] 'tag' => tag name, 'attr' => attributes (Hash or
#   HTML::AutoAttr), 'cdata' => content: nil/'' for an empty self-closing
#   tag, a String, a Hash (one nested tag), or an Array of Hashes (nested
#   children) / scalars (repeated sibling tags).
# @return [String] the rendered markup
def tag( params )
  # TODO: make these method args if possible
  tag = params['tag']
  attr = params['attr']
  cdata = params['cdata']
  unless attr.kind_of?( HTML::AutoAttr )
    attr = HTML::AutoAttr.new( attr, @sorted )
  end
  # Empty tag: render a self-closing element when there is no content.
  # BUGFIX: the previous check `cdata and cdata.to_s.length` never took
  # this branch for an empty string, because 0 is truthy in Ruby.
  unless cdata && !cdata.to_s.empty?
    return ( @indent * @level ) + '<' + tag + attr.to_s + ' />' + @newline
  end
  rendered = ''
  no_indent = 0
  if cdata.kind_of?( Array )
    if cdata[0].kind_of?( Hash )
      # Array of hashes: render each as a nested child tag, one level deeper.
      @level += 1
      rendered = @newline
      cdata.each do |hash|
        rendered += tag( hash )
      end
      @level -= 1
    else
      # Array of scalars: emit one sibling tag per scalar and return early.
      str = ''
      cdata.each do |scalar|
        str += tag( 'tag' => tag, 'attr' => attr, 'cdata' => scalar )
      end
      return str
    end
  elsif cdata.kind_of?( Hash )
    # Single nested tag.
    @level += 1
    rendered = @newline + tag( cdata )
    @level -= 1
  else
    # Scalar content: optionally HTML-encode; suppress the closing indent.
    rendered = @encode ? @encoder.encode( cdata, @encodes ) : cdata
    no_indent = 1
  end
  return (@indent * @level) \
    + '<' + tag + attr.to_s + '>' \
    + rendered.to_s + ( no_indent == 1 ? '' : ( @indent * @level ) ) \
    + '</' + tag + '>' + @newline
end
"resource": ""
} |
# Darkens a hex colour by multiplying each RGB channel by +amount+.
#
# @param hex_color [String] colour such as "#336699" (leading '#' optional)
# @param amount [Float] channel multiplier between 0.0 and 1.0
# @return [String] the darkened colour as "#rrggbb"
def darken_color(hex_color, amount = 0.4)
  channels = hex_color.delete('#').scan(/../)
  darkened = channels.map { |channel| (channel.hex * amount).round }
  format('#%02x%02x%02x', *darkened)
end
"resource": ""
} |
# Lightens a hex colour by adding +amount+ of full brightness to each
# RGB channel, clamping at 255.
#
# @param hex_color [String] colour such as "#336699" (leading '#' optional)
# @param amount [Float] fraction of 255 added to each channel
# @return [String] the lightened colour as "#rrggbb"
def lighten_color(hex_color, amount = 0.6)
  boost = 255 * amount
  channels = hex_color.delete('#').scan(/../).map do |channel|
    [(channel.hex + boost).round, 255].min
  end
  format('#%02x%02x%02x', *channels)
end
"resource": ""
} |
# Assigns only those attributes from attributes_hash that are not already
# present on the record (fills in blanks without overwriting).
#
# @param attributes_hash [Hash] candidate attribute values keyed by name
def reverse_assign_attributes(attributes_hash)
  attributes_to_assign = attributes_hash.keys.reject{ |_attribute_name| attribute_present?(_attribute_name) }
  # BUGFIX: Hash#slice takes a splat of keys. Passing the array itself
  # looked the whole array up as a single key, always producing an empty
  # hash, so nothing was ever assigned.
  assign_attributes(attributes_hash.slice(*attributes_to_assign))
end
"resource": ""
} |
q9218 | MediaWiki.Query.pages | train | def pages
result_map = map_query_to_results
query_result["query"]["pages"].each do |key, value|
page_title = value["title"]
original_query = find_result_map_match_for_title(result_map, page_title)
@page_hash[original_query] = MediaWiki::Page.new(value)
end
@page_hash
end | ruby | {
"resource": ""
} |
q9219 | MediaWiki.Query.map_query_to_results | train | def map_query_to_results
#Initalize map
result_map = initialize_map
# Apply the normalization to the result map
normalized = get_query_map("normalized")
if normalized
result_map = get_normalizations_for(result_map, normalized)
end
# Apply the redirects to the result map
redirects = get_query_map("redirects")
if redirects
result_map = get_redirects_for(result_map, redirects)
end
result_map
end | ruby | {
"resource": ""
} |
# Records a GPS point on the track, updating the cached start/end markers
# when the new point falls outside the currently known time range.
#
# @param lat [Numeric] latitude
# @param lon [Numeric] longitude
# @param elevation [Numeric] elevation
# @param time [Time, String] timestamp (strings are parsed with Time.parse)
# @return [true]
def add_point(lat:, lon:, elevation:, time:)
  time = Time.parse(time) unless time.is_a?(Time)
  location = [lat, lon, elevation]
  if @start_time.nil? || time < @start_time
    @start_time = time
    @start_location = location
  end
  if @end_time.nil? || time > @end_time
    @end_time = time
    @end_location = location
  end
  @points[time.to_i] = location
  true
end
"resource": ""
} |
q9221 | WhereWasI.Track.in_time_range? | train | def in_time_range?(time)
time = Time.parse(time) if ! time.is_a?(Time)
time_range.cover?(time)
end | ruby | {
"resource": ""
} |
# Interpolated track position at the given time.
#
# @param time [Time, String, Integer] moment to query; strings are parsed
#   with Time.parse, integers are treated as epoch seconds
# @return [Hash, nil] interpolated location data, or nil when the time
#   falls outside the track's recorded range
# @raise [ArgumentError] when time is none of the accepted types
def at(time)
  time = Time.parse(time) if time.is_a?(String)
  time = Time.at(time) if time.is_a?(Integer)
  # Message updated: Fixnum was removed in Ruby 2.4; the check is Integer.
  raise ArgumentError, "time must be a Time, String, or Integer" if ! time.is_a?(Time)
  return nil if ! in_time_range?(time)
  @interp ||= Interpolate::Points.new(@points)
  data = @interp.at(time.to_i)
  self.class.array_to_hash(data)
end
"resource": ""
} |
# Runs one or more shell commands, each executed from within +dir+.
#
# @param dir [String] directory to cd into before each command
# @param shell_commands [String, Array<String>] command(s) to execute
def sh_in_dir(dir, shell_commands)
  commands = shell_commands.is_a?(String) ? [shell_commands] : shell_commands
  commands.each do |shell_command|
    sh %(cd #{dir} && #{shell_command.strip})
  end
end
"resource": ""
} |
q9224 | ODBA.Cache.create_deferred_indices | train | def create_deferred_indices(drop_existing = false)
@deferred_indices.each { |definition|
name = definition.index_name
if(drop_existing && self.indices.include?(name))
drop_index(name)
end
unless(self.indices.include?(name))
index = create_index(definition)
if(index.target_klass.respond_to?(:odba_extent))
index.fill(index.target_klass.odba_extent)
end
end
}
end | ruby | {
"resource": ""
} |
q9225 | ODBA.Cache.create_index | train | def create_index(index_definition, origin_module=Object)
transaction {
klass = if(index_definition.fulltext)
FulltextIndex
elsif(index_definition.resolve_search_term.is_a?(Hash))
ConditionIndex
else
Index
end
index = klass.new(index_definition, origin_module)
indices.store(index_definition.index_name, index)
indices.odba_store_unsaved
index
}
end | ruby | {
"resource": ""
} |
# Deletes a persistable from the cache and the storage layer.
#
# Every object connected to the deleted one is fetched, asked to cut its
# connection, and re-saved so no dangling references remain. Cache
# entries for both the odba_id and the odba_name are dropped, the
# persistable is removed from storage, and its index entries are deleted.
#
# @param odba_object [Persistable] the object to delete
# @return [Persistable] the deleted object
def delete(odba_object)
  odba_id = odba_object.odba_id
  name = odba_object.odba_name
  odba_object.odba_notify_observers(:delete, odba_id, odba_object.object_id)
  rows = ODBA.storage.retrieve_connected_objects(odba_id)
  rows.each { |row|
    id = row.first
    # Self-Referencing objects don't have to be resaved
    begin
      if(connected_object = fetch(id, nil))
        connected_object.odba_cut_connection(odba_object)
        connected_object.odba_isolated_store
      end
    rescue OdbaError
      # Best-effort: a connected object that can no longer be fetched is
      # logged and skipped rather than aborting the whole delete.
      warn "OdbaError ### deleting #{odba_object.class}:#{odba_id}"
      warn " ### while looking for connected object #{id}"
    end
  }
  delete_cache_entry(odba_id)
  delete_cache_entry(name)
  ODBA.storage.delete_persistable(odba_id)
  delete_index_element(odba_object)
  odba_object
end
"resource": ""
} |
q9227 | ODBA.Cache.drop_index | train | def drop_index(index_name)
transaction {
ODBA.storage.drop_index(index_name)
self.delete(self.indices[index_name])
}
end | ruby | {
"resource": ""
} |
# Reserves and returns the next available odba_id, propagating the
# reservation to all connected peer caches. Retries on duplicate ids.
#
# @return [Integer] the reserved id
def next_id
  if @file_lock
    dbname = ODBA.storage.instance_variable_get('@dbi').dbi_args.first.split(/:/).last
    id = new_id(dbname, ODBA.storage)
  else
    id = ODBA.storage.next_id
  end
  @peers.each do |peer|
    # BUGFIX: `peer.reserve_next_id id rescue DRb::DRbError` used the
    # rescue *modifier*, which rescues StandardError and evaluates
    # DRb::DRbError as the fallback *value* -- it never selected the
    # exception class. Rescue DRb errors explicitly instead.
    begin
      peer.reserve_next_id id
    rescue DRb::DRbError
      # unreachable peer: skip
    end
  end
  id
rescue OdbaDuplicateIdError
  retry
end
"resource": ""
} |
q9229 | ODBA.Cache.retrieve_from_index | train | def retrieve_from_index(index_name, search_term, meta=nil)
index = indices.fetch(index_name)
ids = index.fetch_ids(search_term, meta)
if meta.respond_to?(:error_limit) && (limit = meta.error_limit) \
&& (size = ids.size) > limit.to_i
error = OdbaResultLimitError.new
error.limit = limit
error.size = size
error.index = index_name
error.search_term = search_term
error.meta = meta
raise error
end
bulk_fetch(ids, nil)
end | ruby | {
"resource": ""
} |
# Stores a persistable object: dumps it to storage, records its
# connections and indices, updates the cache, and invalidates peers.
#
# @param object [Persistable] the object to store
# @return [Persistable] the cached representation of the stored object
def store(object)
  odba_id = object.odba_id
  name = object.odba_name
  object.odba_notify_observers(:store, odba_id, object.object_id)
  if(ids = Thread.current[:txids])
    # Record the store inside the current transaction for rollback.
    ids.unshift([odba_id,name])
  end
  ## get target_ids before anything else
  target_ids = object.odba_target_ids
  changes = store_collection_elements(object)
  prefetchable = object.odba_prefetch?
  dump = object.odba_isolated_dump
  ODBA.storage.store(odba_id, dump, name, prefetchable, object.class)
  store_object_connections(odba_id, target_ids)
  update_references(target_ids, object)
  object = store_cache_entry(odba_id, object, name)
  update_indices(object)
  @peers.each do |peer|
    # BUGFIX: the former rescue modifier (`... rescue DRb::DRbError`)
    # rescued StandardError and returned the DRb::DRbError class as a
    # value instead of rescuing DRb failures. Ignore only DRb errors.
    begin
      peer.invalidate! odba_id
    rescue DRb::DRbError
      # unreachable peer: skip
    end
  end
  object
end
"resource": ""
} |
q9231 | I18nAdminUtils.ApplicationHelper.translation_missing_icon | train | def translation_missing_icon(translation)
missing_translations = translation.missing_translations
color_id = (missing_translations.size.to_f/translation.translations.size.to_f*5).ceil-1
if missing_translations.size == 0
content_tag 'span', '', :class => 'glyphicon glyphicon-ok greentext',
:title => 'This key has been translated in all languages', :rel => 'tooltip'
else
content_tag 'span', "(#{missing_translations.size})", :class => "color-range-#{color_id} bold",
:title => missing_translations.keys.join(','), :rel => 'tooltip'
end
end | ruby | {
"resource": ""
} |
# Prefixes every CSS class in the given HTML with +namespace+.
#
# @param html [String] markup to rewrite (mutated in place via gsub!)
# @param namespace [String] prefix to prepend to each class name
# @return [String] the rewritten html
def add_namespace(html, namespace)
  html.gsub!(/class="([a-zA-Z0-9\-_ ]*)"/) do
    # The capture is the class list. Fixes two defects in the old code:
    # a useless `c =` assignment inside map!, and a NoMethodError on
    # empty attributes (class="") from split('"')[1] returning nil.
    classes = Regexp.last_match(1).split(' ').map { |c| namespace + c }
    'class="' + classes.join(' ') + '"'
  end
  html
end
"resource": ""
} |
q9233 | StixSchemaSpy.SimpleType.enumeration_values | train | def enumeration_values
enumeration = @xml.xpath('./xs:restriction/xs:enumeration', {'xs' => 'http://www.w3.org/2001/XMLSchema'})
if enumeration.length > 0
return enumeration.map {|elem| [elem.attributes['value'].value, elem.xpath('./xs:annotation/xs:documentation', {'xs' => 'http://www.w3.org/2001/XMLSchema'}).text]}
else
raise "Not an enumeration"
end
end | ruby | {
"resource": ""
} |
# Creates the default category parent (and one localized default category
# per configured language) unless a default parent already exists.
#
# @raise [RuntimeError] when the parent or a category cannot be saved
def blog__create_default_category
  category_parent = LatoBlog::CategoryParent.find_by(meta_default: true)
  return if category_parent
  category_parent = LatoBlog::CategoryParent.new(meta_default: true)
  # BUGFIX: `throw` without a matching `catch` raises UncaughtThrowError;
  # these are error conditions, so raise a proper exception instead.
  raise 'Impossible to create default category parent' unless category_parent.save
  languages = blog__get_languages_identifier
  languages.each do |language|
    category = LatoBlog::Category.new(
      title: 'Default',
      meta_permalink: "default_#{language}",
      meta_language: language,
      lato_core_superuser_creator_id: 1,
      lato_blog_category_parent_id: category_parent.id
    )
    raise 'Impossible to create default category' unless category.save
  end
end
"resource": ""
} |
# Destroys every category parent that no longer has any categories.
def blog__clean_category_parents
  category_parents = LatoBlog::CategoryParent.all
  # `each`, not `map`: the block runs purely for its side effect
  # (destroying empty parents); no result array is needed.
  category_parents.each { |cp| cp.destroy if cp.categories.empty? }
end
"resource": ""
} |
q9236 | LatoBlog.Interface::Categories.blog__get_categories | train | def blog__get_categories(
order: nil,
language: nil,
search: nil,
page: nil,
per_page: nil
)
categories = LatoBlog::Category.all
# apply filters
order = order && order == 'ASC' ? 'ASC' : 'DESC'
categories = _categories_filter_by_order(categories, order)
categories = _categories_filter_by_language(categories, language)
categories = _categories_filter_search(categories, search)
# take categories uniqueness
categories = categories.uniq(&:id)
# save total categories
total = categories.length
# manage pagination
page = page&.to_i || 1
per_page = per_page&.to_i || 20
categories = core__paginate_array(categories, per_page, page)
# return result
{
categories: categories && !categories.empty? ? categories.map(&:serialize) : [],
page: page,
per_page: per_page,
order: order,
total: total
}
end | ruby | {
"resource": ""
} |
q9237 | SugarfreeConfig.Config.fetch_config | train | def fetch_config
Rails.logger.debug "Loading #{@file}::#{@env}" if Object.const_defined?('Rails') && Rails.logger.present?
YAML::load_file(@file)[@env.to_s]
end | ruby | {
"resource": ""
} |
q9238 | SugarfreeConfig.Config.default_options | train | def default_options
if Object.const_defined?('Rails')
{
:file => Rails.root.join('config', 'config.yml'),
:reload => Rails.env.development?,
:env => Rails.env
}
else
{
:file => File.expand_path("config.yml"),
:reload => false,
:env => "development"
}
end
end | ruby | {
"resource": ""
} |
q9239 | SugarfreeConfig.ConfigIterator.next | train | def next
if (value = @scoped_config[@path_elements.last]).nil?
raise ConfigKeyException.new(@path_elements)
elsif value.is_a?(Hash)
@scoped_config = value
self
else
value
end
end | ruby | {
"resource": ""
} |
q9240 | ClassProxy.ClassMethods.proxy_methods | train | def proxy_methods(*methods)
@_methods ||= {}
methods.each do |method|
if method.is_a? Symbol
# If given a symbol, store as a method to overwrite and use the default loader
proxy_method method
elsif method.is_a? Hash
# If its a hash it will include methods to overwrite along with custom loaders
method.each { |method_name, proc| proxy_method method_name, proc }
end
end
end | ruby | {
"resource": ""
} |
q9241 | ClassProxy.ClassMethods.fetch | train | def fetch(args, options={})
@primary_fetch.is_a?(Proc) ? @primary_fetch[args] : (raise NotFound)
rescue NotFound
return nil if options[:skip_fallback]
run_fallback(args)
end | ruby | {
"resource": ""
} |
q9242 | Weechat.Plugin.unload | train | def unload(force = false)
if name == "ruby" and !force
Weechat.puts "Won't unload the ruby plugin unless you force it."
false
else
Weechat.exec("/plugin unload #{name}")
true
end
end | ruby | {
"resource": ""
} |
# All scripts currently loaded by this plugin.
#
# @return [Array<Script>] one Script per infolist entry
def scripts
  Infolist.parse("#{name}_script").map do |entry|
    Script.new(entry[:pointer], self)
  end
end
"resource": ""
} |
q9244 | HttpPing.HttpPing::WMI.ping | train | def ping(host = @host, options = {})
super(host)
lhost = Socket.gethostname
cs = "winmgmts:{impersonationLevel=impersonate}!//#{lhost}/root/cimv2"
wmi = WIN32OLE.connect(cs)
query = "select * from win32_pingstatus where address = '#{host}'"
unless options.empty?
options.each{ |key, value|
if value.is_a?(String)
query << " and #{key} = '#{value}'"
else
query << " and #{key} = #{value}"
end
}
end
status = Struct::PingStatus.new
wmi.execquery(query).each{ |obj|
status.address = obj.Address
status.buffer_size = obj.BufferSize
status.no_fragmentation = obj.NoFragmentation
status.primary_address_resolution_status = obj.PrimaryAddressResolutionStatus
status.protocol_address = obj.ProtocolAddress
status.protocol_address_resolved = obj.ProtocolAddressResolved
status.record_route = obj.RecordRoute
status.reply_inconsistency = obj.ReplyInconsistency
status.reply_size = obj.ReplySize
status.resolve_address_names = obj.ResolveAddressNames
status.response_time = obj.ResponseTime
status.response_time_to_live = obj.ResponseTimeToLive
status.route_record = obj.RouteRecord
status.route_record_resolved = obj.RouteRecordResolved
status.source_route = obj.SourceRoute
status.source_route_type = obj.SourceRouteType
status.status_code = obj.StatusCode
status.timeout = obj.Timeout
status.timestamp_record = obj.TimeStampRecord
status.timestamp_record_address = obj.TimeStampRecordAddress
status.timestamp_record_address_resolved = obj.TimeStampRecordAddressResolved
status.timestamp_route = obj.TimeStampRoute
status.time_to_live = obj.TimeToLive
status.type_of_service = obj.TypeOfService
}
status.freeze # Read-only data
end | ruby | {
"resource": ""
} |
q9245 | CaRuby.SQLExecutor.query | train | def query(sql, *args, &block)
fetched = nil
execute do |dbh|
result = dbh.execute(sql, *args)
if block_given? then
result.each(&block)
else
fetched = result.fetch(:all)
end
result.finish
end
fetched
end | ruby | {
"resource": ""
} |
q9246 | CaRuby.SQLExecutor.transact | train | def transact(sql=nil, *args)
# Work-around for rcbi nil substitution.
if sql then
sql, *args = replace_nil_binds(sql, args)
transact { |dbh| dbh.execute(sql, *args) }
elsif block_given? then
execute { |dbh| dbh.transaction { yield dbh } }
else
raise ArgumentError.new("SQL executor is missing the required execution block")
end
end | ruby | {
"resource": ""
} |
# Works around an RDBI bug by splicing literal NULLs into the SQL in
# place of any nil bind arguments.
#
# Only statements of the form `... VALUES (...)` are rewritten; each `?`
# placeholder whose corresponding argument is nil becomes NULL and the
# nil is removed from the argument list.
#
# @param sql [String] SQL statement containing `?` placeholders
# @param args [Array] bind values, possibly containing nils
# @return [Array] the (possibly rewritten) sql followed by the remaining args
# @raise [ArgumentError] when the statement contains a quoted `?`, which
#   this text-based rewrite cannot handle safely
def replace_nil_binds(sql, args)
  nils = []
  args.each_with_index { |value, i| nils << i if value.nil? }
  unless nils.empty? then
    logger.debug { "SQL executor working around RDBI bug by eliminating the nil arguments #{nils.to_series} for the SQL:\n#{sql}..." }
    # Quoted ? is too much of a pain for this hack; bail out.
    raise ArgumentError.new("RDBI work-around does not support quoted ? in transactional SQL: #{sql}") if sql =~ /'[^,]*[?][^,]*'/
    # Split into: everything up to and including "VALUES (", the
    # placeholder list, and the trailing ")...".
    prefix, binds_s, suffix = /(.+\s*values\s*\()([^)]*)(\).*)/i.match(sql).captures
    sql = prefix
    binds = binds_s.split('?')
    last = binds_s[-1, 1]
    del_cnt = 0
    # Rebuild the VALUES list, emitting NULL for nil args and `?` for the
    # placeholders that remain bound.
    binds.each_with_index do |s, i|
      sql << s
      if nils.include?(i) then
        # Remove the consumed nil; del_cnt compensates for prior deletions.
        args.delete_at(i - del_cnt)
        del_cnt += 1
        sql << 'NULL'
      elsif i < binds.size - 1 or last == '?'
        sql << '?'
      end
    end
    sql << suffix
  end
  logger.debug { "SQL executor converted the SQL to:\n#{sql}\nwith arguments #{args.qp}" }
  return args.unshift(sql)
end
"resource": ""
} |
q9248 | BuoyData.NoaaStation.current_reading | train | def current_reading(doc)
reading = {}
xpath = "//table/caption[@class='titleDataHeader']["
xpath += "contains(text(),'Conditions')"
xpath += " and "
xpath += "not(contains(text(),'Solar Radiation'))"
xpath += "]"
# Get the reading timestamp
source_updated_at = reading_timestamp(doc, xpath)
reading[:source_updated_at] = source_updated_at
# Get the reading data
xpath += "/../tr"
elements = doc.xpath xpath
unless elements.empty?
elements.each do |element|
r = scrape_condition_from_element(element)
reading.merge! r unless r.empty?
end
end
reading
end | ruby | {
"resource": ""
} |
q9249 | VcenterLib.VmConverter.facts | train | def facts
logger.debug "get complete data of all VMs in all datacenters: begin"
result = Hash[vm_mos_to_h(@vcenter.vms).map do |h|
[h['name'], Hash[h.map { |k, v| [k.tr('.', '_'), v] }]]
end]
logger.debug "get complete data of all VMs in all datacenters: end"
result
end | ruby | {
"resource": ""
} |
# Depth-first pre-order traversal of this element's tree.
#
# @param node [Object, nil] subtree root to start from (defaults to self)
# @yield [current] each node, children before remaining siblings
# @return [Enumerator] when no block is given
# @return [Object] the traversal root (node or self) when a block is given
def traverse(node=nil, &block)
  return self.to_enum unless block_given?
  node_stack = [node || self]
  until node_stack.empty?
    current = node_stack.shift
    if current
      yield current
      # Prepend children so they are visited before remaining siblings
      # (depth-first order).
      node_stack = node_stack.insert(0, *current.nodes) if current.respond_to?(:nodes)
    end
  end
  node || self if block_given?
end
"resource": ""
} |
q9251 | Cloudpassage.Base.method_missing | train | def method_missing(sym, *args, &block)
if (data && data[sym])
data[sym]
else
super(sym, *args, &block)
end
end | ruby | {
"resource": ""
} |
# Validates this Vagrant instance: its path must exist, be a directory,
# and contain an executable vagrant binary.
#
# @return [Derelict::Instance] self, for chaining
# @raise [NotFound, NonDirectory, MissingBinary] on validation failure
def validate!
  logger.debug "Starting validation for #{description}"
  # File.exist? replaces File.exists?, deprecated and removed in Ruby 3.2.
  raise NotFound.new path unless File.exist? path
  raise NonDirectory.new path unless File.directory? path
  raise MissingBinary.new vagrant unless File.exist? vagrant
  raise MissingBinary.new vagrant unless File.executable? vagrant
  logger.info "Successfully validated #{description}"
  self
end
"resource": ""
} |
q9253 | Derelict.Instance.version | train | def version
logger.info "Determining Vagrant version for #{description}"
output = execute!("--version").stdout
Derelict::Parser::Version.new(output).version
end | ruby | {
"resource": ""
} |
q9254 | Derelict.Instance.execute | train | def execute(subcommand, *arguments, &block)
options = arguments.last.is_a?(Hash) ? arguments.pop : Hash.new
command = command(subcommand, *arguments)
command = "sudo -- #{command}" if options.delete(:sudo)
logger.debug "Executing #{command} using #{description}"
Executer.execute command, options, &block
end | ruby | {
"resource": ""
} |
q9255 | Derelict.Instance.command | train | def command(subcommand, *arguments)
args = [vagrant, subcommand.to_s, arguments].flatten
args.map {|a| Shellwords.escape a }.join(' ').tap do |command|
logger.debug "Generated command '#{command}' from " +
"subcommand '#{subcommand.to_s}' with arguments " +
arguments.inspect
end
end | ruby | {
"resource": ""
} |
q9256 | Deas.ShowExceptions.call! | train | def call!(env)
status, headers, body = @app.call(env)
if error = env['deas.error']
error_body = Body.new(error)
headers['Content-Length'] = error_body.size.to_s
headers['Content-Type'] = error_body.mime_type.to_s
body = [error_body.content]
end
[status, headers, body]
end | ruby | {
"resource": ""
} |
# Signs in to the help desk: fetches the sign-in page to obtain cookies
# and tokens, then posts the credentials to the sessions endpoint.
#
# @raise on network, token-parsing or session errors
def sign_in
  # Contact sign in page to set cookies.
  begin
    sign_in_res = RestClient.get(Endpoints::SIGN_IN)
  rescue RestClient::ExceptionWithResponse => error
    fail HelpDeskAPI::Exceptions.SignInError, "Error contacting #{Endpoints::SIGN_IN}: #{error}"
  end
  # Parse authenticity_token from sign in form.
  # NOTE(review): position-based lookup of the second form input is
  # fragile -- confirm the sign-in form layout if this breaks.
  page = Nokogiri::HTML(sign_in_res)
  HelpDeskAPI::Authentication.authenticity_token = page.css('form').css('input')[1]['value']
  unless HelpDeskAPI::Authentication.authenticity_token
    fail HelpDeskAPI::Exceptions.AuthenticityTokenError, 'Error parsing authenticity_token: Token not found.'
  end
  # Parse sign_in HTML for csrf-token
  page.css('meta').each do |tag|
    HelpDeskAPI::Authentication.csrf_token = tag['content'] if tag['name'] == 'csrf-token'
  end
  unless HelpDeskAPI::Authentication.csrf_token
    fail HelpDeskAPI::Exceptions.CsrfTokenError, 'No csrf-token found'
  end
  # Set cookies for later requests
  HelpDeskAPI::Authentication.cookies = sign_in_res.cookies
  # Simulate sign in form submit button.
  body = {
    'authenticity_token': HelpDeskAPI::Authentication.authenticity_token,
    'user[email_address]': HelpDeskAPI::Authentication.username,
    'user[password]': HelpDeskAPI::Authentication.password
  }
  RestClient.post(Endpoints::SESSIONS, body, {:cookies => HelpDeskAPI::Authentication.cookies}) do |response, request, result, &block|
    # Response should be a 302 redirect from /sessions
    if Request::responseError?(response)
      # BUGFIX: the old message interpolated `error`, a variable that only
      # exists inside the rescue clause above -- referencing it here raised
      # NameError. Report the actual failing response instead.
      fail HelpDeskAPI::Exceptions.SessionsError, "Error contacting #{Endpoints::SESSIONS}: #{response}"
    end
    # Update cookies just incase
    HelpDeskAPI::Authentication.cookies = response.cookies
  end
end
"resource": ""
} |
q9258 | XS.Message.copy_in_bytes | train | def copy_in_bytes bytes, len
data_buffer = LibC.malloc len
# writes the exact number of bytes, no null byte to terminate string
data_buffer.write_string bytes, len
# use libC to call free on the data buffer; earlier versions used an
# FFI::Function here that called back into Ruby, but Rubinius won't
# support that and there are issues with the other runtimes too
LibXS.xs_msg_init_data @pointer, data_buffer, len, LibC::Free, nil
end | ruby | {
"resource": ""
} |
q9259 | Confuse.Config.check | train | def check
@definition.namespaces.each do |(namespace, ns)|
ns.items.each do |key, _|
lookup(namespace, key)
end
end
end | ruby | {
"resource": ""
} |
q9260 | Rsxml.Util.check_opts | train | def check_opts(constraints, opts)
opts ||= {}
opts.each{|k,v| raise "opt not permitted: #{k.inspect}" if !constraints.has_key?(k)}
Hash[constraints.map do |k,constraint|
if opts.has_key?(k)
v = opts[k]
if constraint.is_a?(Array)
raise "unknown value for opt #{k.inspect}: #{v.inspect}. permitted values are: #{constraint.inspect}" if !constraint.include?(v)
[k,v]
elsif constraint.is_a?(Hash)
raise "opt #{k.inspect} must be a Hash" if !v.is_a?(Hash)
[k,check_opts(constraint, v || {})]
else
[k,v]
end
end
end]
end | ruby | {
"resource": ""
} |
# Largest of the four given comparable values.
#
# @return [Object] the maximum of a, b, c and d
def max4(a, b, c, d)
  [a, b, c, d].max
end
"resource": ""
} |
q9262 | MultiForecast.Client.get_complex | train | def get_complex(path)
client(path).get_complex(service_name(path), section_name(path), graph_name(path)).tap do |graph|
graph['base_uri'] = client(path).base_uri
graph['path'] = path
end
end | ruby | {
"resource": ""
} |
q9263 | MultiForecast.Client.delete_complex | train | def delete_complex(path)
client(path).delete_complex(service_name(path), section_name(path), graph_name(path))
end | ruby | {
"resource": ""
} |
q9264 | FentonShell.Certificate.certificate_create | train | def certificate_create(global_options, options)
result = Excon.post(
"#{global_options[:fenton_server_url]}/certificates.json",
body: certificate_json(options),
headers: { 'Content-Type' => 'application/json' }
)
write_client_certificate(
public_key_cert_location(options[:public_key]),
JSON.parse(result.body)['data']['attributes']['certificate']
)
[result.status, JSON.parse(result.body)]
end | ruby | {
"resource": ""
} |
q9265 | HasMedia.ClassMethods.set_relations | train | def set_relations(context, relation)
@contexts ||= {}
@contexts[relation] ||= []
@media_relation_set ||= []
if @contexts[relation].include?(context)
raise Exception.new("You should NOT use same context identifier for several has_one or has_many relation to media")
end
@contexts[relation] << context
return if @media_relation_set.include? self
has_many :media, :through => :media_links, :dependent => :destroy
@media_relation_set << self
end | ruby | {
"resource": ""
} |
q9266 | Some.API.method_missing | train | def method_missing meth, *args, &block
meth_s = meth.to_s
if @method && meth_s =~ API_REGEX
if meth_s.end_with?('!')
# `foo! bar' is syntactic sugar for `foo.! bar'
self[meth_s[0...-1]].!(args[0] || {})
else
# chain the method name onto URL path
self[meth_s]
end
else
super
end
end | ruby | {
"resource": ""
} |
# Writes the spreadsheet to the given path.
#
# @param path [String] destination file path
# @raise [ArgumentError] unless path is a String
def save(path)
  raise ArgumentError, 'save expects a string path' unless path.kind_of?(String)
  Xcellus::_save(@handle, path)
end
"resource": ""
} |
q9268 | Samsao.Helpers.changelog_modified? | train | def changelog_modified?(*changelogs)
changelogs = config.changelogs if changelogs.nil? || changelogs.empty?
changelogs.any? { |changelog| git.modified_files.include?(changelog) }
end | ruby | {
"resource": ""
} |
q9269 | Samsao.Helpers.has_app_changes? | train | def has_app_changes?(*sources)
sources = config.sources if sources.nil? || sources.empty?
sources.any? do |source|
pattern = Samsao::Regexp.from_matcher(source, when_string_pattern_prefix_with: '^')
modified_file?(pattern)
end
end | ruby | {
"resource": ""
} |
# Shortens +input+ to at most +max+ characters, replacing the final
# (possibly clipped) word with '...' when truncation occurs.
#
# @param input [String, nil] text to shorten (returned as-is when nil or short enough)
# @param max [Integer] maximum length before truncation kicks in
# @return [String, nil]
def truncate(input, max = 30)
  return input if input.nil?
  return input unless input.length > max
  clipped = input[0..max - 1]
  clipped.gsub(/\s\w+\s*$/, '...')
end
"resource": ""
} |
q9271 | MultiGit.Ref.resolve | train | def resolve
@leaf ||= begin
ref = self
loop do
break ref unless ref.target.kind_of? MultiGit::Ref
ref = ref.target
end
end
end | ruby | {
"resource": ""
} |
q9272 | MultiGit.Ref.commit | train | def commit(options = {}, &block)
resolve.update(options.fetch(:lock, :optimistic)) do |current|
Commit::Builder.new(current, &block)
end
return reload
end | ruby | {
"resource": ""
} |
# Sets the currently authenticated user and synchronizes the session.
#
# An existing session-uid token is reused (and touched, extending its
# life) when it already belongs to the same user; a token for a different
# user is destroyed. When +user+ is nil the session uid is removed.
#
# @param user [User, nil] the user to mark as signed in, or nil to sign out
# @return [User, nil] the given user
def set_session_current_user(user)
  self.current_user = user
  # Try to reuse existing sessions.
  if session[:authpwn_suid]
    token = Tokens::SessionUid.with_code(session[:authpwn_suid]).first
    if token
      if token.user == user
        # Same user: refresh the token timestamp and keep the session.
        token.touch
        return user
      else
        # Session belonged to someone else: invalidate it.
        token.destroy
      end
    end
  end
  if user
    session[:authpwn_suid] = Tokens::SessionUid.random_for(user,
        request.remote_ip, request.user_agent || 'N/A').suid
  else
    session.delete :authpwn_suid
  end
end
"resource": ""
} |
q9274 | Authpwn.ControllerInstanceMethods.authenticate_using_session | train | def authenticate_using_session
return if current_user
session_uid = session[:authpwn_suid]
user = session_uid && Tokens::SessionUid.authenticate(session_uid)
self.current_user = user if user && !user.instance_of?(Symbol)
end | ruby | {
"resource": ""
} |
q9275 | Tkar.Canvas.del | train | def del tkar_id
tkaroid = @objects[tkar_id]
if tkaroid
if @follow_id == tkar_id
follow nil
end
delete tkaroid.tag
@objects.delete tkar_id
@changed.delete tkar_id
get_objects_by_layer(tkaroid.layer).delete tkaroid
end
end | ruby | {
"resource": ""
} |
q9276 | Aptly.Repo.add | train | def add path, kwargs={}
remove_files = kwargs.arg :remove_files, false
cmd = 'aptly repo add'
cmd += ' -remove-files' if remove_files
cmd += " #{@name.quote} #{path}"
Aptly::runcmd cmd
end | ruby | {
"resource": ""
} |
q9277 | Aptly.Repo.import | train | def import from_mirror, kwargs={}
deps = kwargs.arg :deps, false
packages = kwargs.arg :packages, []
if packages.length == 0
raise AptlyError.new '1 or more packages are required'
end
cmd = 'aptly repo import'
cmd += ' -with-deps' if deps
cmd += " #{from_mirror.quote} #{@name.quote}"
packages.each {|p| cmd += " #{p.quote}"}
Aptly::runcmd cmd
end | ruby | {
"resource": ""
} |
q9278 | Aptly.Repo.copy | train | def copy from_repo, to_repo, kwargs={}
deps = kwargs.arg :deps, false
packages = kwargs.arg :packages, []
if packages.length == 0
raise AptlyError.new '1 or more packages are required'
end
cmd = 'aptly repo copy'
cmd += ' -with-deps' if deps
cmd += " #{from_repo.quote} #{to_repo.quote}"
packages.each {|p| cmd += " #{p.quote}"}
Aptly::runcmd cmd
end | ruby | {
"resource": ""
} |
q9279 | GoogleApi.Session.login | train | def login(code = nil)
@client = Google::APIClient.new
@client.authorization.client_id = c('client_id')
@client.authorization.client_secret = c('client_secret')
@client.authorization.scope = @scope
@client.authorization.redirect_uri = c('redirect_uri')
@api = @client.discovered_api(@name_api, @version_api)
unless code
return @client.authorization.authorization_uri.to_s
end
begin
@client.authorization.code = code
@client.authorization.fetch_access_token!
rescue
return false
end
return true
end | ruby | {
"resource": ""
} |
q9280 | GoogleApi.Session.login_by_line | train | def login_by_line(server = 'http://localhost/oauth2callback', port = 0)
begin
require "launchy" # open browser
rescue
raise GoogleApi::RequireError, "You don't have launchy gem. Firt install it: gem install launchy."
end
require "socket" # make tcp server
require "uri" # parse uri
uri = URI(server)
# Start webserver.
webserver = TCPServer.new(uri.host, port)
# By default port is 0. It means that TCPServer will get first free port.
# Port is required for redirect_uri.
uri.port = webserver.addr[1]
# Add redirect_uri for google oauth 2 callback.
_config.send(@config_name).redirect_uri = uri.to_s
# Open browser.
Launchy.open(login)
# Wait for new session.
session = webserver.accept
# Parse header for query.
request = session.gets.gsub(/GET\ \//, '').gsub(/\ HTTP.*/, '')
request = Hash[URI.decode_www_form(URI(request).query)]
# Failure login
to_return = false
message = "You have not been logged. Please try again."
if login(request['code'])
message = "You have been successfully logged. Now you can close the browser."
to_return = true
end
session.write(message)
# Close session and webserver.
session.close
return to_return
end | ruby | {
"resource": ""
} |
q9281 | Vtasks.Docker.add_namespace | train | def add_namespace(image, path)
namespace path.to_sym do |_args|
require 'rspec/core/rake_task'
::RSpec::Core::RakeTask.new(:spec) do |task|
task.pattern = "#{path}/spec/*_spec.rb"
end
docker_image = Vtasks::Docker::Image.new(image, path, args)
lint_image(path)
desc 'Build and tag docker image'
task :build do
docker_image.build_with_tags
end
desc 'Publish docker image'
task :push do
docker_image.push
end
end
end | ruby | {
"resource": ""
} |
q9282 | Vtasks.Docker.dockerfiles | train | def dockerfiles
@dockerfiles = Dir.glob('*').select do |dir|
File.directory?(dir) && File.exist?("#{dir}/Dockerfile")
end
end | ruby | {
"resource": ""
} |
q9283 | Vtasks.Docker.list_images | train | def list_images
desc 'List all Docker images'
task :list do
info dockerfiles.map { |image| File.basename(image) }
end
end | ruby | {
"resource": ""
} |
q9284 | ReturnHook.FormTagHelper.html_options_for_form | train | def html_options_for_form(url_for_options, options)
options.stringify_keys.tap do |html_options|
html_options["enctype"] = "multipart/form-data" if html_options.delete("multipart")
# The following URL is unescaped, this is just a hash of options, and it is the
# responsibility of the caller to escape all the values.
## OVERRIDDEN HERE:
html_options["action"] = forward_return_hook(url_for(url_for_options))
html_options["accept-charset"] = "UTF-8"
html_options["data-remote"] = true if html_options.delete("remote")
if html_options["data-remote"] &&
!embed_authenticity_token_in_remote_forms &&
html_options["authenticity_token"].blank?
# The authenticity token is taken from the meta tag in this case
html_options["authenticity_token"] = false
elsif html_options["authenticity_token"] == true
# Include the default authenticity_token, which is only generated when its set to nil,
# but we needed the true value to override the default of no authenticity_token on data-remote.
html_options["authenticity_token"] = nil
end
end
end | ruby | {
"resource": ""
} |
q9285 | Beanstalkify.Environment.deploy! | train | def deploy!(app, settings=[])
@beanstalk.update_environment({
version_label: app.version,
environment_name: self.name,
option_settings: settings
})
end | ruby | {
"resource": ""
} |
q9286 | Beanstalkify.Environment.create! | train | def create!(archive, stack, cnames, settings=[])
params = {
application_name: archive.app_name,
version_label: archive.version,
environment_name: self.name,
solution_stack_name: stack,
option_settings: settings
}
cnames.each do |c|
if dns_available(c)
params[:cname_prefix] = c
break
else
puts "CNAME #{c} is unavailable."
end
end
@beanstalk.create_environment(params)
end | ruby | {
"resource": ""
} |
q9287 | DR.Encoding.fix_utf8 | train | def fix_utf8(s=nil)
s=self if s.nil? #if we are included
if String.method_defined?(:scrub)
#Ruby 2.1
#cf http://ruby-doc.org/core-2.1.0/String.html#method-i-scrub
return s.scrub {|bytes| '<'+bytes.unpack('H*')[0]+'>' }
else
return DR::Encoding.to_utf8(s)
end
end | ruby | {
"resource": ""
} |
q9288 | DR.Encoding.to_utf8! | train | def to_utf8!(s=nil,from:nil)
s=self if s.nil? #if we are included
from=s.encoding if from.nil?
return s.encode!('UTF-8',from, :invalid => :replace, :undef => :replace,
:fallback => Proc.new { |bytes| '<'+bytes.unpack('H*')[0]+'>' }
)
end | ruby | {
"resource": ""
} |
q9289 | Muack.Spy.__mock_dispatch_spy | train | def __mock_dispatch_spy
@stub.__mock_disps.values.flatten.each do |disp|
next unless __mock_defis.key?(disp.msg) # ignore undefined spies
defis = __mock_defis[disp.msg]
if idx = __mock_find_checked_difi(defis, disp.args, :index)
__mock_disps_push(defis.delete_at(idx)) # found, dispatch it
elsif defis.empty? # show called candidates
__mock_failed(disp.msg, disp.args)
else # show expected candidates
__mock_failed(disp.msg, disp.args, defis)
end
end
end | ruby | {
"resource": ""
} |
q9290 | Samsao.Actions.check_non_single_commit_feature | train | def check_non_single_commit_feature(level = :fail)
commit_count = git.commits.size
message = "Your feature branch should have a single commit but found #{commit_count}, squash them together!"
report(level, message) if feature_branch? && commit_count > 1
end | ruby | {
"resource": ""
} |
q9291 | Samsao.Actions.check_feature_jira_issue_number | train | def check_feature_jira_issue_number(level = :fail)
return if samsao.trivial_change? || !samsao.feature_branch?
return report(:fail, 'Your Danger config is missing a `jira_project_key` value.') unless jira_project_key?
message = 'The PR title must starts with JIRA issue number between square brackets'\
" (i.e. [#{config.jira_project_key}-XXX])."
report(level, message) unless contains_jira_issue_number?(github.pr_title)
end | ruby | {
"resource": ""
} |
q9292 | Samsao.Actions.check_fix_jira_issue_number | train | def check_fix_jira_issue_number(level = :warn)
return if samsao.trivial_change? || !samsao.fix_branch?
return report(:fail, 'Your Danger config is missing a `jira_project_key` value.') unless jira_project_key?
git.commits.each do |commit|
check_commit_contains_jira_issue_number(commit, level)
end
end | ruby | {
"resource": ""
} |
q9293 | Samsao.Actions.check_acceptance_criteria | train | def check_acceptance_criteria(level = :warn)
return unless samsao.feature_branch?
message = 'The PR description should have the acceptance criteria in the body.'
report(level, message) if (/acceptance criteria/i =~ github.pr_body).nil?
end | ruby | {
"resource": ""
} |
q9294 | Samsao.Actions.check_label_pr | train | def check_label_pr(level = :fail)
message = 'The PR should have at least one label added to it.'
report(level, message) if github.pr_labels.nil? || github.pr_labels.empty?
end | ruby | {
"resource": ""
} |
q9295 | Samsao.Actions.report | train | def report(level, content)
case level
when :warn
warn content
when :fail
fail content
when :message
message content
else
raise "Report level '#{level}' is invalid."
end
end | ruby | {
"resource": ""
} |
q9296 | Samsao.Actions.check_commit_contains_jira_issue_number | train | def check_commit_contains_jira_issue_number(commit, type)
commit_id = "#{shorten_sha(commit.sha)} ('#{truncate(commit.message)}')"
jira_project_key = config.jira_project_key
message = "The commit message #{commit_id} should contain JIRA issue number" \
" between square brackets (i.e. [#{jira_project_key}-XXX]), multiple allowed" \
" (i.e. [#{jira_project_key}-XXX, #{jira_project_key}-YYY, #{jira_project_key}-ZZZ])"
report(type, message) unless contains_jira_issue_number?(commit.message)
end | ruby | {
"resource": ""
} |
q9297 | ZTK.Background.wait | train | def wait
config.ui.logger.debug { "wait" }
pid, status = (Process.wait2(@pid) rescue nil)
if !pid.nil? && !status.nil?
data = (Marshal.load(Base64.decode64(@parent_reader.read.to_s)) rescue nil)
config.ui.logger.debug { "read(#{data.inspect})" }
!data.nil? and @result = data
@parent_reader.close
@parent_writer.close
return [pid, status, data]
end
nil
end | ruby | {
"resource": ""
} |
q9298 | SycLink.Formatter.extract_columns | train | def extract_columns(rows, header)
columns = []
header.each do |h|
columns << rows.map do |r|
r.send(h)
end
end
columns
end | ruby | {
"resource": ""
} |
q9299 | SycLink.Formatter.max_column_widths | train | def max_column_widths(columns, header, opts = {})
row_column_widths = columns.map do |c|
c.reduce(0) { |m, v| [m, v.nil? ? 0 : v.length].max }
end
header_column_widths = header.map { |h| h.length }
row_column_widths = header_column_widths if row_column_widths.empty?
widths = row_column_widths.zip(header_column_widths).map do |column|
column.reduce(0) { |m, v| [m, v].max }
end
widths.empty? ? [] : scale_widths(widths, opts)
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.