_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q21200 | Rfd.Commands.ctrl_a | train | def ctrl_a
# Toggle "mark all": if not every regular entry is currently marked, mark
# them all; if they already are, unmark them all. Then redraw the list and
# the marked-items summary and restore the cursor.
# (Assumes '.' and '..' are excluded from marking — toggle_mark presumably
# no-ops for them; verify against Item implementation.)
mark = marked_items.size != (items.size - 2) # exclude . and ..
items.each {|i| i.toggle_mark unless i.marked? == mark}
draw_items
draw_marked_items
move_cursor current_row
end | ruby | {
"resource": ""
} |
q21201 | Rfd.Commands.enter | train | def enter
# Open the item under the cursor: ignore '.', go up a level for '..',
# view files when browsing inside a zip, descend into directories and
# zip archives, and view anything else.
return if current_item.name == '.'
if current_item.name == '..'
cd '..'
elsif in_zip?
v
elsif current_item.directory? || current_item.zip?
cd current_item
else
v
end
end | ruby | {
"resource": ""
} |
q21202 | Rfd.Commands.del | train | def del
if current_dir.path != '/'
dir_was = times == 1 ? current_dir.name : File.basename(current_dir.join(['..'] * (times - 1)))
cd File.expand_path(current_dir.join(['..'] * times))
find dir_was
end
end | ruby | {
"resource": ""
} |
q21203 | Riot.Message.method_missing | train | def method_missing(meth, *phrases, &block)
push(meth.to_s.gsub('_', ' '))
_inspect(phrases)
end | ruby | {
"resource": ""
} |
q21204 | Riot.Reporter.report | train | def report(description, response)
code, result = *response
case code
when :pass then
@passes += 1
pass(description, result)
when :fail then
@failures += 1
message, line, file = *response[1..-1]
fail(description, message, line, file)
when :error, :setup_error, :context_error then
@errors += 1
error(description, result)
end
end | ruby | {
"resource": ""
} |
q21205 | UTF8Cleaner.URIString.encoded_char_array | train | def encoded_char_array
# Walks the raw URI string and returns an array of "characters", where each
# element is either a literal character or a complete, valid percent-encoded
# UTF-8 sequence (e.g. "%C3%A9"). Invalid or truncated encodings are
# silently dropped from the result.
char_array = []
index = 0
while (index < data.length) do
char = data[index]
if char == '%'
# Skip the next two characters, which are the encoded byte
# indicated by this %. (Adjusted below for multibyte characters.)
skip_next = 2
# If the next character is not a hex char, drop the percent and it
unless data[index + 1] =~ HEX_CHARS_REGEX
index += 2
next
end
# If the character after that is not a hex char, drop the percent and
# both of the following chars.
unless data[index + 2] =~ HEX_CHARS_REGEX
index += 3
next
end
# How long is this character? The first encoded byte tells us.
first_byte = '0x' + (data[index + 1] + data[index + 2]).upcase
bytes = utf8_char_length_in_bytes(first_byte)
# Grab the specified number of encoded bytes
utf8_char_encoded_bytes = next_n_bytes_from(index, bytes)
# Did we get the right number of bytes?
if utf8_char_encoded_bytes.length == bytes
# We did. Is it a valid character?
utf8_char_encoded = utf8_char_encoded_bytes.join
if valid_uri_encoded_utf8(utf8_char_encoded)
# It's valid!
char_array << utf8_char_encoded
# If we're dealing with a multibyte character, skip more than two
# of the next characters, which have already been processed.
# (Each encoded byte occupies three source characters: "%XX".)
skip_next = bytes * 3 - 1
end
end
# NOTE(review): when the sequence was truncated or invalid, the bytes are
# dropped without being appended — skip_next still advances past them.
index += skip_next
else
# This was not an encoded character, so just add it and move to the next.
char_array << char
end
index += 1
end
char_array
end | ruby | {
"resource": ""
} |
q21206 | UTF8Cleaner.URIString.utf8_char_length_in_bytes | train | def utf8_char_length_in_bytes(first_byte)
# Returns the byte length of a UTF-8 character, judged by its first byte
# (given as a '0x'-prefixed hex string such as '0xC3').
#
# UTF-8 leading-byte ranges (RFC 3629):
#   0x00..0xBF -> treated as 1 byte here (ASCII; 0x80..0xBF are really
#                 continuation bytes, which this method does not distinguish)
#   0xC0..0xDF -> 2-byte sequence
#   0xE0..0xEF -> 3-byte sequence
#   0xF0..     -> 4-byte sequence
if first_byte.hex < 'C0'.hex
1
elsif first_byte.hex <= 'DF'.hex
# BUG FIX: was `< 'DF'.hex`, which misclassified 0xDF itself (a 2-byte
# lead) as a 3-byte lead.
2
elsif first_byte.hex <= 'EF'.hex
# BUG FIX: was `< 'EF'.hex`, which misclassified 0xEF itself (a 3-byte
# lead) as a 4-byte lead.
3
else
4
end
end | ruby | {
"resource": ""
} |
q21207 | Stax.Ecs.ecs_services_with_ids | train | def ecs_services_with_ids(*ids)
# With no ids given, return every ECS service; otherwise keep only the
# services whose logical resource id appears in +ids+.
return ecs_services if ids.empty?
ecs_services.select { |svc| ids.include?(svc.logical_resource_id) }
end | ruby | {
"resource": ""
} |
q21208 | Maestrano::Connector::Rails.AllSynchronizationsJob.perform | train | def perform(name = nil, count = nil)
# Enqueues a SynchronizationJob for every organization with OAuth
# credentials and syncing enabled, spreading the jobs over a random delay
# of up to one hour to avoid a thundering herd.
# (+name+ and +count+ are accepted for job-queue compatibility but unused.)
Maestrano::Connector::Rails::Organization
.where.not(oauth_provider: nil, encrypted_oauth_token: nil)
# NOTE(review): on Rails < 6.1, `where.not` with multiple keys builds a
# NOR (only excludes rows where BOTH columns are nil) rather than two
# independent NOT NULL conditions — confirm intended semantics for the
# Rails version in use.
.where(sync_enabled: true)
.select(:id)
.find_each do |organization|
Maestrano::Connector::Rails::SynchronizationJob.set(wait: rand(3600)).perform_later(organization.id, {})
end
end | ruby | {
"resource": ""
} |
q21209 | FeedNormalizer.ElementCleaner.clean! | train | def clean!
self.class::SIMPLE_ELEMENTS.each do |element|
val = self.send(element)
send("#{element}=", (val.is_a?(Array) ?
val.collect{|v| HtmlCleaner.flatten(v.to_s)} : HtmlCleaner.flatten(val.to_s)))
end
self.class::HTML_ELEMENTS.each do |element|
send("#{element}=", HtmlCleaner.clean(self.send(element).to_s))
end
self.class::BLENDED_ELEMENTS.each do |element|
self.send(element).collect{|v| v.clean!}
end
end | ruby | {
"resource": ""
} |
q21210 | Stax.Ssm.ssm_parameter_tmpfile | train | def ssm_parameter_tmpfile(name)
Tempfile.new(stack_name).tap do |file|
file.write(ssm_parameter_get(name))
File.chmod(0400, file.path)
file.close
end
end | ruby | {
"resource": ""
} |
q21211 | Stax.Ssm.ssm_run_shellscript | train | def ssm_run_shellscript(*cmd)
Aws::Ssm.run(
document_name: 'AWS-RunShellScript',
targets: [{key: 'tag:aws:cloudformation:stack-name', values: [stack_name]}],
parameters: {commands: cmd}
)&.command_id.tap(&method(:puts))
end | ruby | {
"resource": ""
} |
q21212 | SnipSnip.Reporter.report | train | def report(controller)
return if results.empty?
action_display = "#{controller.controller_name}##{controller.action_name}"
SnipSnip.logger.info(action_display)
results.sort_by(&:report).each do |result|
SnipSnip.logger.info(" #{result.report}")
end
ensure
Registry.clear
end | ruby | {
"resource": ""
} |
q21213 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.fold_references | train | def fold_references(mapped_external_entity, references, organization)
references = format_references(references)
mapped_external_entity = mapped_external_entity.with_indifferent_access
# Use both record_references and id_references + the id
(references.values.flatten + ['id']).each do |reference|
fold_references_helper(mapped_external_entity, reference.split('/'), organization)
end
mapped_external_entity
end | ruby | {
"resource": ""
} |
q21214 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.id_hash | train | def id_hash(id, organization)
# Builds the Connec! id-hash for an external id: the id itself plus the
# provider/realm pair identifying which external application owns it.
{
id: id,
provider: organization.oauth_provider,
realm: organization.oauth_uid
}
end | ruby | {
"resource": ""
} |
q21215 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.fold_references_helper | train | def fold_references_helper(entity, array_of_refs, organization)
ref = array_of_refs.shift
field = entity[ref]
return if field.blank?
# Follow embedment path, remplace if it's not an array or a hash
case field
when Array
field.each do |f|
fold_references_helper(f, array_of_refs.dup, organization)
end
when Hash
fold_references_helper(entity[ref], array_of_refs, organization)
else
id = field
entity[ref] = [id_hash(id, organization)]
end
end | ruby | {
"resource": ""
} |
q21216 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.unfold_references_helper | train | def unfold_references_helper(entity, array_of_refs, organization)
# Recursively walks +entity+ along the path in +array_of_refs+ and replaces
# the id-hash array at the leaf with the external id that matches this
# organization's provider/realm. Mutates +entity+ in place. Returns a falsy
# value when the external id is missing (callers skip such entities),
# truthy otherwise.
ref = array_of_refs.shift
field = entity[ref]
# Unfold the id
if array_of_refs.empty? && field
return entity.delete(ref) if field.is_a?(String) # ~retro-compatibility to ease transition aroud Connec! idmaps rework. Should be removed eventually.
id_hash = field.find { |id| id[:provider] == organization.oauth_provider && id[:realm] == organization.oauth_uid }
if id_hash
entity[ref] = id_hash['id']
elsif field.find { |id| id[:provider] == 'connec' } # Should always be true as ids will always contain a connec id
# We may enqueue a fetch on the endpoint of the missing association, followed by a re-fetch on this one.
# However it's expected to be an edge case, so for now we rely on the fact that the webhooks should be relativly in order.
# Worst case it'll be done on following sync
entity.delete(ref)
return nil
end
true
# Follow embedment path
else
return true if field.blank?
case field
when Array
# A collection along the path: unfold each element; succeed only if
# every element unfolded successfully.
bool = true
field.each do |f|
bool &= unfold_references_helper(f, array_of_refs.dup, organization)
end
bool
when Hash
unfold_references_helper(entity[ref], array_of_refs, organization)
end
end
end | ruby | {
"resource": ""
} |
q21217 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.filter_connec_entity_for_id_refs | train | def filter_connec_entity_for_id_refs(connec_entity, id_references)
return {} if id_references.empty?
entity = connec_entity.dup.with_indifferent_access
tree = build_id_references_tree(id_references)
filter_connec_entity_for_id_refs_helper(entity, tree)
# TODO, improve performance by returning an empty hash if all the id_references have their id in the connec hash
# We should still return all of them if at least one is missing as we are relying on the id
entity
end | ruby | {
"resource": ""
} |
q21218 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.filter_connec_entity_for_id_refs_helper | train | def filter_connec_entity_for_id_refs_helper(entity_hash, tree)
return if tree.empty?
entity_hash.slice!(*tree.keys)
tree.each do |key, children|
case entity_hash[key]
when Array
entity_hash[key].each do |hash|
filter_connec_entity_for_id_refs_helper(hash, children)
end
when Hash
filter_connec_entity_for_id_refs_helper(entity_hash[key], children)
end
end
end | ruby | {
"resource": ""
} |
q21219 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.merge_id_hashes | train | def merge_id_hashes(dist, src, id_references)
dist = dist.with_indifferent_access
src = src.with_indifferent_access
id_references.each do |id_reference|
array_of_refs = id_reference.split('/')
merge_id_hashes_helper(dist, array_of_refs, src)
end
dist
end | ruby | {
"resource": ""
} |
q21220 | Maestrano::Connector::Rails::Concerns::ConnecHelper.ClassMethods.merge_id_hashes_helper | train | def merge_id_hashes_helper(hash, array_of_refs, src, path = [])
ref = array_of_refs.shift
field = hash[ref]
if array_of_refs.empty? && field
value = value_from_hash(src, path + [ref])
if value.is_a?(Array)
hash[ref] = (field + value).uniq
else
hash.delete(ref)
end
else
case field
when Array
field.each_with_index do |f, index|
merge_id_hashes_helper(f, array_of_refs.dup, src, path + [ref, index])
end
when Hash
merge_id_hashes_helper(field, array_of_refs, src, path + [ref])
end
end
end | ruby | {
"resource": ""
} |
q21221 | Stax.Ecs.taskdef_to_hash | train | def taskdef_to_hash(taskdef)
args = %i[family cpu memory requires_compatibilities task_role_arn execution_role_arn network_mode container_definitions volumes placement_constraints]
taskdef.to_hash.slice(*args)
end | ruby | {
"resource": ""
} |
q21222 | Stax.Ecs.ecs_deploy | train | def ecs_deploy(id, &block)
service = Aws::Ecs.services(ecs_cluster_name, [resource(id)]).first
taskdef = get_taskdef(service)
## convert to a hash and modify in block
hash = taskdef_to_hash(taskdef)
yield(hash) if block_given?
taskdef = register_taskdef(hash)
update_service(service, taskdef)
end | ruby | {
"resource": ""
} |
q21223 | Rpub.Compressor.store_file | train | def store_file(filename, content)
# Add +content+ to the archive under +filename+ without compression
# (STORED entry, e.g. for the EPUB mimetype file).
entry_kind = Zip::Entry::STORED
zip.put_next_entry(filename, nil, nil, entry_kind, Zlib::NO_COMPRESSION)
zip.write(content.to_s)
end | ruby | {
"resource": ""
} |
q21224 | Rpub.Compressor.compress_file | train | def compress_file(filename, content)
# Add +content+ to the archive under +filename+ using maximum DEFLATE
# compression.
entry_kind = Zip::Entry::DEFLATED
zip.put_next_entry(filename, nil, nil, entry_kind, Zlib::BEST_COMPRESSION)
zip.write(content.to_s)
end | ruby | {
"resource": ""
} |
q21225 | PostgreSQLCursor.Cursor.pluck | train | def pluck(*cols)
options = cols.last.is_a?(Hash) ? cols.pop : {}
@options.merge!(options)
@options[:symbolize_keys] = true
self.iterate_type(options[:class]) if options[:class]
cols = cols.map {|c| c.to_sym }
result = []
self.each() do |row|
row = row.symbolize_keys if row.is_a?(Hash)
result << cols.map { |c| row[c] }
end
result.flatten! if cols.size == 1
result
end | ruby | {
"resource": ""
} |
q21226 | Reckon.App.weighted_account_match | train | def weighted_account_match( row )
query_tokens = tokenize(row[:description])
search_vector = []
account_vectors = {}
query_tokens.each do |token|
idf = Math.log((accounts.keys.length + 1) / ((tokens[token] || {}).keys.length.to_f + 1))
tf = 1.0 / query_tokens.length.to_f
search_vector << tf*idf
accounts.each do |account, total_terms|
tf = (tokens[token] && tokens[token][account]) ? tokens[token][account] / total_terms.to_f : 0
account_vectors[account] ||= []
account_vectors[account] << tf*idf
end
end
# Should I normalize the vectors? Probably unnecessary due to tf-idf and short documents.
account_vectors = account_vectors.to_a.map do |account, account_vector|
{ :cosine => (0...account_vector.length).to_a.inject(0) { |m, i| m + search_vector[i] * account_vector[i] },
:account => account }
end
account_vectors.sort! {|a, b| b[:cosine] <=> a[:cosine] }
# Return empty set if no accounts matched so that we can fallback to the defaults in the unattended mode
if options[:unattended]
if account_vectors.first && account_vectors.first[:account]
account_vectors = [] if account_vectors.first[:cosine] == 0
end
end
return account_vectors
end | ruby | {
"resource": ""
} |
q21227 | Danger.DangerTextlint.lint | train | def lint
return if target_files.empty?
bin = textlint_path
result_json = run_textlint(bin, target_files)
errors = parse(result_json)
send_comment(errors)
end | ruby | {
"resource": ""
} |
q21228 | Capistrano.Karafka.set_defaults | train | def set_defaults
set_if_empty :karafka_role, :karafka
set_if_empty :karafka_processes, 1
set_if_empty :karafka_consumer_groups, []
set_if_empty :karafka_default_hooks, -> { true }
set_if_empty :karafka_env, -> { fetch(:karafka_env, fetch(:environment)) }
set_if_empty :karafka_pid, -> { File.join(shared_path, 'tmp', 'pids', 'karafka.pid') }
end | ruby | {
"resource": ""
} |
q21229 | Weary.Resource.meets_requirements? | train | def meets_requirements?(params)
# True when every required parameter name appears in +params+. Keys are
# compared as strings so symbol and string keys are interchangeable.
# PERF: the key normalization is hoisted out of the loop — it was
# previously recomputed once per requirement (accidental O(n*m)).
param_keys = params.keys.map(&:to_s)
requirements.all? { |k| param_keys.include?(k.to_s) }
end | ruby | {
"resource": ""
} |
q21230 | Weary.Resource.request | train | def request(params={})
normalize_parameters params
raise UnmetRequirementsError, "Required parameters: #{requirements}" \
unless meets_requirements? params
credentials = pull_credentials params
pairs = pull_url_pairs params
request = construct_request expand_url(pairs), params, credentials
yield request if block_given?
request
end | ruby | {
"resource": ""
} |
q21231 | PGN.MoveCalculator.compute_origin | train | def compute_origin
return nil if move.castle
possibilities = case move.piece
when /[brq]/i then direction_origins
when /[kn]/i then move_origins
when /p/i then pawn_origins
end
if possibilities.length > 1
possibilities = disambiguate(possibilities)
end
self.board.position_for(possibilities.first)
end | ruby | {
"resource": ""
} |
q21232 | PGN.MoveCalculator.direction_origins | train | def direction_origins
directions = DIRECTIONS[move.piece.downcase]
possibilities = []
directions.each do |dir|
piece, square = first_piece(destination_coords, dir)
possibilities << square if piece == self.move.piece
end
possibilities
end | ruby | {
"resource": ""
} |
q21233 | PGN.MoveCalculator.move_origins | train | def move_origins(moves = nil)
moves ||= MOVES[move.piece.downcase]
possibilities = []
file, rank = destination_coords
moves.each do |i, j|
f = file + i
r = rank + j
if valid_square?(f, r) && self.board.at(f, r) == move.piece
possibilities << [f, r]
end
end
possibilities
end | ruby | {
"resource": ""
} |
q21234 | PGN.MoveCalculator.pawn_origins | train | def pawn_origins
_, rank = destination_coords
double_rank = (rank == 3 && self.move.white?) || (rank == 4 && self.move.black?)
pawn_moves = PAWN_MOVES[self.move.piece]
moves = self.move.capture ? pawn_moves[:capture] : pawn_moves[:normal]
moves += pawn_moves[:double] if double_rank
move_origins(moves)
end | ruby | {
"resource": ""
} |
q21235 | PGN.MoveCalculator.disambiguate_san | train | def disambiguate_san(possibilities)
move.disambiguation ?
possibilities.select {|p| self.board.position_for(p).match(move.disambiguation) } :
possibilities
end | ruby | {
"resource": ""
} |
q21236 | PGN.MoveCalculator.disambiguate_pawns | train | def disambiguate_pawns(possibilities)
self.move.piece.match(/p/i) && !self.move.capture ?
possibilities.reject {|p| self.board.position_for(p).match(/2|7/) } :
possibilities
end | ruby | {
"resource": ""
} |
q21237 | PGN.MoveCalculator.disambiguate_discovered_check | train | def disambiguate_discovered_check(possibilities)
DIRECTIONS.each do |attacking_piece, directions|
attacking_piece = attacking_piece.upcase if self.move.black?
directions.each do |dir|
piece, square = first_piece(king_position, dir)
next unless piece == self.move.piece && possibilities.include?(square)
piece, _ = first_piece(square, dir)
possibilities.reject! {|p| p == square } if piece == attacking_piece
end
end
possibilities
end | ruby | {
"resource": ""
} |
q21238 | PGN.MoveCalculator.en_passant_capture | train | def en_passant_capture
return nil if self.move.castle
if !self.board.at(self.move.destination) && self.move.capture
self.move.destination[0] + self.origin[1]
end
end | ruby | {
"resource": ""
} |
q21239 | Weary.Request.call | train | def call(environment)
app = adapter.new
middlewares = @middlewares || []
stack = Rack::Builder.new do
middlewares.each do |middleware|
klass, *args = middleware
use klass, *args[0...-1].flatten, &args.last
end
run app
end
stack.call rack_env_defaults.merge(environment.update(env))
end | ruby | {
"resource": ""
} |
q21240 | Weary.Request.perform | train | def perform
future do
status, headers, body = call(rack_env_defaults)
response = Weary::Response.new body, status, headers
yield response if block_given?
response
end
end | ruby | {
"resource": ""
} |
q21241 | Weary.Request.query_params_from_hash | train | def query_params_from_hash(value, prefix = nil)
# Serializes a nested Hash/Array structure into a URL query string using
# Rails-style bracket notation (%5B/%5D are the encoded '['/']').
# Raises ArgumentError when given a bare scalar with no key prefix.
case value
when Array
# Arrays use an empty-bracket suffix: key[]=v1&key[]=v2
value.map { |v| query_params_from_hash(v, "#{prefix}%5B%5D") }.join("&")
when Hash
value.map { |k, v|
query_params_from_hash(v, prefix ? "#{prefix}%5B#{Rack::Utils.escape_path(k)}%5D" : Rack::Utils.escape_path(k))
}.join("&")
when NilClass
# A nil value serializes as the bare key with no '='.
prefix
else
raise ArgumentError, "value must be a Hash" if prefix.nil?
# NOTE(review): Rack::Utils.escape_path performs path-segment escaping;
# query components are normally escaped with Rack::Utils.escape — confirm
# this choice is intentional.
"#{prefix}=#{Rack::Utils.escape_path(value)}"
end
end | ruby | {
"resource": ""
} |
q21242 | SAPOCI.Document.to_html | train | def to_html(options = {})
# Render every item to HTML and concatenate the fragments into one string.
self.items.map { |item| item.to_html(options) }.join
end | ruby | {
"resource": ""
} |
q21243 | TicketEvolution.AffiliateCommissions.find_by_office_order | train | def find_by_office_order(office_id, order_link_id, params=nil)
request(:GET, "/#{office_id}/orders/#{order_link_id}", params) do |response|
singular_class.new(response.body.merge({
:status_code => response.response_code,
:server_message => response.server_message,
:connection => response.body[:connection]
}))
end
end | ruby | {
"resource": ""
} |
q21244 | PGN.Game.play | train | def play
index = 0
hist = Array.new(3, "")
loop do
puts "\e[H\e[2J"
puts self.positions[index].inspect
hist[0..2] = (hist[1..2] << STDIN.getch)
case hist.join
when LEFT
index -= 1 if index > 0
when RIGHT
index += 1 if index < self.moves.length
when EXIT
break
end
end
end | ruby | {
"resource": ""
} |
q21245 | GrabzIt.Client.url_to_animation | train | def url_to_animation(url, options = nil)
if options == nil
options = AnimationOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLGet + "takeanimation.ashx", false, options, url)
return nil
end | ruby | {
"resource": ""
} |
q21246 | GrabzIt.Client.url_to_image | train | def url_to_image(url, options = nil)
if options == nil
options = ImageOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLGet + TakePicture, false, options, url)
return nil
end | ruby | {
"resource": ""
} |
q21247 | GrabzIt.Client.html_to_image | train | def html_to_image(html, options = nil)
if options == nil
options = ImageOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLPost + TakePicture, true, options, html)
return nil
end | ruby | {
"resource": ""
} |
q21248 | GrabzIt.Client.url_to_table | train | def url_to_table(url, options = nil)
if options == nil
options = TableOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLGet + TakeTable, false, options, url)
return nil
end | ruby | {
"resource": ""
} |
q21249 | GrabzIt.Client.html_to_table | train | def html_to_table(html, options = nil)
if options == nil
options = TableOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLPost + TakeTable, true, options, html)
return nil
end | ruby | {
"resource": ""
} |
q21250 | GrabzIt.Client.url_to_pdf | train | def url_to_pdf(url, options = nil)
if options == nil
options = PDFOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLGet + TakePDF, false, options, url)
return nil
end | ruby | {
"resource": ""
} |
q21251 | GrabzIt.Client.html_to_pdf | train | def html_to_pdf(html, options = nil)
if options == nil
options = PDFOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLPost + TakePDF, true, options, html)
return nil
end | ruby | {
"resource": ""
} |
q21252 | GrabzIt.Client.url_to_docx | train | def url_to_docx(url, options = nil)
if options == nil
options = DOCXOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLGet + TakeDOCX, false, options, url)
return nil
end | ruby | {
"resource": ""
} |
q21253 | GrabzIt.Client.html_to_docx | train | def html_to_docx(html, options = nil)
if options == nil
options = DOCXOptions.new()
end
@request = Request.new(@protocol + WebServicesBaseURLPost + TakeDOCX, true, options, html)
return nil
end | ruby | {
"resource": ""
} |
q21254 | GrabzIt.Client.save | train | def save(callBackURL = nil)
if @request == nil
raise GrabzItException.new("No parameters have been set.", GrabzItException::PARAMETER_MISSING_PARAMETERS)
end
sig = encode(@request.options()._getSignatureString(GrabzIt::Utility.nil_check(@applicationSecret), callBackURL, @request.getTargetUrl()))
data = take(sig, callBackURL)
if data == nil || data == ""
data = take(sig, callBackURL)
end
if data == nil || data == ""
raise GrabzItException.new("An unknown network error occurred, please try calling this method again.", GrabzItException::NETWORK_GENERAL_ERROR)
end
return get_result_value(data, "ID")
end | ruby | {
"resource": ""
} |
q21255 | GrabzIt.Client.save_to | train | def save_to(saveToFile = nil)
# Takes the capture and blocks until it completes. Returns the raw capture
# bytes when +saveToFile+ is nil/empty, otherwise writes them to that path
# and returns true. Returns false when the capture could not be started;
# raises GrabzItException on a rendering error or a missing capture.
id = save()
if id == nil || id == ""
return false
end
# Wait for it to be possibly ready
sleep((@request.options().startDelay() / 1000) + 3)
# Poll until the capture is ready.
loop do
status = get_status(id)
if !status.cached && !status.processing
# Neither cached nor still processing: rendering failed.
# (Removed an unreachable `break` that followed this raise.)
raise GrabzItException.new("The capture did not complete with the error: " + status.message, GrabzItException::RENDERING_ERROR)
elsif status.cached
result = get_result(id)
if !result
# (Removed an unreachable `break` that followed this raise.)
raise GrabzItException.new("The capture could not be found on GrabzIt.", GrabzItException::RENDERING_MISSING_SCREENSHOT)
end
if saveToFile == nil || saveToFile == ""
return result
end
# Write the capture in binary mode; ensure the handle is closed even
# if the write raises.
screenshot = File.new(saveToFile, "wb")
begin
screenshot.write(result)
ensure
screenshot.close
end
break
end
sleep(3)
end
return true
end | ruby | {
"resource": ""
} |
q21256 | GrabzIt.Client.get_status | train | def get_status(id)
if id == nil || id == ""
return nil
end
result = get(@protocol + WebServicesBaseURLGet + "getstatus.ashx?id=" + GrabzIt::Utility.nil_check(id))
doc = REXML::Document.new(result)
processing = doc.root.elements["Processing"].text()
cached = doc.root.elements["Cached"].text()
expired = doc.root.elements["Expired"].text()
message = doc.root.elements["Message"].text()
return ScreenShotStatus.new((processing == TrueString), (cached == TrueString), (expired == TrueString), message)
end | ruby | {
"resource": ""
} |
q21257 | GrabzIt.Client.get_result | train | def get_result(id)
if id == nil || id == ""
return nil
end
return get(@protocol + WebServicesBaseURLGet + "getfile.ashx?id=" + GrabzIt::Utility.nil_check(id))
end | ruby | {
"resource": ""
} |
q21258 | GrabzIt.Client.get_cookies | train | def get_cookies(domain)
sig = encode(GrabzIt::Utility.nil_check(@applicationSecret)+"|"+GrabzIt::Utility.nil_check(domain))
qs = "key="
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(@applicationKey)))
qs.concat("&domain=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(domain)))
qs.concat("&sig=")
qs.concat(sig)
result = get(@protocol + WebServicesBaseURLGet + "getcookies.ashx?" + qs)
doc = REXML::Document.new(result)
check_for_exception(doc)
cookies = Array.new
xml_cookies = doc.elements.to_a("//WebResult/Cookies/Cookie")
xml_cookies.each do |cookie|
expires = nil
if cookie.elements["Expires"] != nil
expires = cookie.elements["Expires"].text
end
grabzItCookie = GrabzIt::Cookie.new(cookie.elements["Name"].text, cookie.elements["Domain"].text, cookie.elements["Value"].text, cookie.elements["Path"].text, (cookie.elements["HttpOnly"].text == TrueString), expires, cookie.elements["Type"].text)
cookies << grabzItCookie
end
return cookies
end | ruby | {
"resource": ""
} |
q21259 | GrabzIt.Client.set_cookie | train | def set_cookie(name, domain, value = "", path = "/", httponly = false, expires = "")
sig = encode(GrabzIt::Utility.nil_check(@applicationSecret)+"|"+GrabzIt::Utility.nil_check(name)+"|"+GrabzIt::Utility.nil_check(domain)+
"|"+GrabzIt::Utility.nil_check(value)+"|"+GrabzIt::Utility.nil_check(path)+"|"+GrabzIt::Utility.b_to_str(httponly)+
"|"+GrabzIt::Utility.nil_check(expires)+"|0")
qs = "key="
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(@applicationKey)))
qs.concat("&domain=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(domain)))
qs.concat("&name=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(name)))
qs.concat("&value=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(value)))
qs.concat("&path=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(path)))
qs.concat("&httponly=")
qs.concat(GrabzIt::Utility.b_to_str(httponly))
qs.concat("&expires=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(expires)))
qs.concat("&sig=")
qs.concat(sig)
return (get_result_value(get(@protocol + WebServicesBaseURLGet + "setcookie.ashx?" + qs), "Result") == TrueString)
end | ruby | {
"resource": ""
} |
q21260 | GrabzIt.Client.delete_cookie | train | def delete_cookie(name, domain)
sig = encode(GrabzIt::Utility.nil_check(@applicationSecret)+"|"+GrabzIt::Utility.nil_check(name)+
"|"+GrabzIt::Utility.nil_check(domain)+"|1")
qs = "key="
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(@applicationKey)))
qs.concat("&domain=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(domain)))
qs.concat("&name=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(name)))
qs.concat("&delete=1&sig=")
qs.concat(sig)
return (get_result_value(get(@protocol + WebServicesBaseURLGet + "setcookie.ashx?" + qs), "Result") == TrueString)
end | ruby | {
"resource": ""
} |
q21261 | GrabzIt.Client.add_watermark | train | def add_watermark(identifier, path, xpos, ypos)
if !File.file?(path)
raise "File: " + path + " does not exist"
end
sig = encode(GrabzIt::Utility.nil_check(@applicationSecret)+"|"+GrabzIt::Utility.nil_check(identifier)+"|"+GrabzIt::Utility.nil_int_check(xpos)+
"|"+GrabzIt::Utility.nil_int_check(ypos))
boundary = '--------------------------'+Time.now.to_f.to_s
url = @protocol + "://grabz.it/services/addwatermark.ashx"
uri = URI.parse(url)
file = File.open(path, "rb")
data = file.read
post_body = Array.new
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"watermark\"; filename=\""+File.basename(path)+"\"\r\nContent-Type: image/jpeg\r\n\r\n"
post_body << data
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"key\"\r\n\r\n"
post_body << GrabzIt::Utility.nil_check(@applicationKey)
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"identifier\"\r\n\r\n"
post_body << GrabzIt::Utility.nil_check(identifier)
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"xpos\"\r\n\r\n"
post_body << GrabzIt::Utility.nil_check(xpos)
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"ypos\"\r\n\r\n"
post_body << GrabzIt::Utility.nil_check(ypos)
post_body << "\r\n--"+boundary+"\r\n"
post_body << "Content-Disposition: form-data; name=\"sig\"\r\n\r\n"
post_body << sig
post_body << "\r\n--"+boundary+"--\r\n"
request = Net::HTTP::Post.new(url)
request.content_type = "multipart/form-data, boundary="+boundary
request.body = post_body.join
caller = Net::HTTP.new(uri.host, uri.port)
caller.use_ssl = uri.scheme == 'https'
response = caller.start {|http| http.request(request)}
response_check(response)
return (get_result_value(response.body(), "Result") == TrueString)
end | ruby | {
"resource": ""
} |
q21262 | GrabzIt.Client.delete_watermark | train | def delete_watermark(identifier)
sig = encode(GrabzIt::Utility.nil_check(@applicationSecret)+"|"+GrabzIt::Utility.nil_check(identifier))
qs = "key="
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(@applicationKey)))
qs.concat("&identifier=")
qs.concat(CGI.escape(GrabzIt::Utility.nil_check(identifier)))
qs.concat("&sig=")
qs.concat(sig)
return (get_result_value(get(@protocol + WebServicesBaseURLGet + "deletewatermark.ashx?" + qs), "Result") == TrueString)
end | ruby | {
"resource": ""
} |
q21263 | GrabzIt.Client.set_local_proxy | train | def set_local_proxy(value)
# Configure an outbound HTTP proxy from a URL string (host, port and
# optional credentials); passing nil/false clears the proxy.
@proxy = if value
uri = URI.parse(value)
Proxy.new(uri.host, uri.port, uri.user, uri.password)
else
Proxy.new()
end
end | ruby | {
"resource": ""
} |
q21264 | GrabzIt.Client.decrypt_file | train | def decrypt_file(path, key)
data = read_file(path)
decryptedFile = File.new(path, "wb")
decryptedFile.write(decrypt(data, key))
decryptedFile.close
end | ruby | {
"resource": ""
} |
q21265 | GrabzIt.Client.decrypt | train | def decrypt(data, key)
# Decrypts an AES-256-CBC payload whose first 16 bytes are the IV, using
# the base64-encoded +key+. Returns nil when +data+ is nil. Padding is
# disabled, so the caller is responsible for any padding removal.
if data == nil
return nil
end
iv = data[0..15]
payload = data[16..-1]
# `OpenSSL::Cipher::Cipher` is deprecated; use OpenSSL::Cipher directly.
cipher = OpenSSL::Cipher.new("aes-256-cbc")
# BUG FIX: the cipher must be put into decryption mode before keying.
# Without this call OpenSSL defaults to encryption, so #update would
# re-encrypt the payload instead of decrypting it.
cipher.decrypt
cipher.padding = 0
cipher.key = Base64.strict_decode64(key)
cipher.iv = iv
decrypted = cipher.update(payload)
decrypted << cipher.final()
return decrypted
end | ruby | {
"resource": ""
} |
q21266 | ApiHammer.HaltMethods.halt_error | train | def halt_error(status, errors, options = {})
errors_as_json = errors.respond_to?(:as_json) ? errors.as_json : errors
unless errors_as_json.is_a?(Hash)
raise ArgumentError, "errors be an object representable in JSON as a Hash; got errors = #{errors.inspect}"
end
unless errors_as_json.keys.all? { |k| k.is_a?(String) || k.is_a?(Symbol) }
raise ArgumentError, "errors keys must all be string or symbol; got errors = #{errors.inspect}"
end
unless errors_as_json.values.all? { |v| v.is_a?(Array) && v.all? { |e| e.is_a?(String) } }
raise ArgumentError, "errors values must all be arrays of strings; got errors = #{errors.inspect}"
end
error_message = nil
halt_options = options.reject do |k,v|
(k.to_s == 'error_message').tap do |is_error_message|
if is_error_message
error_message = v
end
end
end
body = {'errors' => errors}
error_message ||= begin
error_values = errors.values.inject([], &:+)
if error_values.size <= 1
error_values.first
else
# sentencify with periods
error_values.map { |v| v =~ /\.\s*\z/ ? v : v + '.' }.join(' ')
end
end
body['error_message'] = error_message if error_message
if Object.const_defined?(:Rollbar) and status != 404 and Object.const_defined?(:DEBUG_4XX) and DEBUG_4XX['enabled']
Rollbar.debug "Service halted with status #{status}", status: status, body: body, halt_options: halt_options
end
halt(status, body, halt_options)
end | ruby | {
"resource": ""
} |
q21267 | ApiHammer.Sinatra.format_response | train | def format_response(status, body_object, headers={})
if status == 204
body = ''
else
body = case response_media_type
when 'application/json'
JSON.pretty_generate(body_object)
when 'application/x-www-form-urlencoded'
URI.encode_www_form(body_object)
when 'application/xml'
body_object.to_s
when 'text/plain'
body_object
else
# :nocov:
raise NotImplementedError, "unsupported response media type #{response_media_type}"
# :nocov:
end
end
[status, headers.merge({'Content-Type' => response_media_type}), [body]]
end | ruby | {
"resource": ""
} |
q21268 | ApiHammer.Sinatra.request_body | train | def request_body
# Read the entire request body as a string. The stream is rewound before
# reading (in case something upstream left it mid-stream) and rewound
# again afterwards so later consumers can re-read it.
request.body.rewind
body = request.body.read
request.body.rewind
body
end | ruby | {
"resource": ""
} |
q21269 | ApiHammer.Sinatra.parsed_body | train | def parsed_body
request_media_type = request.media_type
unless request_media_type =~ /\S/
fallback = true
request_media_type = supported_media_types.first
end
case request_media_type
when 'application/json'
begin
return JSON.parse(request_body)
rescue JSON::ParserError
if fallback
t_key = 'app.errors.request.body_parse_fallback_json'
default = "Error encountered attempting to parse the request body. No Content-Type was specified and parsing as JSON failed. Supported media types are %{supported_media_types}. JSON parser error: %{error_class}: %{error_message}"
else
t_key = 'app.errors.request.body_parse_indicated_json'
default = "Error encountered attempting to parse the JSON request body: %{error_class}: %{error_message}"
end
message = I18n.t(t_key,
:default => default,
:error_class => $!.class,
:error_message => $!.message,
:supported_media_types => supported_media_types.join(', ')
)
errors = {'json' => [message]}
halt_error(400, errors)
end
else
if supported_media_types.include?(request_media_type)
# :nocov:
raise NotImplementedError, "handling request body with media type #{request_media_type} not implemented"
# :nocov:
end
logger.error "received Content-Type of #{request.content_type.inspect}; halting with 415"
message = I18n.t('app.errors.request.content_type',
:default => "Unsupported Content-Type of %{content_type} given for the request body. Supported media types are %{supported_media_types}",
:content_type => request.content_type,
:supported_media_types => supported_media_types.join(', ')
)
errors = {'Content-Type' => [message]}
halt_error(415, errors)
end
end | ruby | {
"resource": ""
} |
q21270 | ApiHammer.Sinatra.check_params_and_object_consistent | train | def check_params_and_object_consistent(path_params, object)
errors = {}
path_params.each do |(k, v)|
if object.key?(k) && object[k] != v
errors[k] = [I18n.t('app.errors.inconsistent_uri_and_entity',
:key => k,
:uri_value => v,
:entity_value => object[k],
:default => "Inconsistent data given in the request URI and request entity: %{key} was specified as %{uri_value} in the URI but %{entity_value} in the entity",
)]
end
end
if errors.any?
halt_error(422, errors)
end
end | ruby | {
"resource": ""
} |
q21271 | ApiHammer.Body.object | train | def object
instance_variable_defined?(:@object) ? @object : @object = begin
if media_type == 'application/json'
JSON.parse(body) rescue nil
elsif media_type == 'application/x-www-form-urlencoded'
CGI.parse(body).map { |k, vs| {k => vs.last} }.inject({}, &:update)
end
end
end | ruby | {
"resource": ""
} |
q21272 | ApiHammer.Body.jsonifiable | train | def jsonifiable
@jsonifiable ||= Body.new(catch(:jsonifiable) do
original_body = self.body
unless original_body.is_a?(String)
begin
# if the response body is not a string, but JSON doesn't complain
# about dumping whatever it is, go ahead and use it
JSON.generate([original_body])
throw :jsonifiable, original_body
rescue
# otherwise return nil - don't know what to do with whatever this object is
throw :jsonifiable, nil
end
end
# first try to change the string's encoding per the Content-Type header
body = original_body.dup
unless body.valid_encoding?
# I think this always comes in as ASCII-8BIT anyway so may never get here. hopefully.
body.force_encoding('ASCII-8BIT')
end
content_type_attrs = ContentTypeAttrs.new(content_type)
if content_type_attrs.parsed?
charset = content_type_attrs['charset'].first
if charset && Encoding.list.any? { |enc| enc.to_s.downcase == charset.downcase }
if body.dup.force_encoding(charset).valid_encoding?
body.force_encoding(charset)
else
# I guess just ignore the specified encoding if the result is not valid. fall back to
# something else below.
end
end
end
begin
JSON.generate([body])
rescue Encoding::UndefinedConversionError
# if updating by content-type didn't do it, try UTF8 since JSON wants that - but only
# if it seems to be valid utf8.
# don't try utf8 if the response content-type indicated something else.
try_utf8 = !(content_type_attrs && content_type_attrs.parsed? && content_type_attrs['charset'].any? { |cs| !['utf8', ''].include?(cs.downcase) })
if try_utf8 && body.dup.force_encoding('UTF-8').valid_encoding?
body.force_encoding('UTF-8')
else
# I'm not sure if there is a way in this situation to get JSON gem to generate the
# string correctly. fall back to an array of codepoints I guess? this is a weird
# solution but the best I've got for now.
body = body.codepoints.to_a
end
end
body
end, content_type)
end | ruby | {
"resource": ""
} |
q21273 | ApiHammer.FaradayCurlVOutputter.alter_body_by_content_type | train | def alter_body_by_content_type(body, content_type)
return body unless body.is_a?(String)
content_type_attrs = ApiHammer::ContentTypeAttrs.new(content_type)
if @options[:text].nil? ? content_type_attrs.text? : @options[:text]
if pretty?
case content_type_attrs.media_type
when 'application/json'
require 'json'
begin
body = JSON.pretty_generate(JSON.parse(body))
rescue JSON::ParserError
end
end
end
if color?
coderay_scanner = CodeRayForMediaTypes.reject{|k,v| !v.any?{|type| type === content_type_attrs.media_type} }.keys.first
if coderay_scanner
require 'coderay'
body = CodeRay.scan(body, coderay_scanner).encode(:terminal)
end
end
else
body = omitted_body("[[omitted binary body (size = #{body.size})]]")
end
body
end | ruby | {
"resource": ""
} |
q21274 | ActiveList.Generator.select_data_code | train | def select_data_code(options = {})
paginate = (options.key?(:paginate) ? options[:paginate] : @table.paginate?)
# Check order
unless @table.options.keys.include?(:order)
columns = @table.table_columns
@table.options[:order] = (columns.any? ? columns.first.name.to_sym : { id: :desc })
end
class_name = @table.model.name
class_name = "(controller_name != '#{class_name.tableize}' ? controller_name.to_s.classify.constantize : #{class_name})" if collection?
# Find data
query_code = class_name.to_s
query_code << scope_code if scope_code
query_code << ".select(#{select_code})" if select_code
query_code << ".from(#{from_code})" if from_code
query_code << ".where(#{conditions_code})" unless @table.options[:conditions].blank?
query_code << ".joins(#{@table.options[:joins].inspect})" unless @table.options[:joins].blank?
unless includes_reflections.empty?
expr = includes_reflections.inspect[1..-2]
query_code << ".includes(#{expr})"
query_code << ".references(#{expr})"
end
code = ''
code << "#{query_code}\n"
code << if @table.options[:count].present?
"#{var_name(:count)} = #{query_code}.count(#{@table.options[:count].inspect})\n"
else
"#{var_name(:count)} = #{query_code}.count\n"
end
query_code << ".group(#{@table.options[:group].inspect})" unless @table.options[:group].blank?
query_code << ".reorder(#{var_name(:order)})"
if paginate
code << "#{var_name(:limit)} = (#{var_name(:params)}[:per_page] || 25).to_i\n"
code << "if params[:page]\n"
code << " #{var_name(:page)} = (#{var_name(:params)}[:page] || 1).to_i\n"
code << "elsif params['#{table.name}-id'] and #{var_name(:index)} = #{query_code}.pluck(:id).index(params['#{table.name}-id'].to_i)\n"
# Find page of request element
code << " #{var_name(:page)} = (#{var_name(:index)}.to_f / #{var_name(:limit)}).floor + 1\n"
code << "else\n"
code << " #{var_name(:page)} = 1\n"
code << "end\n"
code << "#{var_name(:page)} = 1 if #{var_name(:page)} < 1\n"
code << "#{var_name(:offset)} = (#{var_name(:page)} - 1) * #{var_name(:limit)}\n"
code << "#{var_name(:last)} = (#{var_name(:count)}.to_f / #{var_name(:limit)}).ceil.to_i\n"
code << "#{var_name(:last)} = 1 if #{var_name(:last)} < 1\n"
code << "return #{view_method_name}(options.merge(page: 1)) if 1 > #{var_name(:page)}\n"
code << "return #{view_method_name}(options.merge(page: #{var_name(:last)})) if #{var_name(:page)} > #{var_name(:last)}\n"
query_code << ".offset(#{var_name(:offset)})"
query_code << ".limit(#{var_name(:limit)})"
end
code << "#{records_variable_name} = #{query_code} || {}\n"
code
end | ruby | {
"resource": ""
} |
q21275 | ActiveList.Generator.includes_reflections | train | def includes_reflections
hash = []
@table.columns.each do |column|
hash << column.reflection.name if column.respond_to?(:reflection)
end
hash
end | ruby | {
"resource": ""
} |
q21276 | ActiveList.Generator.conditions_code | train | def conditions_code
conditions = @table.options[:conditions]
code = ''
case conditions
when Array
case conditions[0]
when String # SQL
code << '[' + conditions.first.inspect
code << conditions[1..-1].collect { |p| ', ' + sanitize_condition(p) }.join if conditions.size > 1
code << ']'
when Symbol # Method
raise 'What?' # Amazingly explicit.
# code << conditions.first.to_s + '('
# code << conditions[1..-1].collect { |p| sanitize_condition(p) }.join(', ') if conditions.size > 1
# code << ')'
else
raise ArgumentError, 'First element of an Array can only be String or Symbol.'
end
when Hash # SQL
code << '{' + conditions.collect { |key, value| key.to_s + ': ' + sanitize_condition(value) }.join(',') + '}'
when Symbol # Method
code << conditions.to_s + '(options)'
when CodeString
code << '(' + conditions.gsub(/\s*\n\s*/, ';') + ')'
when String
code << conditions.inspect
else
raise ArgumentError, "Unsupported type for conditions: #{conditions.inspect}"
end
code
end | ruby | {
"resource": ""
} |
q21277 | RConfig.Utils.default_load_paths | train | def default_load_paths
paths = []
# Check for Rails config path
paths << "#{::Rails.root}/config" if rails?
# Check for defined constants
paths << CONFIG_ROOT if defined?(CONFIG_ROOT) && Dir.exists?(CONFIG_ROOT)
paths << CONFIG_PATH if defined?(CONFIG_PATH) && Dir.exists?(CONFIG_PATH)
# Check for config directory in app root
config_dir = File.join(app_root, 'config')
paths << config_dir if Dir.exists?(config_dir)
paths
end | ruby | {
"resource": ""
} |
q21278 | RConfig.Utils.read | train | def read(file, name, ext)
contents = File.read(file) # Read the contents from the file.
contents = ERB.new(contents).result # Evaluate any ruby code using ERB.
parse(contents, name, ext) # Parse the contents based on the file type
end | ruby | {
"resource": ""
} |
q21279 | RConfig.Utils.parse | train | def parse(contents, name, ext)
hash = case ext
when *YML_FILE_TYPES
YAML::load(contents)
when *XML_FILE_TYPES
parse_xml(contents, name)
when *CNF_FILE_TYPES
RConfig::PropertiesFile.parse(contents)
else
raise ConfigError, "Unknown File type: #{ext}"
end
hash.freeze
end | ruby | {
"resource": ""
} |
q21280 | RConfig.Utils.parse_xml | train | def parse_xml(contents, name)
hash = Hash.from_xml(contents)
hash = hash[name] if hash.size == 1 && hash.key?(name) # xml document could have root tag matching the file name.
RConfig::PropertiesFile.parse_references(hash)
end | ruby | {
"resource": ""
} |
q21281 | RConfig.Utils.merge_hashes | train | def merge_hashes(hashes)
hashes.inject({}) { |n, h| n.weave(h, true) }
end | ruby | {
"resource": ""
} |
q21282 | RConfig.Utils.make_indifferent | train | def make_indifferent(hash)
case hash
when Hash
unless hash.frozen?
hash.each do |k, v|
hash[k] = make_indifferent(v)
end
hash = RConfig::Config.new.merge!(hash).freeze
end
logger.debug "make_indefferent: x = #{hash.inspect}:#{hash.class}"
when Array
unless hash.frozen?
hash.collect! do |v|
make_indifferent(v)
end
hash.freeze
end
# Freeze Strings.
when String
hash.freeze
end
hash
end | ruby | {
"resource": ""
} |
q21283 | RConfig.Utils.flush_cache | train | def flush_cache(name=nil)
if name
name = name.to_s
self.cache_hash[name] &&= nil
else
logger.warn "RConfig: Flushing config data cache."
self.suffixes = {}
self.cache = {}
self.cache_files = {}
self.cache_hash = {}
self.last_auto_check = {}
self
end
end | ruby | {
"resource": ""
} |
q21284 | RConfig.Cascade.overlay= | train | def overlay=(value)
reload(false) if self.overlay != value
self.overlay = value && value.dup.freeze
end | ruby | {
"resource": ""
} |
q21285 | RConfig.Cascade.suffixes_for | train | def suffixes_for(name)
name = name.to_s
self.suffixes[name] ||= begin
ol = overlay
name_x = name.dup
if name_x.sub!(/_([A-Z]+)$/, '')
ol = $1
end
name_x.freeze
result = if ol
ol_ = ol.upcase
ol = ol.downcase
x = []
SUFFIXES.each do |suffix|
# Standard, no overlay:
# e.g.: database_<suffix>.yml
x << suffix
# Overlay:
# e.g.: database_(US|GB)_<suffix>.yml
x << [ol_, suffix]
end
[name_x, x.freeze]
else
[name.dup.freeze, SUFFIXES.freeze]
end
result.freeze
logger.debug "suffixes(#{name}) => #{result.inspect}"
result
end
end | ruby | {
"resource": ""
} |
q21286 | RConfig.CoreMethods.load_config_files | train | def load_config_files(name, force=false)
name = name.to_s
# Return last config file hash list loaded,
# if reload is disabled and files have already been loaded.
return self.cache_config_files[name] if self.reload_disabled? && self.cache_config_files[name]
logger.info "Loading config files for: #{name}"
logger.debug "load_config_files(#{name.inspect})"
# Get current time for checking last loaded status.
now = Time.now
# Get array of all the existing files file the config name.
config_files = self.get_config_files(name)
# Get all the data from all yaml files into as configs
configs = config_files.collect do |f|
name, name_with_suffix, filename, ext, modified_time = * f
# Get the cached file info the specific file, if
# it's been loaded before.
config_data, last_modified, last_loaded = self.cache[filename]
logger.debug "f = #{f.inspect}\n" +
"cache #{name_with_suffix} filename = #{filename.inspect}\n" +
"cache #{name_with_suffix} config_data = #{config_data.inspect}\n" +
"cache #{name_with_suffix} last_modified = #{last_modified.inspect}\n" +
"cache #{name_with_suffix} last_loaded = #{last_loaded.inspect}\n"
# Load the file if its never been loaded or its been more than
# so many minutes since last load attempt. (default: 5 minutes)
if config_data.blank? || (now - last_loaded > self.reload_interval)
if force || config_data.blank? || modified_time != last_modified
logger.debug "modified_time #{name.inspect} #{filename.inspect} " +
"changed #{modified_time != last_modified} : #{modified_time.inspect} #{last_modified.inspect}"
logger.debug "RConfig: loading #{filename.inspect}"
config_data = read(filename, name, ext) # Get contents from config file
logger.debug "RConfig: loaded #{filename.inspect} => #{config_data.inspect}"
(self.config_loaded ||= {})[name] = config_files # add files to the loaded files cache
self.cache[filename] = [config_data, modified_time, now] # Save cached config file contents, and modified_time.
logger.debug "cache[#{filename.inspect}] = #{self.cache[filename].inspect}"
self.cache_hash[name] = nil # Flush merged hash cache.
self.cache_files[name] = config_files # Config files changed or disappeared.
end # if config_data == nil || (now - last_loaded > self.reload_interval)
end # if force || config_data == nil || modified_time != last_modified
config_data
end # config_files.collect
configs.compact!
logger.debug "load_config_files(#{name.inspect}) => #{configs.inspect}"
# Keep last loaded config files around in case self.reload_dsabled.
self.cache_config_files[name] = configs #unless configs.empty?
configs
end | ruby | {
"resource": ""
} |
q21287 | RConfig.CoreMethods.config_changed? | train | def config_changed?(name)
logger.debug "config_changed?(#{name.inspect})"
name = name.to_s
!(self.cache_files[name] === get_config_files(name))
end | ruby | {
"resource": ""
} |
q21288 | RConfig.CoreMethods.get_config_data | train | def get_config_data(name)
logger.debug "get_config_data(#{name.inspect})"
name = name.to_s
unless result = self.cache_hash[name]
result = self.cache_hash[name] =
make_indifferent(
merge_hashes(
load_config_files(name)
)
)
logger.debug "get_config_data(#{name.inspect}): reloaded"
end
result
end | ruby | {
"resource": ""
} |
q21289 | RConfig.CoreMethods.check_for_changes | train | def check_for_changes(name=nil)
changed = []
if name == nil
self.cache_hash.keys.dup.each do |name|
if reload_on_change(name)
changed << name
end
end
else
name = name.to_s
if reload_on_change(name)
changed << name
end
end
logger.debug "check_for_changes(#{name.inspect}) => #{changed.inspect}"
changed
end | ruby | {
"resource": ""
} |
q21290 | RConfig.CoreMethods.reload_on_change | train | def reload_on_change(name)
logger.debug "reload_on_change(#{name.inspect}), reload_disabled=#{self.reload_disabled?}"
if changed = config_changed?(name) && reload?
if self.cache_hash[name]
flush_cache(name) # flush cached config values.
fire_on_load(name) # force on_load triggers.
end
end
changed
end | ruby | {
"resource": ""
} |
q21291 | RConfig.CoreMethods.with_file | train | def with_file(name, *args)
logger.debug "with_file(#{name.inspect}, #{args.inspect})"
result = args.inject(config_for(name)) { |v, i|
logger.debug "v = #{v.inspect}, i = #{i.inspect}"
case v
when Hash
v[i.to_s]
when Array
i.is_a?(Integer) ? v[i] : nil
else
nil
end
}
logger.debug "with_file(#{name.inspect}, #{args.inspect}) => #{result.inspect}"
result
end | ruby | {
"resource": ""
} |
q21292 | RConfig.CoreMethods.config_for | train | def config_for(name)
name = name.to_s
check_for_changes(name) if auto_check?(name)
data = get_config_data(name)
logger.debug "config_for(#{name.inspect}) => #{data.inspect}"
data
end | ruby | {
"resource": ""
} |
q21293 | RConfig.CoreMethods.method_missing | train | def method_missing(method, * args)
value = with_file(method, * args)
logger.debug "#{self}.method_missing(#{method.inspect}, #{args.inspect}) => #{value.inspect}"
value
end | ruby | {
"resource": ""
} |
q21294 | SimplerTiles.VectorLayer.query | train | def query(sql, &blk)
layer = SimplerTiles::Query.new(sql, &blk)
add_query layer
end | ruby | {
"resource": ""
} |
q21295 | SimplerTiles.Map.layer | train | def layer(source, &blk)
layer = SimplerTiles::VectorLayer.new(source, &blk)
add_vector_layer layer
end | ruby | {
"resource": ""
} |
q21296 | SimplerTiles.Map.raster_layer | train | def raster_layer(source, &blk)
layer = SimplerTiles::RasterLayer.new(source, &blk)
add_raster_layer layer
end | ruby | {
"resource": ""
} |
q21297 | SimplerTiles.Map.ar_layer | train | def ar_layer(&blk)
if !defined?(ActiveRecord)
raise "ActiveRecord not available"
end
config = ActiveRecord::Base.connection.instance_variable_get("@config")
params = {
:dbname => config[:database],
:user => config[:username],
:host => config[:host],
:port => config[:port],
:password => config[:password]
}
conn = "PG:" + params.reject {|k,v| v.nil? }.map {|k,v| "#{k}=#{v}"}.join(' ')
layer conn, &blk
end | ruby | {
"resource": ""
} |
q21298 | SimplerTiles.Map.to_png | train | def to_png
data = ""
to_png_stream Proc.new { |chunk| data += chunk }
yield data if block_given?
data
end | ruby | {
"resource": ""
} |
q21299 | SimplerTiles.Query.styles | train | def styles(styles)
styles.each do |k,v|
style = SimplerTiles::Style.new k, v
add_style style
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.