_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q16900 | Logging.ColorScheme.[]= | train | def []=( color_tag, constants )
# Map a color tag to one or more ANSI constant names; multiple constants
# are each resolved via to_constant and concatenated into one string.
@scheme[to_key(color_tag)] = constants.respond_to?(:map) ?
constants.map { |c| to_constant(c) }.join : to_constant(constants)
end | ruby | {
"resource": ""
} |
q16901 | Logging.ColorScheme.to_constant | train | def to_constant( v )
# Resolve a color name to the matching ColorScheme constant, or nil.
v = v.to_s.upcase
# const_defined? with an `inherit` flag is not supported on very old
# Rubies; the rescue falls back to the one-argument form.
ColorScheme.const_get(v) if (ColorScheme.const_defined?(v, false) rescue ColorScheme.const_defined?(v))
end | ruby | {
"resource": ""
} |
q16902 | Logging.Appender.encoding= | train | def encoding=( value )
# Set the appender's output encoding; nil clears it. On Rubies without
# the Encoding class the value is silently ignored.
if value.nil?
@encoding = nil
else
@encoding = Object.const_defined?(:Encoding) ? Encoding.find(value.to_s) : nil
end
end | ruby | {
"resource": ""
} |
q16903 | Logging.Appender.allow | train | def allow( event )
# Return the event if it passes the level check and every filter;
# nil means the event should not be appended. Filters may replace
# the event, so the loop reassigns it on each pass.
return nil if @level > event.level
@filters.each do |filter|
break unless event = filter.allow(event)
end
event
end | ruby | {
"resource": ""
} |
q16904 | Logging.MappedDiagnosticContext.context | train | def context
# Lazily fetch (or build) the per-thread MDC hash. If a context stack
# already exists it is flattened into the hash; otherwise start empty.
c = Thread.current.thread_variable_get(NAME)
if c.nil?
c = if Thread.current.thread_variable_get(STACK_NAME)
flatten(stack)
else
Hash.new
end
Thread.current.thread_variable_set(NAME, c)
end
return c
end | ruby | {
"resource": ""
} |
q16905 | Logging.MappedDiagnosticContext.stack | train | def stack
# Lazily create the per-thread context stack, seeded with one empty frame.
s = Thread.current.thread_variable_get(STACK_NAME)
if s.nil?
s = [{}]
Thread.current.thread_variable_set(STACK_NAME, s)
end
return s
end | ruby | {
"resource": ""
} |
q16906 | Logging.MappedDiagnosticContext.sanitize | train | def sanitize( hash, target = {} )
# Copy `hash` into `target`, stringifying keys. Raises ArgumentError
# for non-Hash input; values are kept as-is.
unless hash.is_a?(Hash)
raise ArgumentError, "Expecting a Hash but received a #{hash.class.name}"
end
hash.each { |k,v| target[k.to_s] = v }
return target
end | ruby | {
"resource": ""
} |
q16907 | Logging.NestedDiagnosticContext.context | train | def context
# Lazily create the per-thread NDC array.
c = Thread.current.thread_variable_get(NAME)
if c.nil?
c = Array.new
Thread.current.thread_variable_set(NAME, c)
end
return c
end | ruby | {
"resource": ""
} |
q16908 | Logging::Layouts.Parseable.iso8601_format | train | def iso8601_format( time )
# Format a Time as ISO-8601 with microseconds and a numeric UTC offset
# ("Z" when the offset is zero).
value = apply_utc_offset(time)
str = value.strftime('%Y-%m-%dT%H:%M:%S')
str << ('.%06d' % value.usec)
offset = value.gmt_offset.abs
return str << 'Z' if offset == 0
# gmt_offset is in seconds; render as +/-HH:MM
offset = sprintf('%02d:%02d', offset / 3600, offset % 3600 / 60)
return str << (value.gmt_offset < 0 ? '-' : '+') << offset
end | ruby | {
"resource": ""
} |
q16909 | CfnDsl.JSONable.as_json | train | def as_json(_options = {})
# Build a hash from instance variables: "@__foo" is exposed as "_foo"
# (one underscore stripped) and "@_foo" is hidden entirely.
hash = {}
instance_variables.each do |var|
name = var[1..-1]
if name =~ /^__/
# if a variable starts with double underscore, strip one off
name = name[1..-1]
elsif name =~ /^_/
# Hide variables that start with single underscore
name = nil
end
hash[name] = instance_variable_get(var) if name
end
hash
end | ruby | {
"resource": ""
} |
q16910 | OpenTok.Broadcasts.find | train | def find(broadcast_id)
# Fetch a broadcast by id and wrap the JSON response in a Broadcast.
raise ArgumentError, "broadcast_id not provided" if broadcast_id.to_s.empty?
broadcast_json = @client.get_broadcast(broadcast_id.to_s)
Broadcast.new self, broadcast_json
end | ruby | {
"resource": ""
} |
q16911 | OpenTok.Broadcasts.stop | train | def stop(broadcast_id)
# Stop a running broadcast and return the updated Broadcast object.
raise ArgumentError, "broadcast_id not provided" if broadcast_id.to_s.empty?
broadcast_json = @client.stop_broadcast(broadcast_id)
Broadcast.new self, broadcast_json
end | ruby | {
"resource": ""
} |
q16912 | OpenTok.OpenTok.create_session | train | def create_session(opts={})
# Create a new OpenTok session. Recognized opts: :media_mode (:relayed
# or :routed), :location (IPv4 string), :archive_mode (:always/:manual).
# Returns a Session; unknown opts are dropped.
# normalize opts so all keys are symbols and only include valid_opts
valid_opts = [ :media_mode, :location, :archive_mode ]
opts = opts.inject({}) do |m,(k,v)|
if valid_opts.include? k.to_sym
m[k.to_sym] = v
end
m
end
# keep opts around for Session constructor, build REST params
params = opts.clone
# anything other than :relayed sets the REST param to "disabled", in which case we force
# opts to be :routed. if we were more strict we could raise an error when the value isn't
# either :relayed or :routed
if params.delete(:media_mode) == :routed
params["p2p.preference"] = "disabled"
else
params["p2p.preference"] = "enabled"
opts[:media_mode] = :relayed
end
# location is optional, but it has to be an IP address if specified at all
unless params[:location].nil?
raise "location must be an IPv4 address" unless params[:location] =~ Resolv::IPv4::Regex
end
# archive mode is optional, but it has to be one of the valid values if present
unless params[:archive_mode].nil?
raise "archive mode must be either always or manual" unless ARCHIVE_MODES.include? params[:archive_mode].to_sym
end
raise "A session with always archive mode must also have the routed media mode." if (params[:archive_mode] == :always && params[:media_mode] == :relayed)
response = client.create_session(params)
Session.new api_key, api_secret, response['sessions']['Session']['session_id'], opts
end | ruby | {
"resource": ""
} |
q16913 | OpenTok.Streams.all | train | def all(session_id)
# List all streams in a session; returns a StreamList.
raise ArgumentError, 'session_id not provided' if session_id.to_s.empty?
response_json = @client.info_stream(session_id, '')
StreamList.new response_json
end | ruby | {
"resource": ""
} |
q16914 | OpenTok.Archives.find | train | def find(archive_id)
# Fetch an archive by id and wrap the JSON response in an Archive.
raise ArgumentError, "archive_id not provided" if archive_id.to_s.empty?
archive_json = @client.get_archive(archive_id.to_s)
Archive.new self, archive_json
end | ruby | {
"resource": ""
} |
q16915 | OpenTok.Archives.all | train | def all(options = {})
# List archives. Supported options: :offset, :count (0..1000), :sessionId.
raise ArgumentError, "Limit is invalid" unless options[:count].nil? or (0..1000).include? options[:count]
archive_list_json = @client.list_archives(options[:offset], options[:count], options[:sessionId])
ArchiveList.new self, archive_list_json
end | ruby | {
"resource": ""
} |
q16916 | OpenTok.Archives.stop_by_id | train | def stop_by_id(archive_id)
# Stop a recording archive and return the updated Archive object.
raise ArgumentError, "archive_id not provided" if archive_id.to_s.empty?
archive_json = @client.stop_archive(archive_id)
Archive.new self, archive_json
end | ruby | {
"resource": ""
} |
q16917 | OpenTok.Archives.delete_by_id | train | def delete_by_id(archive_id)
# Delete an archive; truthy when the HTTP status is in 200..300.
# NOTE(review): presumably response.code is numeric here — with raw
# Net::HTTP it would be a String and never match; verify the client.
raise ArgumentError, "archive_id not provided" if archive_id.to_s.empty?
response = @client.delete_archive(archive_id)
(200..300).include? response.code
end | ruby | {
"resource": ""
} |
q16918 | EmailSpec.Helpers.find_email | train | def find_email(address, opts={})
# Find a message in the mailbox for `address`. Filter with one of
# :with_subject, :with_text (String or Regexp) or :from; with no opts
# the first message is returned. String filters are regex-escaped.
address = convert_address(address)
if opts[:with_subject]
expected_subject = (opts[:with_subject].is_a?(String) ? Regexp.escape(opts[:with_subject]) : opts[:with_subject])
mailbox_for(address).find { |m| m.subject =~ Regexp.new(expected_subject) }
elsif opts[:with_text]
expected_text = (opts[:with_text].is_a?(String) ? Regexp.escape(opts[:with_text]) : opts[:with_text])
mailbox_for(address).find { |m| m.default_part_body =~ Regexp.new(expected_text) }
elsif opts[:from]
mailbox_for(address).find { |m| m.from.include? opts[:from] }
else
mailbox_for(address).first
end
end | ruby | {
"resource": ""
} |
q16919 | Ferro.Router.path_to_parts | train | def path_to_parts(path)
# Split a URL path into lowercased, stripped, non-empty segments.
path.
downcase.
split('/').
map { |part| part.empty? ? nil : part.strip }.
compact
end | ruby | {
"resource": ""
} |
q16920 | Ferro.Router.navigated | train | def navigated
# Handle a navigation event: match the current location against the
# route table and invoke the route callback (or the 404 handler).
url = get_location
@params = []
idx = match(path_to_parts(decode(url.pathname)), decode(url.search))
if idx
@routes[idx][:callback].call(@params)
else
@page404.call(url.pathname)
end
end | ruby | {
"resource": ""
} |
q16921 | Ferro.Router.match | train | def match(path, search)
matches = get_matches(path)
if matches.length > 0
match = matches.sort { |m| m[1] }.first
@params = match[2]
add_search_to_params(search)
match[0]
else
nil
end
end | ruby | {
"resource": ""
} |
q16922 | Ferro.Router.get_matches | train | def get_matches(path)
# Score every route against `path`; return [index, score, params]
# triples for routes that scored above zero.
matches = []
@routes.each_with_index do |route, i|
score, pars = score_route(route[:parts], path)
matches << [i, score, pars] if score > 0
end
matches
end | ruby | {
"resource": ""
} |
q16923 | Ferro.Router.score_route | train | def score_route(parts, path)
# Score a route pattern against a path of equal length: +2 per exact
# segment match, +1 per ":param" placeholder (captured into pars).
# Returns [score, pars]; score 0 means no match.
score = 0
pars = {}
if parts.length == path.length
parts.each_with_index do |part, i|
if part[0] == ':'
score += 1
pars["#{part[1..-1]}"] = path[i]
elsif part == path[i].downcase
score += 2
end
end
end
return score, pars
end | ruby | {
"resource": ""
} |
q16924 | Ferro.Router.add_search_to_params | train | def add_search_to_params(search)
# Merge "?key=value&..." query-string pairs into @params; pairs
# without exactly one '=' are ignored.
if !search.empty?
pars = search[1..-1].split('&')
pars.each do |par|
pair = par.split('=')
@params[ pair[0] ] = pair[1] if pair.length == 2
end
end
end | ruby | {
"resource": ""
} |
q16925 | Ferro.Factory.dasherize | train | def dasherize(class_name)
# Convert a class name to a CSS-style identifier: "Ns::FooBar" ->
# "foo-bar". Names without uppercase/colon/underscore pass through.
return class_name if class_name !~ /[A-Z:_]/
c = class_name.to_s.gsub('::', '')
(c[0] + c[1..-1].gsub(/[A-Z]/){ |c| "-#{c}" }).
downcase.
gsub('_', '-')
end | ruby | {
"resource": ""
} |
q16926 | Ferro.Factory.composite_state | train | def composite_state(class_name, state)
# Resolve CSS classes for a state via the compositor mapping; fall
# back to the dasherized state name when no mapping exists.
if @compositor
list = @compositor.css_classes_for("#{class_name}::#{state}")
return list if !list.empty?
end
[ dasherize(state) ]
end | ruby | {
"resource": ""
} |
q16927 | Ferro.Factory.composite_classes | train | def composite_classes(target, element, add_superclass)
# Apply compositor CSS classes for the target's class (and optionally
# its superclass) to `element`. No-op without a compositor.
if @compositor
composite_for(target.class.name, element)
if add_superclass
composite_for(target.class.superclass.name, element)
end
end
end | ruby | {
"resource": ""
} |
q16928 | Ferro.Compositor.css_classes_for_map | train | def css_classes_for_map(classname, mapping)
css = mapping[classname]
css.class == String ? css_classes_for_map(css, mapping) : (css || [])
end | ruby | {
"resource": ""
} |
q16929 | Ferro.Compositor.switch_theme | train | def switch_theme(root_element, theme)
# Swap every element's CSS classes from the current theme mapping to
# the new theme's mapping (for both the class and its superclass),
# then remember the new mapping.
old_map = @mapping
new_map = map(theme)
root_element.each_child do |e|
old_classes = css_classes_for_map e.class.name, old_map
new_classes = css_classes_for_map e.class.name, new_map
update_element_css_classes(e, old_classes, new_classes)
old_classes = css_classes_for_map e.class.superclass.name, old_map
new_classes = css_classes_for_map e.class.superclass.name, new_map
update_element_css_classes(e, old_classes, new_classes)
end
@mapping = new_map
end | ruby | {
"resource": ""
} |
q16930 | Ferro.Elementary._stylize | train | def _stylize
# Render the element's `style` Hash into an inline "k:v;" style
# attribute. Non-Hash return values from style are ignored.
styles = style
if styles.class == Hash
set_attribute(
'style',
styles.map { |k, v| "#{k}:#{v};" }.join
)
end
end | ruby | {
"resource": ""
} |
q16931 | Ferro.Elementary.add_child | train | def add_child(name, element_class, options = {})
# Instantiate `element_class` as a child of this element under a
# symbolized name; duplicate or reserved names raise.
sym = symbolize(name)
raise "Child '#{sym}' already defined" if @children.has_key?(sym)
raise "Illegal name (#{sym})" if RESERVED_NAMES.include?(sym)
@children[sym] = element_class.new(self, sym, options)
end | ruby | {
"resource": ""
} |
q16932 | Ferro.Elementary.each_child | train | def each_child(&block)
# Depth-first traversal: yield self, then recurse into every child.
# Silently does nothing when no block is given.
if block_given?
block.call self
@children.each do |_, child|
child.each_child(&block)
end
end
end | ruby | {
"resource": ""
} |
q16933 | Ferro.I18n._replace_options | train | def _replace_options(string, options)
# Unescape the string so we can use the returned string
# to set an elements inner html.
s = string.gsub('<', '<').gsub('>', '>')
if options
# But escape option values to prevent code injection
s.gsub(/%\{(\w+)\}/) do |m|
key = ($1 || m.tr("%{}", ""))
if options.key?(key)
options[key].to_s.gsub('<', '<').gsub('>', '>')
else
key
end
end
else
s
end
end | ruby | {
"resource": ""
} |
q16934 | Ferro.BaseElement.option_replace | train | def option_replace(key, default = nil)
# Pop `key` out of @options, returning its value or `default`.
value = @options[key] || default
@options.delete(key) if @options.has_key?(key)
value
end | ruby | {
"resource": ""
} |
q16935 | Ferro.BaseElement.update_state | train | def update_state(state, active)
# Set the named state's active flag and refresh its CSS classes.
# A nil `active` leaves everything untouched.
if !active.nil?
@states.each do |s, v|
v[1] = active if s == state
classify_state v
end
end
end | ruby | {
"resource": ""
} |
q16936 | Ferro.BaseElement.toggle_state | train | def toggle_state(state)
# Flip the active flag of the named state and refresh its CSS classes.
@states.select { |s, _| s == state }.each do |s, v|
v[1] = !v[1]
classify_state v
end
end | ruby | {
"resource": ""
} |
q16937 | Mailman.Router.route | train | def route(message)
# Dispatch an incoming message: bounce handler first, then the first
# matching route (class, "Class#method" string, or block), falling
# back to the default block. Route params are merged into @params.
@params.clear
@message = message
result = nil
if @bounce_block and message.respond_to?(:bounced?) and message.bounced?
return instance_exec(&@bounce_block)
end
routes.each do |route|
break if result = route.match!(message)
end
if result
@params.merge!(result[:params])
if !result[:klass].nil?
if result[:klass].is_a?(Class) # no instance method specified
result[:klass].new.send(:receive, @message, @params)
elsif result[:klass].kind_of?(String) # instance method specified
klass, method = result[:klass].split('#')
klass.camelize.constantize.new.send(method.to_sym, @message, @params)
end
elsif result[:block].arity > 0
instance_exec(*result[:args], &result[:block])
else
instance_exec(&result[:block])
end
elsif @default_block
instance_exec(&@default_block)
end
end | ruby | {
"resource": ""
} |
q16938 | Mailman.Route.match! | train | def match!(message)
# Test all conditions against the message; nil if any fails, otherwise
# a hash with the route's block/class plus merged params and args.
params = {}
args = []
@conditions.each do |condition|
if result = condition.match(message)
params.merge!(result[0])
args += result[1]
else
return nil
end
end
{ :block => @block, :klass => @klass, :params => params, :args => args }
end | ruby | {
"resource": ""
} |
q16939 | Mailman.Application.run | train | def run
# Main entry point: optionally load a Rails environment, install the
# graceful-shutdown trap, then process messages from exactly one
# source, checked in priority order: STDIN, IMAP, POP3, HTTP, Maildir.
Mailman.logger.info "Mailman v#{Mailman::VERSION} started"
if config.rails_root
rails_env = File.join(config.rails_root, 'config', 'environment.rb')
if File.exist?(rails_env) && !(defined?(::Rails) && ::Rails.env)
Mailman.logger.info "Rails root found in #{config.rails_root}, requiring environment..."
require rails_env
end
end
if config.graceful_death
# When user presses CTRL-C, finish processing current message before exiting
Signal.trap("INT") { @polling_interrupt = true }
end
# STDIN
if !IS_WINDOWS && !config.ignore_stdin && $stdin.fcntl(Fcntl::F_GETFL, 0) == 0
Mailman.logger.debug "Processing message from STDIN."
@processor.process($stdin.read)
# IMAP
elsif config.imap
options = {:processor => @processor}.merge(config.imap)
Mailman.logger.info "IMAP receiver enabled (#{options[:username]}@#{options[:server]})."
polling_loop Receiver::IMAP.new(options)
# POP3
elsif config.pop3
options = {:processor => @processor}.merge(config.pop3)
Mailman.logger.info "POP3 receiver enabled (#{options[:username]}@#{options[:server]})."
polling_loop Receiver::POP3.new(options)
# HTTP
elsif config.http
options = {:processor => @processor}.merge(config.http)
Mailman.logger.info "HTTP server started"
Receiver::HTTP.new(options).start_and_block
# Maildir
elsif config.maildir
Mailman.logger.info "Maildir receiver enabled (#{config.maildir})."
Mailman.logger.debug "Processing new message queue..."
@maildir.list(:new).each do |message|
@processor.process_maildir_message(message)
end
if config.watch_maildir
require 'listen'
Mailman.logger.debug "Monitoring the Maildir for new messages..."
base = Pathname.new(@maildir.path)
callback = Proc.new do |modified, added, removed|
added.each do |new_file|
message = Maildir::Message.new(@maildir, Pathname.new(new_file).relative_path_from(base).to_s)
@processor.process_maildir_message(message)
end
end
@listener = Listen::Listener.new(File.join(@maildir.path, 'new'), &callback)
@listener.start
sleep
end
end
end | ruby | {
"resource": ""
} |
q16940 | Mailman.Application.polling_loop | train | def polling_loop(connection)
# Repeatedly connect and fetch messages from `connection`. On I/O
# errors retries up to 5 times (disconnecting best-effort between
# attempts). When polling is disabled runs exactly once; otherwise
# sleeps poll_interval between iterations.
if polling?
polling_msg = "Polling enabled. Checking every #{config.poll_interval} seconds."
else
polling_msg = "Polling disabled. Checking for messages once."
end
Mailman.logger.info(polling_msg)
tries ||= 5
loop do
begin
connection.connect
connection.get_messages
rescue SystemCallError, EOFError => e
Mailman.logger.error e.message
unless (tries -= 1).zero?
Mailman.logger.error "Retrying..."
begin
connection.disconnect
rescue # don't crash in the crash handler
end
retry
end
ensure
connection.started? && connection.disconnect
end
break unless polling?
sleep config.poll_interval
end
end | ruby | {
"resource": ""
} |
q16941 | Hypernova.ControllerHelpers.hypernova_batch_render | train | def hypernova_batch_render(job)
# Queue a render job on the current batch and return a template-safe
# placeholder token (mapped back to the batch token for later
# substitution). Requires hypernova_batch_before to have run.
if @hypernova_batch.nil?
raise NilBatchError.new('called hypernova_batch_render without calling '\
'hypernova_batch_before. Check your around_filter for :hypernova_render_support')
end
batch_token = @hypernova_batch.render(job)
template_safe_token = Hypernova.render_token(batch_token)
@hypernova_batch_mapping[template_safe_token] = batch_token
template_safe_token
end | ruby | {
"resource": ""
} |
q16942 | Hypernova.ControllerHelpers.render_react_component | train | def render_react_component(component, data = {})
# Queue a React component render. get_view_data may enrich `data`;
# if it raises, on_error is notified and the raw data is used.
begin
new_data = get_view_data(component, data)
rescue StandardError => e
on_error(e)
new_data = data
end
job = {
:data => new_data,
:name => component,
}
hypernova_batch_render(job)
end | ruby | {
"resource": ""
} |
q16943 | Hypernova.ControllerHelpers.hypernova_batch_after | train | def hypernova_batch_after
# Submit the accumulated batch to the Hypernova service (or the
# client-side fallback on error / when send_request? declines) and
# splice the rendered results into the response body in place of the
# placeholder tokens.
if @hypernova_batch.nil?
raise NilBatchError.new('called hypernova_batch_after without calling '\
'hypernova_batch_before. Check your around_filter for :hypernova_render_support')
end
return if @hypernova_batch.empty?
jobs = @hypernova_batch.jobs
hash = jobs.each_with_object({}) do |job, h|
h[job[:name]] = job
end
hash = prepare_request(hash, hash)
if send_request?(hash)
begin
will_send_request(hash)
result = @hypernova_batch.submit!
on_success(result, hash)
rescue StandardError => e
on_error(e, nil, hash)
result = @hypernova_batch.submit_fallback!
end
else
result = @hypernova_batch.submit_fallback!
end
new_body = Hypernova.replace_tokens_with_result(
response.body,
@hypernova_batch_mapping,
result
)
response.body = new_body
end | ruby | {
"resource": ""
} |
q16944 | Hypernova.Batch.jobs_hash | train | def jobs_hash
# Index jobs by their (stringified) position: {"0" => job0, ...}.
hash = {}
jobs.each_with_index { |job, idx| hash[idx.to_s] = job }
hash
end | ruby | {
"resource": ""
} |
q16945 | RSpec.Its.its | train | def its(attribute, *options, &block)
# RSpec sugar: generate a nested describe whose subject is an
# attribute of the outer subject. `attribute` may be a dotted method
# chain ("a.b.c"), or an Array used as hash-dig keys / [] arguments.
# Defines is_expected/are_expected, will/will_not (block matchers
# only) and should/should_not against the derived subject.
its_caller = caller.select {|file_line| file_line !~ %r(/lib/rspec/its) }
describe(attribute.to_s, :caller => its_caller) do
let(:__its_subject) do
if Array === attribute
if Hash === subject
attribute.inject(subject) {|inner, attr| inner[attr] }
else
subject[*attribute]
end
else
attribute_chain = attribute.to_s.split('.')
attribute_chain.inject(subject) do |inner_subject, attr|
inner_subject.send(attr)
end
end
end
def is_expected
expect(__its_subject)
end
alias_method :are_expected, :is_expected
def will(matcher=nil, message=nil)
unless matcher.supports_block_expectations?
raise ArgumentError, "`will` only supports block expectations"
end
expect { __its_subject }.to matcher, message
end
def will_not(matcher=nil, message=nil)
unless matcher.supports_block_expectations?
raise ArgumentError, "`will_not` only supports block expectations"
end
expect { __its_subject }.to_not matcher, message
end
def should(matcher=nil, message=nil)
RSpec::Expectations::PositiveExpectationHandler.handle_matcher(__its_subject, matcher, message)
end
def should_not(matcher=nil, message=nil)
RSpec::Expectations::NegativeExpectationHandler.handle_matcher(__its_subject, matcher, message)
end
options << {} unless options.last.kind_of?(Hash)
options.last.merge!(:caller => its_caller)
example(nil, *options, &block)
end
end | ruby | {
"resource": ""
} |
q16946 | Hue.Light.refresh | train | def refresh
# Re-fetch this light's JSON state from the bridge and apply it.
json = JSON(Net::HTTP.get(URI.parse(base_url)))
unpack(json)
end | ruby | {
"resource": ""
} |
q16947 | Spectator.Timer.record | train | def record(nanos)
# Record one timed event (in nanoseconds) into the atomic counters;
# negative durations are ignored.
return if nanos < 0
@count.add_and_get(1)
@total_time.add_and_get(nanos)
@total_sq.add_and_get(nanos * nanos)
@max.max(nanos)
end | ruby | {
"resource": ""
} |
q16948 | Spectator.MeterId.with_tag | train | def with_tag(key, value)
# Return a new MeterId with one extra tag; self is left unchanged.
new_tags = @tags.dup
new_tags[key] = value
MeterId.new(@name, new_tags)
end | ruby | {
"resource": ""
} |
q16949 | Spectator.MeterId.key | train | def key
if @key.nil?
hash_key = @name.to_s
@key = hash_key
keys = @tags.keys
keys.sort
keys.each do |k|
v = tags[k]
hash_key += "|#{k}|#{v}"
end
@key = hash_key
end
@key
end | ruby | {
"resource": ""
} |
q16950 | Spectator.DistributionSummary.record | train | def record(amount)
# Record one sample into the atomic counters; negatives are ignored.
return if amount < 0
@count.add_and_get(1)
@total_amount.add_and_get(amount)
@total_sq.add_and_get(amount * amount)
@max.max(amount)
end | ruby | {
"resource": ""
} |
q16951 | Spectator.DistributionSummary.measure | train | def measure
# Drain the accumulated statistics into four Measures (count,
# totalAmount, totalOfSquares, max), resetting each counter.
cnt = Measure.new(@id.with_stat('count'), @count.get_and_set(0))
tot = Measure.new(@id.with_stat('totalAmount'),
@total_amount.get_and_set(0))
tot_sq = Measure.new(@id.with_stat('totalOfSquares'),
@total_sq.get_and_set(0))
mx = Measure.new(@id.with_stat('max'), @max.get_and_set(Float::NAN))
[cnt, tot, tot_sq, mx]
end | ruby | {
"resource": ""
} |
q16952 | Spectator.Http.post_json | train | def post_json(endpoint, payload)
# POST `payload` as JSON to `endpoint`. Returns 400 on any transport
# error; otherwise delegates to res.value (which raises on non-2xx).
s = payload.to_json
uri = URI(endpoint)
http = Net::HTTP.new(uri.host, uri.port)
req = Net::HTTP::Post.new(uri.path, 'Content-Type' => 'application/json')
req.body = s
begin
res = http.request(req)
rescue StandardError => e
Spectator.logger.info("Cause #{e.cause} - msg=#{e.message}")
return 400
end
res.value
end | ruby | {
"resource": ""
} |
q16953 | Spectator.Publisher.stop | train | def stop
# Stop the background publish thread and flush one final batch of
# metrics. No-op (with a warning) if start was never called.
unless @started
Spectator.logger.info('Attemping to stop Spectator ' \
'without a previous call to start')
return
end
@should_stop = true
Spectator.logger.info('Stopping spectator')
@publish_thread.kill if @publish_thread
@started = false
Spectator.logger.info('Sending last batch of metrics before exiting')
send_metrics_now
end | ruby | {
"resource": ""
} |
q16954 | Spectator.Publisher.op_for_measurement | train | def op_for_measurement(measure)
# Map a measurement's :statistic tag to its aggregation op code,
# defaulting to UNKNOWN_OP for unrecognized statistics.
stat = measure.id.tags.fetch(:statistic, :unknown)
OPS.fetch(stat, UNKNOWN_OP)
end | ruby | {
"resource": ""
} |
q16955 | Spectator.Publisher.should_send | train | def should_send(measure)
# Only ship meaningful values: positive values for additive stats,
# non-NaN values for max stats; everything else is dropped.
op = op_for_measurement(measure)
return measure.value > 0 if op == ADD_OP
return !measure.value.nan? if op == MAX_OP
false
end | ruby | {
"resource": ""
} |
q16956 | Spectator.Publisher.build_string_table | train | def build_string_table(measurements)
# Build the string-interning table for the wire payload: collect every
# distinct string (common tag keys/values, metric names, tag keys and
# values, plus :name), then assign each its index in sorted order.
common_tags = @registry.common_tags
table = {}
common_tags.each do |k, v|
table[k] = 0
table[v] = 0
end
table[:name] = 0
measurements.each do |m|
table[m.id.name] = 0
m.id.tags.each do |k, v|
table[k] = 0
table[v] = 0
end
end
keys = table.keys.sort
keys.each_with_index do |str, index|
table[str] = index
end
table
end | ruby | {
"resource": ""
} |
q16957 | Spectator.Publisher.payload_for_measurements | train | def payload_for_measurements(measurements)
# Serialize a batch into the wire format: [table size, sorted strings,
# then each measurement encoded against the string table].
table = build_string_table(measurements)
payload = []
payload.push(table.length)
strings = table.keys.sort
payload.concat(strings)
measurements.each { |m| append_measurement(payload, table, m) }
payload
end | ruby | {
"resource": ""
} |
q16958 | Spectator.Publisher.send_metrics_now | train | def send_metrics_now
# Synchronously publish current registry measurements to the
# configured URI, split into batches of registry.batch_size.
ms = registry_measurements
if ms.empty?
Spectator.logger.debug 'No measurements to send'
else
uri = @registry.config[:uri]
ms.each_slice(@registry.batch_size) do |batch|
payload = payload_for_measurements(batch)
Spectator.logger.info "Sending #{batch.length} measurements to #{uri}"
@http.post_json(uri, payload)
end
end
end | ruby | {
"resource": ""
} |
q16959 | Transitions.Event.timestamp= | train | def timestamp=(values)
# Append timestamp specs to @timestamps. Each value must be true
# (auto-derive the attribute name), a String, or a Symbol.
values.each do |value|
case value
when String, Symbol, TrueClass
@timestamps << value
else
fail ArgumentError, 'timestamp must be either: true, a String or a Symbol'
end
end
end | ruby | {
"resource": ""
} |
q16960 | Transitions.Event.timestamp_attribute_name | train | def timestamp_attribute_name(obj, next_state, user_timestamp)
# `true` means derive the attribute name from the target state;
# otherwise the user-supplied name is used verbatim.
user_timestamp == true ? default_timestamp_name(obj, next_state) : user_timestamp
end | ruby | {
"resource": ""
} |
q16961 | RailsEmailPreview.EmailsController.show | train | def show
# Render the email preview page: builds the mail in the selected email
# locale and renders its body inside the preview layout. Raises if the
# preview class doesn't implement preview_mail.
prevent_browser_caching
cms_edit_links!
with_email_locale do
if @preview.respond_to?(:preview_mail)
@mail, body = mail_and_body
@mail_body_html = render_to_string(inline: body, layout: 'rails_email_preview/email')
else
raise ArgumentError.new("#{@preview} is not a preview class, does not respond_to?(:preview_mail)")
end
end
end | ruby | {
"resource": ""
} |
q16962 | RailsEmailPreview.EmailsController.show_headers | train | def show_headers
# Render just the mail headers partial for the previewed email.
mail = with_email_locale { mail_and_body.first }
render partial: 'rails_email_preview/emails/headers', locals: {mail: mail}
end | ruby | {
"resource": ""
} |
q16963 | RailsEmailPreview.EmailsController.show_body | train | def show_body
# Render only the mail body (in the email layout), e.g. for an iframe.
prevent_browser_caching
cms_edit_links!
with_email_locale do
_, body = mail_and_body
render inline: body, layout: 'rails_email_preview/email'
end
end | ruby | {
"resource": ""
} |
q16964 | Taps.Utils.incorrect_blobs | train | def incorrect_blobs(db, table)
# For MySQL databases only: return the columns of `table` whose db_type
# is a text variant (candidates for blob mis-detection). Other adapters
# get an empty array.
return [] if (db.url =~ /mysql:\/\//).nil?
columns = []
db.schema(table).each do |data|
column, cdata = data
columns << column if cdata[:db_type] =~ /text/
end
columns
end | ruby | {
"resource": ""
} |
q16965 | Taps.Utils.server_error_handling | train | def server_error_handling(&blk)
# Run the block, translating Sequel duplicate-key errors into
# Taps::DuplicatePrimaryKeyError; other database errors re-raise.
begin
blk.call
rescue Sequel::DatabaseError => e
if e.message =~ /duplicate key value/i
raise Taps::DuplicatePrimaryKeyError, e.message
else
raise
end
end
end | ruby | {
"resource": ""
} |
q16966 | Taps.DataStream.fetch_rows | train | def fetch_rows
# Fetch the next chunk of rows (ordered, offset by state[:offset]) and
# format them for transfer; chunk size statistics are updated.
state[:chunksize] = fetch_chunksize
ds = table.order(*order_by).limit(state[:chunksize], state[:offset])
log.debug "DataStream#fetch_rows SQL -> #{ds.sql}"
rows = Taps::Utils.format_data(ds.all,
:string_columns => string_columns,
:schema => db.schema(table_name),
:table => table_name
)
update_chunksize_stats
rows
end | ruby | {
"resource": ""
} |
q16967 | Taps.DataStream.fetch_remote_in_server | train | def fetch_remote_in_server(params)
# Server-side ingest: decode and checksum-verify the pushed rows, mark
# the stream complete on an empty payload, otherwise import the rows
# and return how many were imported (0 when complete).
json = self.class.parse_json(params[:json])
encoded_data = params[:encoded_data]
rows = parse_encoded_data(encoded_data, json[:checksum])
@complete = rows == { }
unless @complete
import_rows(rows)
rows[:data].size
else
0
end
end | ruby | {
"resource": ""
} |
q16968 | Cinch.Handler.stop | train | def stop
# Stop this handler: give each running handler thread 10 seconds to
# finish, then kill it. The waiting itself happens in helper threads
# so this call does not block for the full grace period.
@bot.loggers.debug "[Stopping handler] Stopping all threads of handler #{self}: #{@thread_group.list.size} threads..."
@thread_group.list.each do |thread|
Thread.new do
@bot.loggers.debug "[Ending thread] Waiting 10 seconds for #{thread} to finish..."
thread.join(10)
@bot.loggers.debug "[Killing thread] Killing #{thread}"
thread.kill
end
end
end | ruby | {
"resource": ""
} |
q16969 | Cinch.Handler.call | train | def call(message, captures, arguments)
# Execute the handler block in a new thread (tracked in the handler's
# thread group), passing regex captures + arguments after the bot's
# own args. Exceptions are logged, never propagated. Returns the thread.
bargs = captures + arguments
thread = Thread.new {
@bot.loggers.debug "[New thread] For #{self}: #{Thread.current} -- #{@thread_group.list.size} in total."
begin
if @execute_in_callback
@bot.callback.instance_exec(message, *@args, *bargs, &@block)
else
@block.call(message, *@args, *bargs)
end
rescue => e
@bot.loggers.exception(e)
ensure
@bot.loggers.debug "[Thread done] For #{self}: #{Thread.current} -- #{@thread_group.list.size - 1} remaining."
end
}
@thread_group.add(thread)
thread
end | ruby | {
"resource": ""
} |
q16970 | Cinch.Syncable.wait_until_synced | train | def wait_until_synced(attr)
# Block (polling every 0.1s) until `attr` has been synced. Warns every
# 10 seconds of waiting and raises SyncedAttributeNotAvailable after
# 30 seconds.
attr = attr.to_sym
waited = 0
while true
return if attribute_synced?(attr)
waited += 1
if waited % 100 == 0
bot.loggers.warn "A synced attribute ('%s' for %s) has not been available for %d seconds, still waiting" % [attr, self.inspect, waited / 10]
bot.loggers.warn caller.map {|s| " #{s}"}
if waited / 10 >= 30
bot.loggers.warn " Giving up..."
raise Exceptions::SyncedAttributeNotAvailable, "'%s' for %s" % [attr, self.inspect]
end
end
sleep 0.1
end
end | ruby | {
"resource": ""
} |
q16971 | Cinch.Configuration.load | train | def load(new_config, from_default = false)
# Merge `new_config` into this configuration. Nested Configuration
# values are merged recursively; plain Hash values replace the entry.
# With from_default, start from the class's default configuration.
if from_default
@table = self.class.default_config
end
new_config.each do |option, value|
if value.is_a?(Hash)
if self[option].is_a?(Configuration)
self[option].load(value)
else
# recursive merging is handled by subclasses like
# Configuration::Plugins
self[option] = value
end
else
self[option] = value
end
end
end | ruby | {
"resource": ""
} |
q16972 | Cinch.UserList.find | train | def find(nick)
# Look up a cached User by nick (case-mapped per the server's
# CASEMAPPING); the bot's own nick returns the bot itself.
if nick == @bot.nick
return @bot
end
downcased_nick = nick.irc_downcase(@bot.irc.isupport["CASEMAPPING"])
@mutex.synchronize do
return @cache[downcased_nick]
end
end | ruby | {
"resource": ""
} |
q16973 | Cinch.User.refresh | train | def refresh
# Unsync all attributes and issue a WHOIS to repopulate them. No-op
# while a WHOIS is already in flight. Networks that only accept one
# WHOIS argument get the short form.
return if @in_whois
@data.keys.each do |attr|
unsync attr
end
@in_whois = true
if @bot.irc.network.whois_only_one_argument?
@bot.irc.send "WHOIS #@name"
else
@bot.irc.send "WHOIS #@name #@name"
end
end | ruby | {
"resource": ""
} |
q16974 | Cinch.User.mask | train | def mask(s = "%n!%u@%h")
# Build a Mask from a format string: %n nick, %u user, %h host,
# %r realname, %a authname. Defaults to the standard nick!user@host.
s = s.gsub(/%(.)/) {
case $1
when "n"
@name
when "u"
self.user
when "h"
self.host
when "r"
self.realname
when "a"
self.authname
end
}
Mask.new(s)
end | ruby | {
"resource": ""
} |
q16975 | Cinch.User.monitor | train | def monitor
# Watch this user's online state: use the server's MONITOR extension
# when advertised, otherwise poll with WHOIS every 30 seconds.
if @bot.irc.isupport["MONITOR"] > 0
@bot.irc.send "MONITOR + #@name"
else
refresh
@monitored_timer = Timer.new(@bot, interval: 30) {
refresh
}
@monitored_timer.start
end
@monitored = true
end | ruby | {
"resource": ""
} |
q16976 | Cinch.User.online= | train | def online=(bool)
# Update the synced online state; when the user is monitored and the
# state actually changed, dispatch an :online/:offline event.
notify = self.__send__("online?_unsynced") != bool && @monitored
sync(:online?, bool, true)
return unless notify
if bool
@bot.handlers.dispatch(:online, nil, self)
else
@bot.handlers.dispatch(:offline, nil, self)
end
end | ruby | {
"resource": ""
} |
q16977 | Cinch.IRC.start | train | def start
# Connect and run the IRC session: negotiate capabilities/SASL, log
# in, then spin up reader/sender/ping threads. Blocks until the
# reading thread exits, then tears down the other two.
setup
if connect
@sasl_remaining_methods = @bot.config.sasl.mechanisms.reverse
send_cap_ls
send_login
reading_thread = start_reading_thread
sending_thread = start_sending_thread
ping_thread = start_ping_thread
reading_thread.join
sending_thread.kill
ping_thread.kill
end
end | ruby | {
"resource": ""
} |
q16978 | Cinch.Logger.log | train | def log(messages, event = :debug, level = event)
# Format and write one or more messages under @mutex; skipped entirely
# when `level` is below the logger's threshold. Output is transcoded
# to the locale encoding with unrepresentable characters replaced.
return unless will_log?(level)
@mutex.synchronize do
Array(messages).each do |message|
message = format_general(message)
message = format_message(message, event)
next if message.nil?
@output.puts message.encode("locale", {:invalid => :replace, :undef => :replace})
end
end
end | ruby | {
"resource": ""
} |
q16979 | Cinch.ChannelList.find_ensured | train | def find_ensured(name)
# Return the cached Channel for `name` (case-mapped), creating and
# caching it if necessary. Thread-safe via @mutex.
downcased_name = name.irc_downcase(@bot.irc.isupport["CASEMAPPING"])
@mutex.synchronize do
@cache[downcased_name] ||= Channel.new(name, @bot)
end
end | ruby | {
"resource": ""
} |
q16980 | Cinch.Channel.topic= | train | def topic=(new_topic)
# Set the channel topic; in strict mode, topics exceeding the server's
# TOPICLEN raise instead of being truncated by the server.
if new_topic.size > @bot.irc.isupport["TOPICLEN"] && @bot.strict?
raise Exceptions::TopicTooLong, new_topic
end
@bot.irc.send "TOPIC #@name :#{new_topic}"
end | ruby | {
"resource": ""
} |
q16981 | Cinch.Channel.kick | train | def kick(user, reason = nil)
# Kick a user, optionally with a reason; in strict mode a reason
# longer than the server's KICKLEN raises.
if reason.to_s.size > @bot.irc.isupport["KICKLEN"] && @bot.strict?
raise Exceptions::KickReasonTooLong, reason
end
@bot.irc.send("KICK #@name #{user} :#{reason}")
end | ruby | {
"resource": ""
} |
q16982 | Cinch.Channel.join | train | def join(key = nil)
# Join the channel; without an explicit key, fall back to the known
# channel key (unless it is the placeholder value `true`).
if key.nil? and self.key != true
key = self.key
end
@bot.irc.send "JOIN #{[@name, key].compact.join(" ")}"
end | ruby | {
"resource": ""
} |
q16983 | Cinch.Target.send | train | def send(text, notice = false)
# Send a PRIVMSG (or NOTICE) to this target, splitting each input line
# so the full on-wire message fits, and capping the number of messages
# at config.max_messages if set.
# TODO deprecate `notice` argument, put splitting into own
# method
text = text.to_s
split_start = @bot.config.message_split_start || ""
split_end = @bot.config.message_split_end || ""
command = notice ? "NOTICE" : "PRIVMSG"
# prefix approximates the on-wire overhead for length budgeting
prefix = ":#{@bot.mask} #{command} #{@name} :"
text.lines.map(&:chomp).each do |line|
splitted = split_message(line, prefix, split_start, split_end)
splitted[0, (@bot.config.max_messages || splitted.size)].each do |string|
@bot.irc.send("#{command} #@name :#{string}")
end
end
end | ruby | {
"resource": ""
} |
q16984 | Cinch.Bot.start | train | def start(plugins = true)
# Connect the bot and block until it quits. Resets user/channel state
# before each (re)connect, arranges channel joins (immediately, after
# an event, or after a delay), and on disconnect retries with
# exponential backoff capped at config.max_reconnect_delay.
@reconnects = 0
@plugins.register_plugins(@config.plugins.plugins) if plugins
begin
@user_list.each do |user|
user.in_whois = false
user.unsync_all
end # reset state of all users
@channel_list.each do |channel|
channel.unsync_all
end # reset state of all channels
@channels = [] # reset list of channels the bot is in
@join_handler.unregister if @join_handler
@join_timer.stop if @join_timer
join_lambda = lambda { @config.channels.each { |channel| Channel(channel).join }}
if @config.delay_joins.is_a?(Symbol)
@join_handler = join_handler = on(@config.delay_joins) {
join_handler.unregister
join_lambda.call
}
else
@join_timer = Timer.new(self, interval: @config.delay_joins, shots: 1) {
join_lambda.call
}
end
@modes = []
@loggers.info "Connecting to #{@config.server}:#{@config.port}"
@irc = IRC.new(self)
@irc.start
if @config.reconnect && !@quitting
# double the delay for each unsuccesful reconnection attempt
if @last_connection_was_successful
@reconnects = 0
@last_connection_was_successful = false
else
@reconnects += 1
end
# Throttle reconnect attempts
wait = 2**@reconnects
wait = @config.max_reconnect_delay if wait > @config.max_reconnect_delay
@loggers.info "Waiting #{wait} seconds before reconnecting"
start_time = Time.now
while !@quitting && (Time.now - start_time) < wait
sleep 1
end
end
end while @config.reconnect and not @quitting
end | ruby | {
"resource": ""
} |
# q16985 | Cinch.Bot.part | train |
def part(channel, reason = nil)
  # Leave +channel+ (resolved through the Channel() helper), optionally
  # with a parting message; returns the Channel object.
  Channel(channel).tap { |chan| chan.part(reason) }
end
# | ruby | {
"resource": ""
} |
# q16986 | Cinch.Bot.generate_next_nick! | train |
def generate_next_nick!(base = nil)
  # Pick the next nick to try after a collision and store it in the config.
  # With no +base+: start with the first configured nick (or the single
  # configured nick). With a +base+: advance through the configured nick
  # list, appending "_" once the list (or a custom base) is exhausted.
  nicks = @config.nicks || []
  next_nick =
    if base.nil?
      # No base: try the first possible nick.
      @config.nicks ? @config.nicks.first : @config.nick
    elsif !nicks.include?(base)
      # Custom base not in our list: just append an underscore.
      base + "_"
    else
      # Next configured nick, or underscore the base when exhausted.
      nicks[nicks.index(base) + 1] || (base + "_")
    end
  @config.nick = next_nick
end
# | ruby | {
"resource": ""
} |
# q16987 | Garage::Representer.ClassMethods.metadata | train |
def metadata
  # Summarize this representer: attribute definition names plus link rels
  # (a rel is wrapped in a hash when it carries an :as option).
  definitions = representer_attrs.grep(Definition).map(&:name)
  links = representer_attrs.grep(Link).map do |link|
    link.options[:as] ? { link.rel => { 'as' => link.options[:as] } } : link.rel
  end
  { :definitions => definitions, :links => links }
end
# | ruby | {
"resource": ""
} |
# q16988 | Garage.ControllerHelper.requested_by? | train |
def requested_by?(resource)
  # True when the authenticated resource owner is the same record as
  # +resource+ (or as its owner, when the resource delegates ownership).
  user = resource.respond_to?(:owner) ? resource.owner : resource
  return false if current_resource_owner.nil?
  # The class check guards the id comparison across different owner models.
  return false unless user.is_a?(current_resource_owner.class)
  current_resource_owner.id == user.id
end
# | ruby | {
"resource": ""
} |
# q16989 | Loaf.OptionsValidator.valid? | train |
def valid?(options)
  # Ensure every option key is a recognized configuration attribute.
  # Raises Loaf::InvalidOptions on the first unknown key; returns true
  # otherwise.
  allowed = Loaf::Configuration::VALID_ATTRIBUTES
  options.each_key do |key|
    fail Loaf::InvalidOptions.new(key, allowed) unless allowed.include?(key)
  end
  true
end
# | ruby | {
"resource": ""
} |
# q16990 | Loaf.Configuration.to_hash | train |
def to_hash
  # Export the configuration as {attribute => current value}.
  VALID_ATTRIBUTES.each_with_object({}) { |attr, acc| acc[attr] = send(attr) }
end
# | ruby | {
"resource": ""
} |
# q16991 | Loaf.ViewExtensions.breadcrumb | train |
def breadcrumb(name, url, options = {})
  # Append a crumb (display name, url target, options) to this request's
  # breadcrumb trail.
  crumb = Loaf::Crumb.new(name, url, options)
  _breadcrumbs << crumb
end
# | ruby | {
"resource": ""
} |
q16992 | Loaf.ViewExtensions.breadcrumb_trail | train | def breadcrumb_trail(options = {})
# Yield each accumulated crumb as a Loaf::Breadcrumb of resolved
# [name, path, current?]; returns an Enumerator when no block is given.
# Options are validated and layered over the global configuration.
return enum_for(:breadcrumb_trail) unless block_given?
valid?(options)
options = Loaf.configuration.to_hash.merge(options)
_breadcrumbs.each do |crumb|
name = title_for(crumb.name)
path = url_for(_expand_url(crumb.url))
# NOTE(review): merged options are not passed on here — crumb.match alone
# decides currentness; confirm that is intentional.
current = current_crumb?(path, crumb.match)
yield(Loaf::Breadcrumb[name, path, current])
end
end | ruby | {
"resource": ""
} |
# q16993 | Loaf.ViewExtensions._expand_url | train |
def _expand_url(url)
  # Resolve a crumb's url spec: a String/Symbol naming a method is
  # dispatched on the view context, a Proc is called with the context,
  # and anything else (e.g. a literal path) is returned unchanged.
  if url.is_a?(Proc)
    url.call(self)
  elsif url.is_a?(String) || url.is_a?(Symbol)
    respond_to?(url) ? send(url) : url
  else
    url
  end
end
# | ruby | {
"resource": ""
} |
q16994 | Loaf.Translation.find_title | train | def find_title(title, options = {})
# Resolve a breadcrumb title through I18n: blank/nil titles are returned
# as-is; otherwise the title is looked up in the translation scope and
# falls back to itself when no translation exists.
# NOTE(review): assumes +title+ responds to #empty? (a String); a Symbol
# would raise here — confirm callers. Also note the passed +options+ hash
# is mutated in place.
return title if title.nil? || title.empty?
options[:scope] ||= translation_scope
options[:default] = Array(options[:default])
# Fall back to the raw title when the caller supplied no defaults.
options[:default] << title if options[:default].empty?
I18n.t(title.to_s, options)
end | ruby | {
"resource": ""
} |
# q16995 | BeakerHostGenerator.Parser.tokenize_layout | train |
def tokenize_layout(layout_spec)
  # Split a layout spec such as "centos6-64m{hostname=foo-bar}-debian8-32"
  # into tokens on "-", while protecting dashes that appear inside {...}
  # blocks (arbitrary host settings).
  #
  # Strategy: temporarily rewrite protected dashes as "|" (chosen for its
  # unlikely occurrence in user input), split on "-", then restore them:
  #   "centos6-64m{hostname=foo|bar}-debian8-32"
  #   -> ["centos6", "64m{hostname=foo|bar}", "debian8", "32"]
  #   -> ["centos6", "64m{hostname=foo-bar}", "debian8", "32"]
  #
  # Improvement: String#tr replaces the per-token gsub — idiomatic and
  # cheaper for a single-character swap. The scan itself is unchanged
  # (including its flag-toggle handling of braces).
  spec = String.new(layout_spec) # copy so we can replace characters inline
  within_braces = false
  spec.chars.each_with_index do |char, index|
    case char
    when '{'
      within_braces = true
    when '}'
      within_braces = false
    when '-'
      spec[index] = '|' if within_braces
    end
  end
  spec.split('-').map { |token| token.tr('|', '-') }
end
# | ruby | {
"resource": ""
} |
# q16996 | BeakerHostGenerator.Parser.settings_string_to_map | train |
def settings_string_to_map(host_settings)
  # Parse an arbitrary-host-settings string such as
  #   "{hostname=foo,lists=[a,b],opts={k=v}}"
  # into a Hash (values stay strings; nested {...} become Hashes and
  # [...] become Arrays).
  #
  # The scanner advances delimiter to delimiter ( [ { } ] , ); each
  # delimiter gives enough context to mutate the structure currently under
  # construction. `object_depth` is a stack of open containers; completed
  # containers are popped off. A top-level Hash is always assumed.
  #
  # Raises BeakerHostGenerator::Exceptions::InvalidNodeSpecError on
  # malformed input.
  #
  # Fixes: the nested-container raise used a wrong namespace
  # (Beaker::HostGenerator::...), and the blanket `rescue Exception` is
  # narrowed to StandardError so signals/exits are not swallowed.
  stringscan = StringScanner.new(host_settings)
  object = nil
  object_depth = []
  current_depth = 0
  loop do
    blob = stringscan.scan_until(/\[|{|}|\]|,/)
    break if blob.nil?
    # The very first delimiter opens the implicit top-level hash.
    if stringscan.pos() == 1
      object = {}
      object_depth.push(object)
      next
    end
    current_type = object_depth[current_depth].class
    current_object = object_depth[current_depth]
    # Bare "[" / "{": open an anonymous container inside an array.
    if blob == '['
      current_object.push([])
      object_depth.push(current_object.last)
      current_depth = current_depth.next
      next
    end
    if blob.start_with?('{')
      current_object.push({})
      object_depth.push(current_object.last)
      current_depth = current_depth.next
      next
    end
    # A closing delimiter alone finishes the current container.
    if blob == ']' or blob == '}'
      object_depth.pop
      current_depth = current_depth.pred
      next
    end
    # "key={" / "key=[": assignment opening a named nested container.
    if blob[-2] == '='
      raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError unless blob.end_with?('{', '[')
      if blob[-1] == '{'
        current_object[blob[0..-3]] = {}
      else
        current_object[blob[0..-3]] = []
      end
      object_depth.push(current_object[blob[0..-3]])
      current_depth = current_depth.next
      next
    end
    # "key=value}": final pair of a hash.
    if blob[-1] == '}'
      raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError if blob.count('=') != 1
      key_pair = blob[0..-2].split('=')
      raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError if key_pair.size != 2
      key_pair.each do |element|
        raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError if element.empty?
      end
      current_object[key_pair[0]] = key_pair[1]
      object_depth.pop
      current_depth = current_depth.pred
      next
    end
    if blob == ','
      next
    end
    # "key=value," or "element,": interior member of a hash or array.
    if blob[-1] == ','
      if current_type == Hash
        key_pair = blob[0..-2].split('=')
        raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError if key_pair.size != 2
        key_pair.each do |element|
          raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError if element.empty?
        end
        current_object[key_pair[0]] = key_pair[1]
        next
      elsif current_type == Array
        current_object.push(blob[0..-2])
        next
      end
    end
    # "element]": final element of an array.
    if blob[-1] == ']'
      current_object.push(blob[0..-2])
      object_depth.pop
      current_depth = current_depth.pred
      next
    end
  end
  object
rescue StandardError
  raise BeakerHostGenerator::Exceptions::InvalidNodeSpecError,
        "Malformed host settings: #{host_settings}"
end
# | ruby | {
"resource": ""
} |
q16997 | BeakerHostGenerator.Data.get_platform_info | train | def get_platform_info(bhg_version, platform, hypervisor)
# Merge a platform's general OS info with its hypervisor-specific info
# (hypervisor entries win) for the given osinfo data version.
# NOTE(review): `deep_merge!` is a Hash extension defined elsewhere in the
# project; `get_osinfo` supplies the per-version lookup table.
info = get_osinfo(bhg_version)[platform]
{}.deep_merge!(info[:general]).deep_merge!(info[hypervisor])
end | ruby | {
"resource": ""
} |
# q16998 | BeakerHostGenerator.AbsSupport.extract_templates | train |
def extract_templates(config)
  # Count how many hosts in a Beaker config use each template.
  #
  # @param config [Hash] host configuration with a 'HOSTS' map whose values
  #   each carry a 'template' key
  # @return [Hash] template name => number of hosts using it
  #
  # Improvement: build the counts with Hash#transform_values instead of
  # reassigning the grouped hash's values while iterating over it.
  config['HOSTS'].values
                 .group_by { |host| host['template'] }
                 .transform_values(&:count)
end
# | ruby | {
"resource": ""
} |
q16999 | BeakerHostGenerator.Generator.generate | train | def generate(layout, options)
# Turn a layout spec string (e.g. "centos6-64mdca-32a") into a full Beaker
# host configuration hash. Tokens are either OS/version markers, which set
# the platform for subsequent node tokens, or node descriptors, each of
# which produces one host entry via the configured hypervisor.
layout = prepare(layout)
tokens = tokenize_layout(layout)
config = {}.deep_merge(BASE_CONFIG)
nodeid = Hash.new(1)
ostype = nil
bhg_version = options[:osinfo_version] || 0
tokens.each do |token|
if is_ostype_token?(token, bhg_version)
# Each OS marker must have produced at least one node before the next
# marker appears.
if nodeid[ostype] == 1 and ostype != nil
raise "Error: no nodes generated for #{ostype}"
end
ostype = token
next
end
node_info = parse_node_info_token(token)
# Build node host name
platform = "#{ostype}-#{node_info['bits']}"
host_name = "#{platform}-#{nodeid[ostype]}"
node_info['platform'] = platform
node_info['ostype'] = ostype
node_info['nodeid'] = nodeid[ostype]
host_config = base_host_config(options)
# Delegate to the hypervisor
hypervisor = BeakerHostGenerator::Hypervisor.create(node_info, options)
host_config = hypervisor.generate_node(node_info, host_config, bhg_version)
config['CONFIG'].deep_merge!(hypervisor.global_config())
# Merge in any arbitrary key-value host settings. Treat the 'hostname'
# setting specially, and don't merge it in as an arbitrary setting.
arbitrary_settings = node_info['host_settings']
host_name = arbitrary_settings.delete('hostname') if
arbitrary_settings.has_key?('hostname')
host_config.merge!(arbitrary_settings)
# 64-bit Windows forces 32-bit ruby_arch/install flags here — presumably
# a PE packaging requirement; confirm against PE_USE_WIN32's definition.
if PE_USE_WIN32 && ostype =~ /windows/ && node_info['bits'] == "64"
host_config['ruby_arch'] = 'x86'
host_config['install_32'] = true
end
generate_host_roles!(host_config, node_info, options)
config['HOSTS'][host_name] = host_config
nodeid[ostype] += 1
end
# Merge in global configuration settings after the hypervisor defaults
if options[:global_config]
decoded = prepare(options[:global_config])
# Support for strings without '{}' was introduced, so just double
# check here to ensure that we pass in values surrounded by '{}'.
if !decoded.start_with?('{')
decoded = "{#{decoded}}"
end
global_config = settings_string_to_map(decoded)
config['CONFIG'].deep_merge!(global_config)
end
# Munge non-string scalar values into proper data types
unstringify_values!(config)
return config
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.