_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q11300 | Trestle.TableHelper.table | train | def table(name=nil, options={}, &block)
if block_given?
if name.is_a?(Hash)
options = name
else
collection = name
end
table = Table::Builder.build(options, &block)
elsif name.is_a?(Trestle::Table)
table = name
else
table = admin.tables.fetch(name) { raise ArgumentError, "Unable to find table named #{name.inspect}" }
end
collection ||= options[:collection] || table.options[:collection]
collection = collection.call if collection.respond_to?(:call)
render "trestle/table/table", table: table, collection: collection
end | ruby | {
"resource": ""
} |
q11301 | Trestle.PaginationHelper.page_entries_info | train | def page_entries_info(collection, options = {})
entry_name = options[:entry_name] || "entry"
entry_name = entry_name.pluralize unless collection.total_count == 1
if collection.total_pages < 2
t('trestle.helpers.page_entries_info.one_page.display_entries', entry_name: entry_name, count: collection.total_count, default: "Displaying <strong>all %{count}</strong> %{entry_name}")
else
first = number_with_delimiter(collection.offset_value + 1)
last = number_with_delimiter((sum = collection.offset_value + collection.limit_value) > collection.total_count ? collection.total_count : sum)
total = number_with_delimiter(collection.total_count)
t('trestle.helpers.page_entries_info.more_pages.display_entries', entry_name: entry_name, first: first, last: last, total: total, default: "Displaying %{entry_name} <strong>%{first} - %{last}</strong> of <b>%{total}</b>")
end.html_safe
end | ruby | {
"resource": ""
} |
q11302 | Trestle.Configuration.hook | train | def hook(name, options={}, &block)
hooks[name.to_s] << Hook.new(name.to_s, options, &block)
end | ruby | {
"resource": ""
} |
q11303 | Shipit.Deploy.trigger_rollback | train | def trigger_rollback(user = AnonymousUser.new, env: nil, force: false)
rollback = build_rollback(user, env: env, force: force)
rollback.save!
rollback.enqueue
lock_reason = "A rollback for #{rollback.since_commit.sha} has been triggered. " \
"Please make sure the reason for the rollback has been addressed before deploying again."
stack.update!(lock_reason: lock_reason, lock_author_id: user.id)
rollback
end | ruby | {
"resource": ""
} |
q11304 | Jazzy.PodspecDocumenter.compiler_swift_version | train | def compiler_swift_version(user_version)
return LATEST_SWIFT_VERSION unless user_version
LONG_SWIFT_VERSIONS.select do |version|
user_version.start_with?(version)
end.last || "#{user_version[0]}.0"
end | ruby | {
"resource": ""
} |
q11305 | JSONAPI.ResourceIdTree.fetch_related_resource_id_tree | train | def fetch_related_resource_id_tree(relationship)
relationship_name = relationship.name.to_sym
@related_resource_id_trees[relationship_name] ||= RelatedResourceIdTree.new(relationship, self)
end | ruby | {
"resource": ""
} |
q11306 | JSONAPI.PrimaryResourceIdTree.add_resource_fragment | train | def add_resource_fragment(fragment, include_related)
fragment.primary = true
init_included_relationships(fragment, include_related)
@fragments[fragment.identity] = fragment
end | ruby | {
"resource": ""
} |
q11307 | JSONAPI.RelatedResourceIdTree.add_resource_fragment | train | def add_resource_fragment(fragment, include_related)
init_included_relationships(fragment, include_related)
fragment.related_from.each do |rid|
@source_resource_id_tree.fragments[rid].add_related_identity(parent_relationship.name, fragment.identity)
end
@fragments[fragment.identity] = fragment
end | ruby | {
"resource": ""
} |
q11308 | Google.Cloud.error_reporting | train | def error_reporting scope: nil, timeout: nil, client_config: nil
Google::Cloud.error_reporting @project, @keyfile,
scope: scope,
timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11309 | Google.Cloud.dns | train | def dns scope: nil, retries: nil, timeout: nil
Google::Cloud.dns @project, @keyfile, scope: scope,
retries: (retries || @retries),
timeout: (timeout || @timeout)
end | ruby | {
"resource": ""
} |
q11310 | Google.Cloud.spanner | train | def spanner scope: nil, timeout: nil, client_config: nil
Google::Cloud.spanner @project, @keyfile, scope: scope,
timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11311 | Google.Cloud.logging | train | def logging scope: nil, timeout: nil, client_config: nil
timeout ||= @timeout
Google::Cloud.logging @project, @keyfile, scope: scope,
timeout: timeout,
client_config: client_config
end | ruby | {
"resource": ""
} |
q11312 | Google.Cloud.bigquery | train | def bigquery scope: nil, retries: nil, timeout: nil
Google::Cloud.bigquery @project, @keyfile, scope: scope,
retries: (retries || @retries),
timeout: (timeout || @timeout)
end | ruby | {
"resource": ""
} |
q11313 | Google.Cloud.debugger | train | def debugger service_name: nil, service_version: nil, scope: nil,
timeout: nil, client_config: nil
Google::Cloud.debugger @project, @keyfile,
service_name: service_name,
service_version: service_version,
scope: scope,
timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11314 | Google.Cloud.datastore | train | def datastore scope: nil, timeout: nil, client_config: nil
Google::Cloud.datastore @project, @keyfile,
scope: scope, timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11315 | Google.Cloud.resource_manager | train | def resource_manager scope: nil, retries: nil, timeout: nil
Google::Cloud.resource_manager @keyfile, scope: scope,
retries: (retries || @retries),
timeout: (timeout || @timeout)
end | ruby | {
"resource": ""
} |
q11316 | Google.Cloud.storage | train | def storage scope: nil, retries: nil, timeout: nil
Google::Cloud.storage @project, @keyfile, scope: scope,
retries: (retries || @retries),
timeout: (timeout || @timeout)
end | ruby | {
"resource": ""
} |
q11317 | Google.Cloud.translate | train | def translate key = nil, scope: nil, retries: nil, timeout: nil
Google::Cloud.translate key, project_id: @project, credentials: @keyfile,
scope: scope,
retries: (retries || @retries),
timeout: (timeout || @timeout)
end | ruby | {
"resource": ""
} |
q11318 | Google.Cloud.firestore | train | def firestore scope: nil, timeout: nil, client_config: nil
Google::Cloud.firestore @project, @keyfile,
scope: scope, timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11319 | Google.Cloud.trace | train | def trace scope: nil, timeout: nil, client_config: nil
Google::Cloud.trace @project, @keyfile, scope: scope,
timeout: (timeout || @timeout),
client_config: client_config
end | ruby | {
"resource": ""
} |
q11320 | Google.Cloud.bigtable | train | def bigtable scope: nil, timeout: nil, credentials: nil, client_config: nil
Google::Cloud.bigtable(
project_id: @project,
credentials: (credentials || @keyfile),
scope: scope,
timeout: (timeout || @timeout),
client_config: client_config
)
end | ruby | {
"resource": ""
} |
q11321 | WebMock.BodyPattern.matching_body_hashes? | train | def matching_body_hashes?(query_parameters, pattern, content_type)
return false unless query_parameters.is_a?(Hash)
return false unless query_parameters.keys.sort == pattern.keys.sort
query_parameters.each do |key, actual|
expected = pattern[key]
if actual.is_a?(Hash) && expected.is_a?(Hash)
return false unless matching_body_hashes?(actual, expected, content_type)
else
expected = WebMock::Util::ValuesStringifier.stringify_values(expected) if url_encoded_body?(content_type)
return false unless expected === actual
end
end
true
end | ruby | {
"resource": ""
} |
q11322 | HTML.Pipeline.call | train | def call(html, context = {}, result = nil)
context = @default_context.merge(context)
context = context.freeze
result ||= @result_class.new
payload = default_payload filters: @filters.map(&:name),
context: context, result: result
instrument 'call_pipeline.html_pipeline', payload do
result[:output] =
@filters.inject(html) do |doc, filter|
perform_filter(filter, doc, context, result)
end
end
result
end | ruby | {
"resource": ""
} |
q11323 | HTML.Pipeline.to_document | train | def to_document(input, context = {}, result = nil)
result = call(input, context, result)
HTML::Pipeline.parse(result[:output])
end | ruby | {
"resource": ""
} |
q11324 | HTML.Pipeline.to_html | train | def to_html(input, context = {}, result = nil)
result = call(input, context, result = nil)
output = result[:output]
if output.respond_to?(:to_html)
output.to_html
else
output.to_s
end
end | ruby | {
"resource": ""
} |
q11325 | Kaminari.PageScopeMethods.total_pages | train | def total_pages
count_without_padding = total_count
count_without_padding -= @_padding if defined?(@_padding) && @_padding
count_without_padding = 0 if count_without_padding < 0
total_pages_count = (count_without_padding.to_f / limit_value).ceil
max_pages && (max_pages < total_pages_count) ? max_pages : total_pages_count
rescue FloatDomainError
raise ZeroPerPageOperation, "The number of total pages was incalculable. Perhaps you called .per(0)?"
end | ruby | {
"resource": ""
} |
q11326 | Kaminari.PageScopeMethods.current_page | train | def current_page
offset_without_padding = offset_value
offset_without_padding -= @_padding if defined?(@_padding) && @_padding
offset_without_padding = 0 if offset_without_padding < 0
(offset_without_padding / limit_value) + 1
rescue ZeroDivisionError
raise ZeroPerPageOperation, "Current page was incalculable. Perhaps you called .per(0)?"
end | ruby | {
"resource": ""
} |
q11327 | Kaminari.ActiveRecordRelationMethods.entry_name | train | def entry_name(options = {})
default = options[:count] == 1 ? model_name.human : model_name.human.pluralize
model_name.human(options.reverse_merge(default: default))
end | ruby | {
"resource": ""
} |
q11328 | Guard.Watcher.call_action | train | def call_action(matches)
@action.arity > 0 ? @action.call(matches) : @action.call
rescue => ex
UI.error "Problem with watch action!\n#{ex.message}"
UI.error ex.backtrace.join("\n")
end | ruby | {
"resource": ""
} |
q11329 | Guard.CLI.start | train | def start
if defined?(JRUBY_VERSION)
unless options[:no_interactions]
abort "\nSorry, JRuby and interactive mode are incompatible.\n"\
"As a workaround, use the '-i' option instead.\n\n"\
"More info: \n"\
" * https://github.com/guard/guard/issues/754\n"\
" * https://github.com/jruby/jruby/issues/2383\n\n"
end
end
exit(Cli::Environments::Valid.new(options).start_guard)
end | ruby | {
"resource": ""
} |
q11330 | Guard.CLI.notifiers | train | def notifiers
Cli::Environments::EvaluateOnly.new(options).evaluate
# TODO: pass the data directly to the notifiers?
DslDescriber.new.notifiers
end | ruby | {
"resource": ""
} |
q11331 | Guard.CLI.init | train | def init(*plugin_names)
env = Cli::Environments::Valid.new(options)
exitcode = env.initialize_guardfile(plugin_names)
exit(exitcode)
end | ruby | {
"resource": ""
} |
q11332 | Guard.CLI.show | train | def show
Cli::Environments::EvaluateOnly.new(options).evaluate
DslDescriber.new.show
end | ruby | {
"resource": ""
} |
q11333 | Guard.PluginUtil.add_to_guardfile | train | def add_to_guardfile
klass = plugin_class # call here to avoid failing later
require_relative "guardfile/evaluator"
# TODO: move this to Generator?
options = Guard.state.session.evaluator_options
evaluator = Guardfile::Evaluator.new(options)
begin
evaluator.evaluate
rescue Guard::Guardfile::Evaluator::NoPluginsError
end
if evaluator.guardfile_include?(name)
UI.info "Guardfile already includes #{ name } guard"
else
content = File.read("Guardfile")
File.open("Guardfile", "wb") do |f|
f.puts(content)
f.puts("")
f.puts(klass.template(plugin_location))
end
UI.info INFO_ADDED_GUARD_TO_GUARDFILE % name
end
end | ruby | {
"resource": ""
} |
q11334 | Guard.PluginUtil._plugin_constant | train | def _plugin_constant
@_plugin_constant ||= Guard.constants.detect do |c|
c.to_s.casecmp(_constant_name.downcase).zero?
end
end | ruby | {
"resource": ""
} |
q11335 | Guard.Dsl.interactor | train | def interactor(options)
# TODO: remove dependency on Interactor (let session handle this)
case options
when :off
Interactor.enabled = false
when Hash
Interactor.options = options
end
end | ruby | {
"resource": ""
} |
q11336 | Guard.Dsl.guard | train | def guard(name, options = {})
@plugin_options = options.merge(watchers: [], callbacks: [])
yield if block_given?
@current_groups ||= []
groups = @current_groups && @current_groups.last || [:default]
groups.each do |group|
opts = @plugin_options.merge(group: group)
# TODO: let plugins be added *after* evaluation
Guard.state.session.plugins.add(name, opts)
end
@plugin_options = nil
end | ruby | {
"resource": ""
} |
q11337 | Guard.Dsl.watch | train | def watch(pattern, &action)
# Allow watches in the global scope (to execute arbitrary commands) by
# building a generic Guard::Plugin.
@plugin_options ||= nil
return guard(:plugin) { watch(pattern, &action) } unless @plugin_options
@plugin_options[:watchers] << Watcher.new(pattern, action)
end | ruby | {
"resource": ""
} |
q11338 | Guard.Dsl.callback | train | def callback(*args, &block)
@plugin_options ||= nil
fail "callback must be called within a guard block" unless @plugin_options
block, events = if args.size > 1
# block must be the first argument in that case, the
# yielded block is ignored
args
else
[block, args[0]]
end
@plugin_options[:callbacks] << { events: events, listener: block }
end | ruby | {
"resource": ""
} |
q11339 | Guard.Dsl.logger | train | def logger(options)
if options[:level]
options[:level] = options[:level].to_sym
unless [:debug, :info, :warn, :error].include? options[:level]
UI.warning(format(WARN_INVALID_LOG_LEVEL, options[:level]))
options.delete :level
end
end
if options[:only] && options[:except]
UI.warning WARN_INVALID_LOG_OPTIONS
options.delete :only
options.delete :except
end
# Convert the :only and :except options to a regular expression
[:only, :except].each do |name|
next unless options[name]
list = [].push(options[name]).flatten.map do |plugin|
Regexp.escape(plugin.to_s)
end
options[name] = Regexp.new(list.join("|"), Regexp::IGNORECASE)
end
UI.options = UI.options.merge(options)
end | ruby | {
"resource": ""
} |
q11340 | Guard.Dsl.directories | train | def directories(directories)
directories.each do |dir|
fail "Directory #{dir.inspect} does not exist!" unless Dir.exist?(dir)
end
Guard.state.session.watchdirs = directories
end | ruby | {
"resource": ""
} |
q11341 | Guard.Commander.start | train | def start(options = {})
setup(options)
UI.debug "Guard starts all plugins"
Runner.new.run(:start)
listener.start
watched = Guard.state.session.watchdirs.join("', '")
UI.info "Guard is now watching at '#{ watched }'"
exitcode = 0
begin
while interactor.foreground != :exit
Guard.queue.process while Guard.queue.pending?
end
rescue Interrupt
rescue SystemExit => e
exitcode = e.status
end
exitcode
ensure
stop
end | ruby | {
"resource": ""
} |
q11342 | Guard.Commander.run_all | train | def run_all(scopes = {})
UI.clear(force: true)
UI.action_with_scopes("Run", scopes)
Runner.new.run(:run_all, scopes)
end | ruby | {
"resource": ""
} |
q11343 | Guard.Commander.pause | train | def pause(expected = nil)
paused = listener.paused?
states = { paused: true, unpaused: false, toggle: !paused }
pause = states[expected || :toggle]
fail ArgumentError, "invalid mode: #{expected.inspect}" if pause.nil?
return if pause == paused
listener.public_send(pause ? :pause : :start)
UI.info "File event handling has been #{pause ? 'paused' : 'resumed'}"
end | ruby | {
"resource": ""
} |
q11344 | Guard.DslDescriber.show | train | def show
# collect metadata
groups = Guard.state.session.groups.all
objects = []
empty_plugin = OpenStruct.new
empty_plugin.options = [["", nil]]
groups.each do |group|
plugins = Array(Guard.state.session.plugins.all(group: group.name))
plugins = [empty_plugin] if plugins.empty?
plugins.each do |plugin|
options = plugin.options
options = [["", nil]] if options.empty?
options.each do |option, raw_value|
value = raw_value.nil? ? "" : raw_value.inspect
objects << [group.title, plugin.title, option.to_s, value]
end
end
end
# presentation
rows = []
prev_group = prev_plugin = prev_option = prev_value = nil
objects.each do |group, plugin, option, value|
group_changed = prev_group != group
plugin_changed = (prev_plugin != plugin || group_changed)
rows << :split if group_changed || plugin_changed
rows << {
Group: group_changed ? group : "",
Plugin: plugin_changed ? plugin : "",
Option: option,
Value: value
}
prev_group = group
prev_plugin = plugin
prev_option = option
prev_value = value
end
# render
Formatador.display_compact_table(
rows.drop(1),
[:Group, :Plugin, :Option, :Value]
)
end | ruby | {
"resource": ""
} |
q11345 | Guard.DslDescriber.notifiers | train | def notifiers
supported = Notifier.supported
Notifier.connect(notify: true, silent: true)
detected = Notifier.detected
Notifier.disconnect
detected_names = detected.map { |item| item[:name] }
final_rows = supported.each_with_object([]) do |(name, _), rows|
available = detected_names.include?(name) ? "✔" : "✘"
notifier = detected.detect { |n| n[:name] == name }
used = notifier ? "✔" : "✘"
options = notifier ? notifier[:options] : {}
if options.empty?
rows << :split
_add_row(rows, name, available, used, "", "")
else
options.each_with_index do |(option, value), index|
if index == 0
rows << :split
_add_row(rows, name, available, used, option.to_s, value.inspect)
else
_add_row(rows, "", "", "", option.to_s, value.inspect)
end
end
end
rows
end
Formatador.display_compact_table(
final_rows.drop(1),
[:Name, :Available, :Used, :Option, :Value]
)
end | ruby | {
"resource": ""
} |
q11346 | Guard.Runner.run | train | def run(task, scope_hash = {})
Lumberjack.unit_of_work do
items = Guard.state.scope.grouped_plugins(scope_hash || {})
items.each do |_group, plugins|
_run_group_plugins(plugins) do |plugin|
_supervise(plugin, task) if plugin.respond_to?(task)
end
end
end
end | ruby | {
"resource": ""
} |
q11347 | Guard.Runner.run_on_changes | train | def run_on_changes(modified, added, removed)
types = {
MODIFICATION_TASKS => modified,
ADDITION_TASKS => added,
REMOVAL_TASKS => removed
}
UI.clearable
Guard.state.scope.grouped_plugins.each do |_group, plugins|
_run_group_plugins(plugins) do |plugin|
UI.clear
types.each do |tasks, unmatched_paths|
next if unmatched_paths.empty?
match_result = Watcher.match_files(plugin, unmatched_paths)
next if match_result.empty?
task = tasks.detect { |meth| plugin.respond_to?(meth) }
_supervise(plugin, task, match_result) if task
end
end
end
end | ruby | {
"resource": ""
} |
q11348 | Guard.Runner._supervise | train | def _supervise(plugin, task, *args)
catch self.class.stopping_symbol_for(plugin) do
plugin.hook("#{ task }_begin", *args)
result = UI.options.with_progname(plugin.class.name) do
begin
plugin.send(task, *args)
rescue Interrupt
throw(:task_has_failed)
end
end
plugin.hook("#{ task }_end", result)
result
end
rescue ScriptError, StandardError, RuntimeError
UI.error("#{ plugin.class.name } failed to achieve its"\
" <#{ task }>, exception was:" \
"\n#{ $!.class }: #{ $!.message }" \
"\n#{ $!.backtrace.join("\n") }")
Guard.state.session.plugins.remove(plugin)
UI.info("\n#{ plugin.class.name } has just been fired")
$!
end | ruby | {
"resource": ""
} |
q11349 | HTTP.Response.content_length | train | def content_length
# http://greenbytes.de/tech/webdav/rfc7230.html#rfc.section.3.3.3
# Clause 3: "If a message is received with both a Transfer-Encoding
# and a Content-Length header field, the Transfer-Encoding overrides the Content-Length.
return nil if @headers.include?(Headers::TRANSFER_ENCODING)
value = @headers[Headers::CONTENT_LENGTH]
return nil unless value
begin
Integer(value)
rescue ArgumentError
nil
end
end | ruby | {
"resource": ""
} |
q11350 | HTTP.Connection.readpartial | train | def readpartial(size = BUFFER_SIZE)
return unless @pending_response
chunk = @parser.read(size)
return chunk if chunk
finished = (read_more(size) == :eof) || @parser.finished?
chunk = @parser.read(size)
finish_response if finished
chunk.to_s
end | ruby | {
"resource": ""
} |
q11351 | HTTP.Connection.start_tls | train | def start_tls(req, options)
return unless req.uri.https? && !failed_proxy_connect?
ssl_context = options.ssl_context
unless ssl_context
ssl_context = OpenSSL::SSL::SSLContext.new
ssl_context.set_params(options.ssl || {})
end
@socket.start_tls(req.uri.host, options.ssl_socket_class, ssl_context)
end | ruby | {
"resource": ""
} |
q11352 | HTTP.Connection.send_proxy_connect_request | train | def send_proxy_connect_request(req)
return unless req.uri.https? && req.using_proxy?
@pending_request = true
req.connect_using_proxy @socket
@pending_request = false
@pending_response = true
read_headers!
@proxy_response_headers = @parser.headers
if @parser.status_code != 200
@failed_proxy_connect = true
return
end
@parser.reset
@pending_response = false
end | ruby | {
"resource": ""
} |
q11353 | HTTP.Connection.set_keep_alive | train | def set_keep_alive
return @keep_alive = false unless @persistent
@keep_alive =
case @parser.http_version
when HTTP_1_0 # HTTP/1.0 requires opt in for Keep Alive
@parser.headers[Headers::CONNECTION] == KEEP_ALIVE
when HTTP_1_1 # HTTP/1.1 is opt-out
@parser.headers[Headers::CONNECTION] != CLOSE
else # Anything else we assume doesn't supportit
false
end
end | ruby | {
"resource": ""
} |
q11354 | HTTP.Connection.read_more | train | def read_more(size)
return if @parser.finished?
value = @socket.readpartial(size, @buffer)
if value == :eof
@parser << ""
:eof
elsif value
@parser << value
end
rescue IOError, SocketError, SystemCallError => ex
raise ConnectionError, "error reading from socket: #{ex}", ex.backtrace
end | ruby | {
"resource": ""
} |
q11355 | HTTP.Chainable.via | train | def via(*proxy)
proxy_hash = {}
proxy_hash[:proxy_address] = proxy[0] if proxy[0].is_a?(String)
proxy_hash[:proxy_port] = proxy[1] if proxy[1].is_a?(Integer)
proxy_hash[:proxy_username] = proxy[2] if proxy[2].is_a?(String)
proxy_hash[:proxy_password] = proxy[3] if proxy[3].is_a?(String)
proxy_hash[:proxy_headers] = proxy[2] if proxy[2].is_a?(Hash)
proxy_hash[:proxy_headers] = proxy[4] if proxy[4].is_a?(Hash)
raise(RequestError, "invalid HTTP proxy: #{proxy_hash}") unless (2..5).cover?(proxy_hash.keys.size)
branch default_options.with_proxy(proxy_hash)
end | ruby | {
"resource": ""
} |
q11356 | HTTP.Chainable.basic_auth | train | def basic_auth(opts)
user = opts.fetch :user
pass = opts.fetch :pass
auth("Basic " + Base64.strict_encode64("#{user}:#{pass}"))
end | ruby | {
"resource": ""
} |
q11357 | HTTP.Client.request | train | def request(verb, uri, opts = {}) # rubocop:disable Style/OptionHash
opts = @default_options.merge(opts)
req = build_request(verb, uri, opts)
res = perform(req, opts)
return res unless opts.follow
Redirector.new(opts.follow).perform(req, res) do |request|
perform(request, opts)
end
end | ruby | {
"resource": ""
} |
q11358 | HTTP.Client.build_request | train | def build_request(verb, uri, opts = {}) # rubocop:disable Style/OptionHash
opts = @default_options.merge(opts)
uri = make_request_uri(uri, opts)
headers = make_request_headers(opts)
body = make_request_body(opts, headers)
req = HTTP::Request.new(
:verb => verb,
:uri => uri,
:uri_normalizer => opts.feature(:normalize_uri)&.normalizer,
:proxy => opts.proxy,
:headers => headers,
:body => body
)
opts.features.inject(req) do |request, (_name, feature)|
feature.wrap_request(request)
end
end | ruby | {
"resource": ""
} |
q11359 | HTTP.Client.verify_connection! | train | def verify_connection!(uri)
if default_options.persistent? && uri.origin != default_options.persistent
raise StateError, "Persistence is enabled for #{default_options.persistent}, but we got #{uri.origin}"
# We re-create the connection object because we want to let prior requests
# lazily load the body as long as possible, and this mimics prior functionality.
elsif @connection && (!@connection.keep_alive? || @connection.expired?)
close
# If we get into a bad state (eg, Timeout.timeout ensure being killed)
# close the connection to prevent potential for mixed responses.
elsif @state == :dirty
close
end
end | ruby | {
"resource": ""
} |
q11360 | HTTP.Client.make_request_uri | train | def make_request_uri(uri, opts)
uri = uri.to_s
if default_options.persistent? && uri !~ HTTP_OR_HTTPS_RE
uri = "#{default_options.persistent}#{uri}"
end
uri = HTTP::URI.parse uri
if opts.params && !opts.params.empty?
uri.query_values = uri.query_values(Array).to_a.concat(opts.params.to_a)
end
# Some proxies (seen on WEBRick) fail if URL has
# empty path (e.g. `http://example.com`) while it's RFC-complaint:
# http://tools.ietf.org/html/rfc1738#section-3.1
uri.path = "/" if uri.path.empty?
uri
end | ruby | {
"resource": ""
} |
q11361 | HTTP.Client.make_request_body | train | def make_request_body(opts, headers)
case
when opts.body
opts.body
when opts.form
form = HTTP::FormData.create opts.form
headers[Headers::CONTENT_TYPE] ||= form.content_type
form
when opts.json
body = MimeType[:json].encode opts.json
headers[Headers::CONTENT_TYPE] ||= "application/json; charset=#{body.encoding.name}"
body
end
end | ruby | {
"resource": ""
} |
q11362 | HTTP.Headers.delete | train | def delete(name)
name = normalize_header name.to_s
@pile.delete_if { |k, _| k == name }
end | ruby | {
"resource": ""
} |
q11363 | HTTP.Headers.add | train | def add(name, value)
name = normalize_header name.to_s
Array(value).each { |v| @pile << [name, validate_value(v)] }
end | ruby | {
"resource": ""
} |
q11364 | HTTP.Headers.get | train | def get(name)
name = normalize_header name.to_s
@pile.select { |k, _| k == name }.map { |_, v| v }
end | ruby | {
"resource": ""
} |
q11365 | HTTP.Headers.include? | train | def include?(name)
name = normalize_header name.to_s
@pile.any? { |k, _| k == name }
end | ruby | {
"resource": ""
} |
q11366 | HTTP.Headers.merge! | train | def merge!(other)
self.class.coerce(other).to_h.each { |name, values| set name, values }
end | ruby | {
"resource": ""
} |
q11367 | HTTP.Headers.normalize_header | train | def normalize_header(name)
return name if name =~ CANONICAL_NAME_RE
normalized = name.split(/[\-_]/).each(&:capitalize!).join("-")
return normalized if normalized =~ COMPLIANT_NAME_RE
raise HeaderError, "Invalid HTTP header field name: #{name.inspect}"
end | ruby | {
"resource": ""
} |
q11368 | HTTP.Headers.validate_value | train | def validate_value(value)
v = value.to_s
return v unless v.include?("\n")
raise HeaderError, "Invalid HTTP header field value: #{v.inspect}"
end | ruby | {
"resource": ""
} |
q11369 | HTTP.Redirector.redirect_to | train | def redirect_to(uri)
raise StateError, "no Location header in redirect" unless uri
verb = @request.verb
code = @response.status.code
if UNSAFE_VERBS.include?(verb) && STRICT_SENSITIVE_CODES.include?(code)
raise StateError, "can't follow #{@response.status} redirect" if @strict
verb = :get
end
verb = :get if !SEE_OTHER_ALLOWED_VERBS.include?(verb) && 303 == code
@request.redirect(uri, verb)
end | ruby | {
"resource": ""
} |
q11370 | HTTP.Request.stream | train | def stream(socket)
include_proxy_headers if using_proxy? && !@uri.https?
Request::Writer.new(socket, body, headers, headline).stream
end | ruby | {
"resource": ""
} |
q11371 | HTTP.Request.proxy_connect_headers | train | def proxy_connect_headers
connect_headers = HTTP::Headers.coerce(
Headers::HOST => headers[Headers::HOST],
Headers::USER_AGENT => headers[Headers::USER_AGENT]
)
connect_headers[Headers::PROXY_AUTHORIZATION] = proxy_authorization_header if using_authenticated_proxy?
connect_headers.merge!(proxy[:proxy_headers]) if proxy.key?(:proxy_headers)
connect_headers
end | ruby | {
"resource": ""
} |
q11372 | Roo.Excelx.column | train | def column(column_number, sheet = nil)
if column_number.is_a?(::String)
column_number = ::Roo::Utils.letter_to_number(column_number)
end
sheet_for(sheet).column(column_number)
end | ruby | {
"resource": ""
} |
q11373 | Roo.Excelx.excelx_format | train | def excelx_format(row, col, sheet = nil)
key = normalize(row, col)
sheet_for(sheet).excelx_format(key)
end | ruby | {
"resource": ""
} |
q11374 | Roo.Excelx.process_zipfile | train | def process_zipfile(zipfilename_or_stream)
@sheet_files = []
unless is_stream?(zipfilename_or_stream)
zip_file = Zip::File.open(zipfilename_or_stream)
else
zip_file = Zip::CentralDirectory.new
zip_file.read_from_stream zipfilename_or_stream
end
process_zipfile_entries zip_file.to_a.sort_by(&:name)
end | ruby | {
"resource": ""
} |
q11375 | Roo.OpenOffice.decrypt_if_necessary | train | def decrypt_if_necessary(
zip_file,
content_entry,
roo_content_xml_path, options
)
# Check if content.xml is encrypted by extracting manifest.xml
# and searching for a manifest:encryption-data element
if (manifest_entry = zip_file.glob('META-INF/manifest.xml').first)
roo_manifest_xml_path = File.join(@tmpdir, 'roo_manifest.xml')
manifest_entry.extract(roo_manifest_xml_path)
manifest = ::Roo::Utils.load_xml(roo_manifest_xml_path)
# XPath search for manifest:encryption-data only for the content.xml
# file
encryption_data = manifest.xpath(
"//manifest:file-entry[@manifest:full-path='content.xml']"\
"/manifest:encryption-data"
).first
# If XPath returns a node, then we know content.xml is encrypted
unless encryption_data.nil?
# Since we know it's encrypted, we check for the password option
# and if it doesn't exist, raise an argument error
password = options[:password]
if !password.nil?
perform_decryption(
encryption_data,
password,
content_entry,
roo_content_xml_path
)
else
fail ArgumentError, 'file is encrypted but password was not supplied'
end
end
else
fail ArgumentError, 'file missing required META-INF/manifest.xml'
end
end | ruby | {
"resource": ""
} |
q11376 | Roo.OpenOffice.perform_decryption | train | def perform_decryption(
encryption_data,
password,
content_entry,
roo_content_xml_path
)
# Extract various expected attributes from the manifest that
# describe the encryption
algorithm_node = encryption_data.xpath('manifest:algorithm').first
key_derivation_node =
encryption_data.xpath('manifest:key-derivation').first
start_key_generation_node =
encryption_data.xpath('manifest:start-key-generation').first
# If we have all the expected elements, then we can perform
# the decryption.
if !algorithm_node.nil? && !key_derivation_node.nil? &&
!start_key_generation_node.nil?
# The algorithm is a URI describing the algorithm used
algorithm = algorithm_node['manifest:algorithm-name']
# The initialization vector is base-64 encoded
iv = Base64.decode64(
algorithm_node['manifest:initialisation-vector']
)
key_derivation_name = key_derivation_node['manifest:key-derivation-name']
iteration_count = key_derivation_node['manifest:iteration-count'].to_i
salt = Base64.decode64(key_derivation_node['manifest:salt'])
# The key is hashed with an algorithm represented by this URI
key_generation_name =
start_key_generation_node[
'manifest:start-key-generation-name'
]
hashed_password = password
if key_generation_name == 'http://www.w3.org/2000/09/xmldsig#sha256'
hashed_password = Digest::SHA256.digest(password)
else
fail ArgumentError, "Unknown key generation algorithm #{key_generation_name}"
end
cipher = find_cipher(
algorithm,
key_derivation_name,
hashed_password,
salt,
iteration_count,
iv
)
begin
decrypted = decrypt(content_entry, cipher)
# Finally, inflate the decrypted stream and overwrite
# content.xml
IO.binwrite(
roo_content_xml_path,
Zlib::Inflate.new(-Zlib::MAX_WBITS).inflate(decrypted)
)
rescue StandardError => error
raise ArgumentError, "Invalid password or other data error: #{error}"
end
else
fail ArgumentError, 'manifest.xml missing encryption-data elements'
end
end | ruby | {
"resource": ""
} |
q11377 | Roo.OpenOffice.find_cipher_key | train | def find_cipher_key(*args)
fail ArgumentError, 'Unknown key derivation name ', args[1] unless args[1] == 'PBKDF2'
::OpenSSL::PKCS5.pbkdf2_hmac_sha1(args[2], args[3], args[4], args[0].key_len)
end | ruby | {
"resource": ""
} |
q11378 | Roo.OpenOffice.decrypt | train | def decrypt(content_entry, cipher)
# Zip::Entry.extract writes a 0-length file when trying
# to extract an encrypted stream, so we read the
# raw bytes based on the offset and lengths
decrypted = ''
File.open(@filename, 'rb') do |zipfile|
zipfile.seek(
content_entry.local_header_offset +
content_entry.calculate_local_header_size
)
total_to_read = content_entry.compressed_size
block_size = 4096
block_size = total_to_read if block_size > total_to_read
while (buffer = zipfile.read(block_size))
decrypted += cipher.update(buffer)
total_to_read -= buffer.length
break if total_to_read == 0
block_size = total_to_read if block_size > total_to_read
end
end
decrypted + cipher.final
end | ruby | {
"resource": ""
} |
# Stores a parsed table cell's value, type, formula and style in the
# sheet-indexed lookup hashes.
#
# @param sheet [String] sheet name
# @param x [Integer] base column of the cell
# @param y [Integer] row number
# @param i [Integer] column offset (for repeated cells)
# @param v [String] the raw cell value
# @param value_type [String, nil] the cell's value-type attribute
# @param formula [String, nil] raw formula text (may carry an of:/oooc: prefix)
# @param table_cell [Object] the table:table-cell XML element
# @param str_v [String] the cell's string representation
# @param style_name [String] the cell's style attribute
def set_cell_values(sheet, x, y, i, v, value_type, formula, table_cell, str_v, style_name)
  # Cells are keyed by [row, column].
  key = [y, x + i]
  @cell_type[sheet] ||= {}
  @cell_type[sheet][key] = value_type.to_sym if value_type
  @formula[sheet] ||= {}
  if formula
    # Strip the namespace prefix ("of:" or "oooc:") from the formula text.
    ['of:', 'oooc:'].each do |prefix|
      if formula[0, prefix.length] == prefix
        formula = formula[prefix.length..-1]
      end
    end
    @formula[sheet][key] = formula
  end
  @cell[sheet] ||= {}
  @style[sheet] ||= {}
  @style[sheet][key] = style_name
  case @cell_type[sheet][key]
  when :float
    # Keep whole numbers as Integer; use Float only when a decimal point
    # appears in the value attribute or the displayed text.
    @cell[sheet][key] = (table_cell.attributes['value'].to_s.include?(".") || table_cell.children.first.text.include?(".")) ? v.to_f : v.to_i
  when :percentage
    @cell[sheet][key] = v.to_f
  when :string
    @cell[sheet][key] = str_v
  when :date
    # TODO: if table_cell.attributes['date-value'].size != "XXXX-XX-XX".size
    if attribute(table_cell, 'date-value').size != 'XXXX-XX-XX'.size
      # A longer date-value carries a time component as well,
      # e.g. "1961-11-21T12:17:18", so promote the cell to :datetime.
      @cell[sheet][key] = DateTime.parse(attribute(table_cell, 'date-value').to_s)
      @cell_type[sheet][key] = :datetime
    else
      @cell[sheet][key] = table_cell.attributes['date-value']
    end
  when :time
    # Convert "HH:MM:SS" to the number of seconds since midnight.
    hms = v.split(':')
    @cell[sheet][key] = hms[0].to_i * 3600 + hms[1].to_i * 60 + hms[2].to_i
  else
    @cell[sheet][key] = v
  end
end
"resource": ""
} |
# Counts how many cells a range string covers.
#
# A bare cell reference (e.g. "A1") covers exactly one cell; a range such
# as "A1:B2" covers the full rectangle between its two corner cells.
#
# @param str [String] a cell ("A1") or range ("A1:B2") reference
# @return [Integer] the number of cells covered
# @raise [ArgumentError] when the string is not a cell or two-corner range
def num_cells_in_range(str)
  corners = str.split(':')
  return 1 if corners.count == 1
  unless corners.count == 2
    raise ArgumentError.new("invalid range string: #{str}. Supported range format 'A1:B2'")
  end
  first_row, first_col = extract_coordinate(corners[0])
  last_row, last_col = extract_coordinate(corners[1])
  (last_row - first_row + 1) * (last_col - first_col + 1)
end
"resource": ""
} |
# Catalog a bundle of sources: reuse source maps whose source is
# unchanged, remap only what differs, and rebuild the store when the
# resulting pin set could have changed.
#
# @param bundle [Bundle] the workspace and open sources to catalog
# @return [ApiMap] self
def catalog bundle
  new_map_hash = {}
  # Bundle always needs to be merged if it adds or removes sources
  merged = (bundle.sources.length == source_map_hash.values.length)
  bundle.sources.each do |source|
    if source_map_hash.key?(source.filename)
      if source_map_hash[source.filename].code == source.code && source_map_hash[source.filename].source.synchronized? && source.synchronized?
        # Unchanged and fully synchronized: reuse the existing map as-is.
        new_map_hash[source.filename] = source_map_hash[source.filename]
      elsif !source.synchronized?
        # Source is mid-edit: keep the old map but point it at the newer
        # (unsynchronized) source so positions stay consistent.
        new_map_hash[source.filename] = source_map_hash[source.filename]
        # @todo Smelly instance variable access
        new_map_hash[source.filename].instance_variable_set(:@source, source)
      else
        # Source changed: remap it, merging into the old map when possible
        # so the catalog can stay cheap.
        map = Solargraph::SourceMap.map(source)
        if source_map_hash[source.filename].try_merge!(map)
          new_map_hash[source.filename] = source_map_hash[source.filename]
        else
          new_map_hash[source.filename] = map
          merged = false
        end
      end
    else
      # New source: map it from scratch.
      map = Solargraph::SourceMap.map(source)
      new_map_hash[source.filename] = map
      merged = false
    end
  end
  # Every map merged cleanly; the current store is still valid.
  return self if merged
  pins = []
  reqs = []
  # @param map [SourceMap]
  new_map_hash.values.each do |map|
    pins.concat map.pins
    reqs.concat map.requires.map(&:name)
  end
  reqs.concat bundle.workspace.config.required
  unless bundle.workspace.require_paths.empty?
    # Drop requires that resolve to files inside the workspace's own
    # require paths; their pins are already present in the source maps.
    reqs.delete_if do |r|
      result = false
      bundle.workspace.require_paths.each do |l|
        pn = Pathname.new(bundle.workspace.directory).join(l, "#{r}.rb")
        if new_map_hash.keys.include?(pn.to_s)
          result = true
          break
        end
      end
      result
    end
  end
  yard_map.change(reqs)
  new_store = Store.new(pins + yard_map.pins)
  # Swap the new state in atomically.
  @mutex.synchronize {
    @cache.clear
    @source_map_hash = new_map_hash
    @store = new_store
    @unresolved_requires = yard_map.unresolved_requires
  }
  # resolve_method_aliases
  self
end
"resource": ""
} |
# Builds a Clip for the given source position.
#
# @param filename [String] the file to clip
# @param position [Position, Array(Integer, Integer)] normalized before use
# @return [SourceMap::Clip]
def clip_at filename, position
  normalized = Position.normalize(position)
  SourceMap::Clip.new(self, cursor_at(filename, normalized))
end
"resource": ""
} |
# Get an array of constant pins that are defined in the given namespace
# and visible from the given context.
#
# @param namespace [String, nil] the namespace to search (nil is treated as '')
# @param context [String] the fully qualified namespace the lookup is made
#   from; private constants are included only where the resolved namespace
#   equals this context
# @return [Array<Solargraph::Pin::Base>]
def get_constants namespace, context = ''
  namespace ||= ''
  cached = cache.get_constants(namespace, context)
  return cached.clone unless cached.nil?
  skip = []
  result = []
  bases = context.split('::')
  # Walk outward from the innermost context namespace toward the root,
  # collecting the constants visible at each enclosing level.
  while bases.length > 0
    built = bases.join('::')
    fqns = qualify(namespace, built)
    visibility = [:public]
    visibility.push :private if fqns == context
    result.concat inner_get_constants(fqns, visibility, skip)
    bases.pop
  end
  # Finally include the constants visible from the root namespace.
  fqns = qualify(namespace, '')
  visibility = [:public]
  visibility.push :private if fqns == context
  result.concat inner_get_constants(fqns, visibility, skip)
  cache.set_constants(namespace, context, result)
  result
end
"resource": ""
} |
# Resolve a namespace to its fully qualified name as seen from an
# optional context, e.g. qualify('Bar', 'Foo') may resolve to 'Foo::Bar'.
#
# @param namespace [String, nil] the namespace to resolve
# @param context [String] the namespace the reference is made from
# @return [String, nil] the fully qualified namespace, or nil when it
#   cannot be resolved
def qualify namespace, context = ''
  # @todo The return for self might work better elsewhere
  return nil if namespace.nil?
  # 'self' resolves to the context itself.
  return qualify(context) if namespace == 'self'
  cached = cache.get_qualified_namespace(namespace, context)
  return cached.clone unless cached.nil?
  result = if namespace.start_with?('::')
    # A leading :: forces resolution from the root namespace.
    inner_qualify(namespace[2..-1], '', [])
  else
    inner_qualify(namespace, context, [])
  end
  cache.set_qualified_namespace(namespace, context, result)
  result
end
"resource": ""
} |
# Get the instance variable pins available in the given namespace,
# including those declared in its superclasses.
#
# @param namespace [String] a fully qualified namespace
# @param scope [Symbol] :instance or :class
# @return [Array<Solargraph::Pin::Base>]
def get_instance_variable_pins(namespace, scope = :instance)
  pins = store.get_instance_variables(namespace, scope).dup
  # Walk up the superclass chain, qualifying each ancestor relative to
  # the class that declared it.
  ancestor = qualify(store.get_superclass(namespace), namespace)
  while ancestor
    pins.concat store.get_instance_variables(ancestor, scope)
    ancestor = qualify(store.get_superclass(ancestor), ancestor)
  end
  pins
end
"resource": ""
} |
# Get an array of methods available in a particular namespace.
#
# @param fqns [String] fully qualified namespace ('' for the root/main object)
# @param scope [Symbol] :class or :instance
# @param visibility [Array<Symbol>] any of :public, :protected, :private
# @param deep [Boolean] whether to include inherited methods
# @return [Array<Solargraph::Pin::Base>]
def get_methods fqns, scope: :instance, visibility: [:public], deep: true
  cached = cache.get_methods(fqns, scope, visibility, deep)
  return cached.clone unless cached.nil?
  result = []
  skip = []
  if fqns == ''
    # The root context sees both class and instance methods, plus
    # Kernel's instance methods (available on main).
    # @todo Implement domains
    # domains.each do |domain|
    #   type = ComplexType.parse(domain).first
    #   result.concat inner_get_methods(type.name, type.scope, [:public], deep, skip)
    # end
    result.concat inner_get_methods(fqns, :class, visibility, deep, skip)
    result.concat inner_get_methods(fqns, :instance, visibility, deep, skip)
    result.concat inner_get_methods('Kernel', :instance, visibility, deep, skip)
  else
    result.concat inner_get_methods(fqns, scope, visibility, deep, skip)
  end
  # live = live_map.get_methods(fqns, '', scope.to_s, visibility.include?(:private))
  # unless live.empty?
  #   exist = result.map(&:name)
  #   result.concat live.reject{|p| exist.include?(p.name)}
  # end
  # Replace method-alias pins with their resolved targets before caching.
  resolved = resolve_method_aliases(result)
  cache.set_methods(fqns, scope, visibility, deep, resolved)
  resolved
end
"resource": ""
} |
# Get an array of methods available for a complex type.
#
# This method does not qualify the complex type's namespace because
# it can cause conflicts between similar names, e.g., `Foo` vs.
# `Other::Foo`. It still takes a context argument to determine whether
# protected and private methods are visible.
#
# @param type [ComplexType] the receiver's type
# @param context [String] the namespace the call is made from
# @param internal [Boolean] true when the call originates inside the
#   receiver itself (exposes private methods)
# @return [Array<Solargraph::Pin::Base>]
def get_complex_type_methods type, context = '', internal = false
  return [] if type.undefined? || type.void?
  result = []
  if type.duck_type?
    # Duck types ("#method") yield synthetic method pins plus everything
    # available on Object.
    type.select(&:duck_type?).each do |t|
      result.push Pin::DuckMethod.new(nil, t.tag[1..-1])
    end
    result.concat get_methods('Object')
  else
    unless type.nil? || type.name == 'void'
      visibility = [:public]
      # Protected methods are visible within the same class hierarchy;
      # private methods only for internal calls.
      if type.namespace == context || super_and_sub?(type.namespace, context)
        visibility.push :protected
        visibility.push :private if internal
      end
      result.concat get_methods(type.namespace, scope: type.scope, visibility: visibility)
    end
  end
  result
end
"resource": ""
} |
# Get the method pins matching a name along the namespace's ancestry,
# regardless of visibility. The first element is the nearest definition;
# later elements are overridden superclass/module versions.
#
# @param fqns [String] fully qualified namespace
# @param name [String] the method name
# @param scope [Symbol] :instance or :class
# @return [Array<Solargraph::Pin::Base>]
def get_method_stack fqns, name, scope: :instance
  all_pins = get_methods(fqns, scope: scope, visibility: [:private, :protected, :public])
  all_pins.select { |pin| pin.name == name }
end
"resource": ""
} |
# Get the pins associated with a path (e.g. 'Foo#bar'), with any method
# aliases resolved to their targets.
#
# @param path [String, nil]
# @return [Array<Solargraph::Pin::Base>] empty when the path is nil or unknown
def get_path_suggestions path
  return [] if path.nil?
  pins = store.get_path_pins(path).dup
  # NOTE(review): a live_map fallback used to live here; it remains disabled.
  resolve_method_aliases(pins)
end
"resource": ""
} |
# Perform a case-insensitive substring search of code object paths.
#
# Once a bare-name query (no '.' or '#') has found something, paths that
# contain a method separator are no longer considered, so namespace-level
# matches are preferred for namespace-level queries.
#
# @param query [String]
# @return [Array<String>] matching code object paths
def search query
  rake_yard(store)
  needle = query.downcase
  scoped_query = query.include?('.') || query.include?('#')
  results = []
  code_object_paths.each do |path|
    next unless results.empty? || scoped_query || !(path.include?('.') || path.include?('#'))
    results.push path if path.downcase.include?(needle)
  end
  results
end
"resource": ""
} |
# Get YARD documentation for the object at the given path.
#
# @param path [String] a code object path, e.g. 'String#split'
# @return [Array] a one-element array with the code object, or empty
#   when nothing is documented at that path
def document path
  rake_yard(store)
  # Look the object up once; the original called code_object_at twice
  # (once for the nil check and again to build the result).
  code_object = code_object_at(path)
  code_object.nil? ? [] : [code_object]
end
"resource": ""
} |
# Query every open source map for workspace symbols matching the query.
#
# @param query [String]
# @return [Array<Pin::Base>] matching symbol pins from all source maps
def query_symbols query
  # flat_map replaces the original manual each/concat accumulation.
  source_map_hash.values.flat_map { |source_map| source_map.query_symbols(query) }
end
"resource": ""
} |
# Find and require all installed Solargraph extension gems. Extension
# gems are identified by name: solargraph-<name>-ext-<version>.
#
# @return [void]
def require_extensions
  # Match each gem name once with the capturing pattern instead of
  # filtering with one regexp and re-matching with a second.
  pattern = /^(solargraph\-[a-z0-9_\-]*?\-ext)\-[0-9\.]*$/
  Gem::Specification.all_names.each do |gem_name|
    match = pattern.match(gem_name)
    next unless match
    Solargraph::Logging.logger.info "Loading extension #{gem_name}"
    require match[1]
  end
end
"resource": ""
} |
# Sort pins so that variable pins with a nil assignment come last.
# Relative order within each group is preserved.
#
# @param pins [Array<Pin::Base>]
# @return [Array<Pin::Base>]
def prefer_non_nil_variables pins
  preferred, nil_assigned = pins.partition do |pin|
    !(pin.variable? && pin.nil_assignment?)
  end
  preferred + nil_assigned
end
"resource": ""
} |
# Check whether `sub` inherits (directly or transitively) from `sup`.
#
# @param sup [String] the candidate superclass
# @param sub [String] the namespace whose ancestry is walked
# @return [Boolean]
def super_and_sub?(sup, sub)
  target = qualify(sup)
  ancestor = qualify(store.get_superclass(sub), sub)
  while ancestor
    return true if ancestor == target
    ancestor = qualify(store.get_superclass(ancestor), ancestor)
  end
  false
end
"resource": ""
} |
# True if the range contains the specified position, inclusive of both
# the start and end boundaries.
#
# @param position [Position, Array(Integer, Integer)]
# @return [Boolean]
def contain? position
  pos = Position.normalize(position)
  return false unless (start.line..ending.line).cover?(pos.line)
  # On the boundary lines the character must also fall inside the range.
  return false if pos.line == start.line && pos.character < start.character
  return false if pos.line == ending.line && pos.character > ending.character
  true
end
"resource": ""
} |
# True if the range includes the specified position, i.e. it is contained
# in the range but is not the range's start position.
#
# @param position [Position, Array(Integer, Integer)]
# @return [Boolean]
def include? position
  pos = Position.normalize(position)
  return false unless contain?(pos)
  !(pos.line == start.line && pos.character == start.character)
end
"resource": ""
} |
# Get the stack of AST nodes that contain the given position, from the
# innermost node outward to the root.
#
# @param line [Integer]
# @param column [Integer]
# @return [Array] the node stack built by inner_tree_at
def tree_at(line, column)
  stack = []
  inner_tree_at(@node, Position.new(line, column), stack)
  stack
end
"resource": ""
} |
# Apply an updater's changes and return a synchronized Source.
#
# Attempts a full reparse of the updated code first; if it does not
# parse, falls back to a "repaired" variant of the edit that keeps the
# AST valid while recording the changed ranges as error ranges.
#
# @param updater [Source::Updater] the incremental changes to apply
# @return [Source] self when the text is unchanged, otherwise a new Source
# @raise [RuntimeError] if the updater targets a different file
def synchronize updater
  raise 'Invalid synchronization' unless updater.filename == filename
  real_code = updater.write(@code)
  if real_code == @code
    # No textual change; just bump the version.
    @version = updater.version
    return self
  end
  synced = Source.new(real_code, filename)
  if synced.parsed?
    synced.version = updater.version
    return synced
  end
  # The updated code does not parse: build a repaired variant that does,
  # but keep the real code and flag the edited ranges as errors.
  incr_code = updater.repair(@repaired)
  synced = Source.new(incr_code, filename)
  synced.error_ranges.concat (error_ranges + updater.changes.map(&:range))
  synced.code = real_code
  synced.version = updater.version
  synced
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.