_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3
values | text stringlengths 66 10.5k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q15600 | SimpleNavigation.ItemContainer.level_for_item | train | def level_for_item(navi_key)
  # This container holds the item directly, so its own level applies.
  return level if self[navi_key]
  # Otherwise search each item's sub navigation recursively.
  items.each do |item|
    sub_navigation = item.sub_navigation
    next unless sub_navigation
    found_level = sub_navigation.level_for_item(navi_key)
    return found_level if found_level
  end
  nil
end | ruby | {
"resource": ""
} |
q15601 | GPGME.KeyCommon.usable_for? | train | def usable_for?(purposes)
  # Accept a single purpose or a list of purposes.
  wanted = purposes.kind_of?(Array) ? purposes : [purposes]
  # A compromised key can never be used, regardless of capability.
  bad_states = [:revoked, :expired, :disabled, :invalid]
  return false if bad_states.include?(trust)
  # Usable only when every wanted purpose is covered by the capability list.
  (wanted - capability).empty?
end | ruby | {
"resource": ""
} |
q15602 | GPGME.Data.read | train | def read(length = nil)
  # With an explicit length, delegate a single bounded read.
  return GPGME::gpgme_data_read(self, length) if length
  # Otherwise drain the data object block by block until EOF (nil chunk).
  buffer = String.new
  while (chunk = GPGME::gpgme_data_read(self, BLOCK_SIZE))
    buffer << chunk
  end
  buffer
end | ruby | {
"resource": ""
} |
q15603 | GPGME.Data.seek | train | def seek(offset, whence = IO::SEEK_SET)
  # Reposition the data object's read/write offset.
  # BUG FIX: the +whence+ argument was previously ignored — IO::SEEK_SET was
  # always passed to the C binding, so SEEK_CUR / SEEK_END seeks silently
  # behaved as absolute seeks.
  GPGME::gpgme_data_seek(self, offset, whence)
end | ruby | {
"resource": ""
} |
q15604 | GPGME.Data.file_name= | train | def file_name=(file_name)
err = GPGME::gpgme_data_set_file_name(self, file_name)
exc = GPGME::error_to_exception(err)
raise exc if exc
file_name
end | ruby | {
"resource": ""
} |
q15605 | GPGME.Key.delete! | train | def delete!(allow_secret = false)
  # Remove this key from the keyring inside a fresh context; +allow_secret+
  # also permits deleting the secret part of the key.
  GPGME::Ctx.new { |ctx| ctx.delete_key(self, allow_secret) }
end | ruby | {
"resource": ""
} |
q15606 | GPGME.Ctx.protocol= | train | def protocol=(proto)
err = GPGME::gpgme_set_protocol(self, proto)
exc = GPGME::error_to_exception(err)
raise exc if exc
proto
end | ruby | {
"resource": ""
} |
q15607 | GPGME.Ctx.keylist_next | train | def keylist_next
  # Fetch the next key of the current keylist operation; the C binding
  # stores the result into the array we hand in.
  result = []
  error = GPGME::gpgme_op_keylist_next(self, result)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
  result.first
end | ruby | {
"resource": ""
} |
q15608 | GPGME.Ctx.keylist_end | train | def keylist_end
  # Terminate the pending keylist operation, raising on failure.
  error = GPGME::gpgme_op_keylist_end(self)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
end | ruby | {
"resource": ""
} |
q15609 | GPGME.Ctx.each_key | train | def each_key(pattern = nil, secret_only = false, &block)
# Yield each key matching +pattern+ (all keys when nil); when +secret_only+
# is true only secret keys are listed.
keylist_start(pattern, secret_only)
begin
# keylist_next raises EOFError once the listing is exhausted.
loop { yield keylist_next }
rescue EOFError
# The last key in the list has already been returned.
ensure
# Always close the keylist operation, even on early break or error.
keylist_end
end
end | ruby | {
"resource": ""
} |
q15610 | GPGME.Ctx.keys | train | def keys(pattern = nil, secret_only = nil)
  # Collect every key matching +pattern+ into an array.
  collected = []
  each_key(pattern, secret_only) { |key| collected << key }
  collected
end | ruby | {
"resource": ""
} |
q15611 | GPGME.Ctx.get_key | train | def get_key(fingerprint, secret = false)
  # Look up a single key by fingerprint; +secret+ selects the secret keyring.
  # The C binding writes the key into the array we pass in.
  result = []
  error = GPGME::gpgme_get_key(self, fingerprint, result, secret ? 1 : 0)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
  result.first
end | ruby | {
"resource": ""
} |
q15612 | GPGME.Ctx.import_keys | train | def import_keys(keydata)
  # Import the keys contained in +keydata+ into the keyring.
  error = GPGME::gpgme_op_import(self, keydata)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
end | ruby | {
"resource": ""
} |
q15613 | GPGME.Ctx.delete_key | train | def delete_key(key, allow_secret = false)
  # Delete +key+ from the keyring; +allow_secret+ also removes the secret part.
  error = GPGME::gpgme_op_delete(self, key, allow_secret ? 1 : 0)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
end | ruby | {
"resource": ""
} |
q15614 | GPGME.Ctx.edit_key | train | def edit_key(key, editfunc, hook_value = nil, out = Data.new)
  # Drive an interactive edit session for +key+ via +editfunc+, writing any
  # terminal output into +out+.
  error = GPGME::gpgme_op_edit(self, key, editfunc, hook_value, out)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
end | ruby | {
"resource": ""
} |
q15615 | GPGME.Ctx.edit_card_key | train | def edit_card_key(key, editfunc, hook_value = nil, out = Data.new)
  # Card variant of edit_key: drive a card-edit session via +editfunc+,
  # writing any terminal output into +out+.
  error = GPGME::gpgme_op_card_edit(self, key, editfunc, hook_value, out)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
end | ruby | {
"resource": ""
} |
q15616 | GPGME.Ctx.verify | train | def verify(sig, signed_text = nil, plain = Data.new)
  # Verify +sig+; for detached signatures pass +signed_text+, otherwise the
  # recovered plaintext is written into +plain+, which is returned.
  error = GPGME::gpgme_op_verify(self, sig, signed_text, plain)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
  plain
end | ruby | {
"resource": ""
} |
q15617 | GPGME.Ctx.add_signer | train | def add_signer(*keys)
  # Register each key as a signer for subsequent sign operations,
  # stopping at the first key GPGME rejects.
  keys.each do |key|
    error = GPGME::gpgme_signers_add(self, key)
    if (exception = GPGME::error_to_exception(error))
      raise exception
    end
  end
end | ruby | {
"resource": ""
} |
q15618 | GPGME.Ctx.sign | train | def sign(plain, sig = Data.new, mode = GPGME::SIG_MODE_NORMAL)
  # Sign +plain+ into +sig+ using the given signature mode; returns +sig+.
  error = GPGME::gpgme_op_sign(self, plain, sig, mode)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
  sig
end | ruby | {
"resource": ""
} |
q15619 | GPGME.Ctx.encrypt | train | def encrypt(recp, plain, cipher = Data.new, flags = 0)
  # Encrypt +plain+ for the recipients +recp+ (callers pass nil for
  # symmetric encryption), writing ciphertext into +cipher+; returns +cipher+.
  error = GPGME::gpgme_op_encrypt(self, recp, flags, plain, cipher)
  if (exception = GPGME::error_to_exception(error))
    raise exception
  end
  cipher
end | ruby | {
"resource": ""
} |
q15620 | GPGME.Crypto.encrypt | train | def encrypt(plain, options = {})
options = @default_options.merge options
plain_data = Data.new(plain)
cipher_data = Data.new(options[:output])
keys = Key.find(:public, options[:recipients])
keys = nil if options[:symmetric]
flags = 0
flags |= GPGME::ENCRYPT_AL... | ruby | {
"resource": ""
} |
q15621 | GPGME.Crypto.decrypt | train | def decrypt(cipher, options = {})
options = @default_options.merge options
plain_data = Data.new(options[:output])
cipher_data = Data.new(cipher)
GPGME::Ctx.new(options) do |ctx|
begin
ctx.decrypt_verify(cipher_data, plain_data)
rescue GPGME::Error::UnsupportedAlgo... | ruby | {
"resource": ""
} |
q15622 | GPGME.Crypto.sign | train | def sign(text, options = {})
options = @default_options.merge options
plain = Data.new(text)
output = Data.new(options[:output])
mode = options[:mode] || GPGME::SIG_MODE_NORMAL
GPGME::Ctx.new(options) do |ctx|
if options[:signer]
signers = Key.find(:secret, options[:... | ruby | {
"resource": ""
} |
q15623 | GPGME.Crypto.verify | train | def verify(sig, options = {})
options = @default_options.merge options
sig = Data.new(sig)
signed_text = Data.new(options[:signed_text])
output = Data.new(options[:output]) unless options[:signed_text]
GPGME::Ctx.new(options) do |ctx|
ctx.verify(sig, signed_text, out... | ruby | {
"resource": ""
} |
q15624 | Grit.Index.add | train | def add(path, data)
  # Split the slash-separated path into directories plus a final file name.
  *dirs, filename = path.split('/')
  # Walk (creating as needed) the nested hash for each directory component.
  node = self.tree
  dirs.each do |dir|
    node[dir] ||= {}
    node = node[dir]
  end
  node[filename] = data
end | ruby | {
"resource": ""
} |
q15625 | Grit.Index.write_tree | train | def write_tree(tree = nil, now_tree = nil)
tree = self.tree if !tree
tree_contents = {}
# fill in original tree
now_tree = read_tree(now_tree) if(now_tree && now_tree.is_a?(String))
now_tree.contents.each do |obj|
sha = [obj.id].pack("H*")
k = obj.name
k += '/' if ... | ruby | {
"resource": ""
} |
q15626 | Grit.Tree.content_from_string | train | def content_from_string(repo, text)
mode, type, id, name = text.split(/ |\t/, 4)
case type
when "tree"
Tree.create(repo, :id => id, :mode => mode, :name => name)
when "blob"
Blob.create(repo, :id => id, :mode => mode, :name => name)
when "link"
Blob.crea... | ruby | {
"resource": ""
} |
q15627 | Grit.Tree./ | train | def /(file)
# Tree traversal operator: tree/"lib/grit.rb" descends one path component
# at a time by re-invoking / on each intermediate tree.
if file =~ /\//
# NOTE(review): the rescue yields nil when an intermediate component is
# missing (nil/x raises), but it also hides any unrelated error.
file.split("/").inject(self) { |acc, x| acc/x } rescue nil
else
# A plain name is looked up among this tree's direct contents.
self.contents.find { |c| c.name == file }
end
end | ruby | {
"resource": ""
} |
def create_initialize(repo, atts)
  # Populate a freshly allocated instance: remember the owning repo and
  # copy each attribute into a matching instance variable. Returns self.
  @repo = repo
  atts.each do |name, value|
    instance_variable_set(:"@#{name}", value)
  end
  self
end
"resource": ""
} |
q15629 | Grit.Submodule.url | train | def url(ref)
  # Read the submodule config for +ref+, build an id -> url table from its
  # entries, and answer the url recorded for this submodule's id.
  config = self.class.config(@repo, ref)
  lookup = {}
  config.each_key do |key|
    entry = config[key]
    lookup[entry['id']] = entry['url']
  end
  lookup[@id]
end | ruby | {
"resource": ""
} |
q15630 | Grit.Status.diff_files | train | def diff_files
hsh = {}
@base.git.diff_files.split("\n").each do |line|
(info, file) = line.split("\t")
(mode_src, mode_dest, sha_src, sha_dest, type) = info.split
hsh[file] = {:path => file, :mode_file => mode_src.to_s[1, 7], :mode_index => mode_dest,
... | ruby | {
"resource": ""
} |
def output(time)
  # Format an actor line the way git stores it:
  #   "Name <email> epoch +HHMM"
  #
  # BUG FIX: the previous version derived the hour field with offset / 60,
  # and Ruby integer division floors toward -infinity, so negative
  # non-whole-hour zones (e.g. -05:30, offset -330 min) rendered as -0630.
  # Compute hours/minutes from |offset| and prepend the sign explicitly.
  offset = time.utc_offset / 60
  "%s <%s> %d %s%02d%02d" % [
    @name,
    @email || "null",
    time.to_i,
    offset < 0 ? "-" : "+",
    offset.abs / 60,
    offset.abs % 60]
end
"resource": ""
} |
q15632 | Grit.Repo.recent_tag_name | train | def recent_tag_name(committish = nil, options = {})
  # `git describe --always` for the committish; nil when git prints nothing.
  described = git.describe({:always => true}.update(options), committish.to_s).to_s.strip
  described.empty? ? nil : described
end | ruby | {
"resource": ""
} |
q15633 | Grit.Repo.refs_list | train | def refs_list
  # Parse `git for-each-ref` output ("<sha> <type>\t<ref>" per line)
  # into [ref_name, sha, type] triples.
  self.git.for_each_ref.split("\n").map do |line|
    shatype, ref = line.split("\t")
    sha, type = shatype.split(' ')
    [ref, sha, type]
  end
end | ruby | {
"resource": ""
} |
q15634 | Grit.Repo.commit_deltas_from | train | def commit_deltas_from(other_repo, ref = "master", other_ref = "master")
# TODO: we should be able to figure out the branch point, rather than
# rev-list'ing the whole thing
repo_refs = self.git.rev_list({}, ref).strip.split("\n")
other_repo_refs = other_repo.git.rev_list({}, other_ref).st... | ruby | {
"resource": ""
} |
q15635 | Grit.Repo.log | train | def log(commit = 'master', path = nil, options = {})
  # Run `git log --pretty=raw` for +commit+, optionally limited to +path+,
  # and parse the output into Commit objects.
  actual_options = {:pretty => "raw"}.merge(options)
  arguments = [commit]
  arguments.concat(['--', path]) if path
  raw_log = self.git.log(actual_options, *arguments)
  Commit.list_from_string(self, raw_log)
end | ruby | {
"resource": ""
} |
q15636 | Grit.Repo.alternates= | train | def alternates=(alts)
alts.each do |alt|
unless File.exist?(alt)
raise "Could not set alternates. Alternate path #{alt} must exist"
end
end
if alts.empty?
self.git.fs_write('objects/info/alternates', '')
else
self.git.fs_write('objects/info/alternates',... | ruby | {
"resource": ""
} |
q15637 | Grit.Commit.diffs | train | def diffs(options = {})
  # A root commit has no parent to diff against, so fall back to `show`.
  return show if parents.empty?
  self.class.diff(@repo, parents.first.id, @id, [], options)
end | ruby | {
"resource": ""
} |
q15638 | Grit.Git.fs_write | train | def fs_write(file, contents)
  # Write +contents+ to a path inside the .git directory, creating any
  # missing intermediate directories first.
  target = File.join(self.git_dir, file)
  FileUtils.mkdir_p(File.dirname(target))
  File.open(target, 'w') { |io| io.write(contents) }
end | ruby | {
"resource": ""
} |
q15639 | Grit.Git.fs_move | train | def fs_move(from, to)
  # Move a file or directory between two paths inside the .git directory.
  source = File.join(self.git_dir, from)
  target = File.join(self.git_dir, to)
  FileUtils.mv(source, target)
end | ruby | {
"resource": ""
} |
q15640 | Grit.Git.fs_chmod | train | def fs_chmod(mode, file = '/')
# Recursively chmod +file+, a path relative to the .git directory
# (defaults to the whole .git directory).
FileUtils.chmod_R(mode, File.join(self.git_dir, file))
end | ruby | {
"resource": ""
} |
q15641 | Grit.Git.check_applies | train | def check_applies(options={}, head_sha=nil, applies_sha=nil)
options, head_sha, applies_sha = {}, options, head_sha if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GIT_INDEX_FILE' =... | ruby | {
"resource": ""
} |
q15642 | Grit.Git.get_patch | train | def get_patch(options={}, applies_sha=nil)
options, applies_sha = {}, options if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GIT_INDEX_FILE' => git_index)
native(:diff, opti... | ruby | {
"resource": ""
} |
q15643 | Grit.Git.apply_patch | train | def apply_patch(options={}, head_sha=nil, patch=nil)
options, head_sha, patch = {}, options, head_sha if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
options[:raise] = true
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GI... | ruby | {
"resource": ""
} |
q15644 | Grit.Git.native | train | def native(cmd, options = {}, *args, &block)
args = args.first if args.size == 1 && args[0].is_a?(Array)
args.map! { |a| a.to_s }
args.reject! { |a| a.empty? }
# special option arguments
env = options.delete(:env) || {}
raise_errors = options.delete(:raise)
process_info... | ruby | {
"resource": ""
} |
q15645 | Grit.Git.run | train | def run(prefix, cmd, postfix, options, args, &block)
timeout = options.delete(:timeout) rescue nil
timeout = true if timeout.nil?
base = options.delete(:base) rescue nil
base = true if base.nil?
if input = options.delete(:input)
block = lambda { |stdin| stdin.write(inpu... | ruby | {
"resource": ""
} |
q15646 | Grit.Git.transform_options | train | def transform_options(options)
args = []
options.keys.each do |opt|
if opt.to_s.size == 1
if options[opt] == true
args << "-#{opt}"
elsif options[opt] == false
# ignore
else
val = options.delete(opt)
args << "-#{opt.to_s} ... | ruby | {
"resource": ""
} |
q15647 | Pact.RequestDecorator.body | train | def body
  # Form-encoded hash bodies are serialized as URL-encoded pairs; anything
  # else is reified from its (possibly term-containing) representation.
  if content_type_is_form && request.body.is_a?(Hash)
    URI.encode_www_form(convert_hash_body_to_array_of_arrays)
  else
    Pact::Reification.from_term(request.body)
  end
end | ruby | {
"resource": ""
} |
q15648 | Pact.RequestDecorator.convert_hash_body_to_array_of_arrays | train | def convert_hash_body_to_array_of_arrays
  # Expand { k => v } and { k => [v1, v2] } alike into [[k, v1], [k, v2]]
  # so repeated form fields survive URL encoding.
  pairs = request.body.keys.flat_map do |key|
    [*request.body[key]].map { |value| [key, value] }
  end
  Pact::Reification.from_term(pairs)
end | ruby | {
"resource": ""
} |
q15649 | ForemanTasks.ProxySelector.select_by_jobs_count | train | def select_by_jobs_count(proxies)
exclude = @tasks.keys + @offline
@tasks.merge!(get_counts(proxies - exclude))
next_proxy = @tasks.select { |proxy, _| proxies.include?(proxy) }
.min_by { |_, job_count| job_count }.try(:first)
@tasks[next_proxy] += 1 if next_proxy.presen... | ruby | {
"resource": ""
} |
q15650 | ForemanTasks.Lock.colliding_locks | train | def colliding_locks
task_ids = task.self_and_parents.map(&:id)
colliding_locks_scope = Lock.active.where(Lock.arel_table[:task_id].not_in(task_ids))
colliding_locks_scope = colliding_locks_scope.where(name: name,
resource_id: resourc... | ruby | {
"resource": ""
} |
q15651 | ForemanTasks.RemoteTask.trigger | train | def trigger(proxy_action_name, input)
response = begin
proxy.trigger_task(proxy_action_name, input).merge('result' => 'success')
rescue RestClient::Exception => e
logger.warn "Could not trigger task on the smart proxy: #{e.message}"
{}
... | ruby | {
"resource": ""
} |
q15652 | Actions.ProxyAction.fill_continuous_output | train | def fill_continuous_output(continuous_output)
failed_proxy_tasks.each do |failure_data|
message = _('Initialization error: %s') %
"#{failure_data[:exception_class]} - #{failure_data[:exception_message]}"
continuous_output.add_output(message, 'debug', failure_data[:timestamp])
... | ruby | {
"resource": ""
} |
q15653 | Actions.RecurringAction.trigger_repeat | train | def trigger_repeat(execution_plan)
request_id = ::Logging.mdc['request']
::Logging.mdc['request'] = SecureRandom.uuid
if execution_plan.delay_record && recurring_logic_task_group
args = execution_plan.delay_record.args
logic = recurring_logic_task_group.recurring_logic
logic.tr... | ruby | {
"resource": ""
} |
q15654 | BitBucket.Repos::Following.followers | train | def followers(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
response = get_request("/1.0/repositories/#{user}/#{repo.downcase}/followers/", params)
return response unless block... | ruby | {
"resource": ""
} |
q15655 | BitBucket.Repos::Following.followed | train | def followed(*args)
  # List the repos the current user follows; yields each entry when a
  # block is given, otherwise returns the raw response.
  params = args.extract_options!
  normalize! params
  response = get_request("/1.0/user/follows", params)
  return response unless block_given?
  response.each { |repo| yield repo }
end | ruby | {
"resource": ""
} |
q15656 | BitBucket.Repos::Components.get | train | def get(user_name, repo_name, component_id, params={})
  # Fetch a single repository component by id (API 2.0).
  update_and_validate_user_repo_params(user_name, repo_name)
  normalize! params
  endpoint = "/2.0/repositories/#{user}/#{repo.downcase}/components/#{component_id}"
  get_request(endpoint, params)
end | ruby | {
"resource": ""
} |
q15657 | BitBucket.Repos::Services.create | train | def create(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
assert_required_keys(REQUIRED_KEY_PARAM_NAMES, params)
post_request("/1.0/repositories/#{user}/#{repo.downcase}/service... | ruby | {
"resource": ""
} |
q15658 | BitBucket.Repos::Services.edit | train | def edit(user_name, repo_name, service_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of(service_id)
normalize! params
put_request("/1.0/repositories/#{user}/#{repo.downcase}/services/#{service... | ruby | {
"resource": ""
} |
q15659 | BitBucket.Repos::DefaultReviewers.get | train | def get(user_name, repo_name, reviewer_username, params={})
  # Fetch one default reviewer of the repo by username (API 2.0).
  update_and_validate_user_repo_params(user_name, repo_name)
  normalize! params
  endpoint = "/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}"
  get_request(endpoint, params)
end | ruby | {
"resource": ""
} |
q15660 | BitBucket.Repos::DefaultReviewers.add | train | def add(user_name, repo_name, reviewer_username, params={})
  # Add a user to the repo's default reviewers (API 2.0, idempotent PUT).
  update_and_validate_user_repo_params(user_name, repo_name)
  normalize! params
  endpoint = "/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}"
  put_request(endpoint, params)
end | ruby | {
"resource": ""
} |
q15661 | BitBucket.Repos::DefaultReviewers.remove | train | def remove(user_name, repo_name, reviewer_username, params={})
  # Remove a user from the repo's default reviewers (API 2.0).
  update_and_validate_user_repo_params(user_name, repo_name)
  normalize! params
  endpoint = "/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}"
  delete_request(endpoint, params)
end | ruby | {
"resource": ""
} |
q15662 | BitBucket.Issues::Components.get | train | def get(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components... | ruby | {
"resource": ""
} |
q15663 | BitBucket.Issues::Components.update | train | def update(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
filter! VALID_COMPONENT_INPUTS, params
assert_required_keys(VALI... | ruby | {
"resource": ""
} |
q15664 | BitBucket.Issues::Components.delete | train | def delete(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
delete_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/com... | ruby | {
"resource": ""
} |
q15665 | BitBucket.Repos::Keys.create | train | def create(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_KEY_PARAM_NAMES, params
assert_required_keys(VALID_KEY_PARAM_NAMES, params)
options = { headers: { ... | ruby | {
"resource": ""
} |
q15666 | BitBucket.Repos::Keys.edit | train | def edit(user_name, repo_name, key_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of key_id
normalize! params
filter! VALID_KEY_PARAM_NAMES, params
put_request("/1.0/repositories/#{user}/... | ruby | {
"resource": ""
} |
q15667 | BitBucket.Teams.members | train | def members(team_name)
  # List the members of a team; yields each member when a block is given.
  response = get_request("/2.0/teams/#{team_name}/members")
  return response["values"] unless block_given?
  response["values"].each { |member| yield member }
end | ruby | {
"resource": ""
} |
q15668 | BitBucket.Teams.followers | train | def followers(team_name)
  # List a team's followers; yields each follower when a block is given.
  response = get_request("/2.0/teams/#{team_name}/followers")
  return response["values"] unless block_given?
  response["values"].each { |follower| yield follower }
end | ruby | {
"resource": ""
} |
q15669 | BitBucket.Teams.following | train | def following(team_name)
  # List the accounts a team follows; yields each one when a block is given.
  response = get_request("/2.0/teams/#{team_name}/following")
  return response["values"] unless block_given?
  response["values"].each { |followed| yield followed }
end | ruby | {
"resource": ""
} |
q15670 | BitBucket.Teams.repos | train | def repos(team_name)
  # List the team's repositories; yields each repo when a block is given.
  response = get_request("/2.0/repositories/#{team_name}")
  return response["values"] unless block_given?
  response["values"].each { |repository| yield repository }
end | ruby | {
"resource": ""
} |
q15671 | BitBucket.Issues.list_repo | train | def list_repo(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_ISSUE_PARAM_NAMES, params
# _merge_mime_type(:issue, params)
assert_valid_values(VALID_ISSUE_PA... | ruby | {
"resource": ""
} |
q15672 | BitBucket.Issues.create | train | def create(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
_merge_user_into_params!(params) unless params.has_key?('user')
# _merge_mime_type(:issue, params)
filter! VALID... | ruby | {
"resource": ""
} |
q15673 | BitBucket.API.method_missing | train | def method_missing(method, *args, &block) # :nodoc:
# Dynamic helpers over the API's attribute accessors:
#   foo?      -> true when `foo` is set (non-nil)
#   clear_foo -> resets `foo` to nil
# Anything else falls through to the default NoMethodError.
# NOTE(review): respond_to_missing? is not overridden alongside this, so
# respond_to?(:foo?) will not reflect these dynamic methods.
case method.to_s
when /^(.*)\?$/
return !self.send($1.to_s).nil?
when /^clear_(.*)$/
self.send("#{$1.to_s}=", nil)
else
super
end
end | ruby | {
"resource": ""
} |
q15674 | Down.Utils.filename_from_content_disposition | train | def filename_from_content_disposition(content_disposition)
content_disposition = content_disposition.to_s
escaped_filename =
content_disposition[/filename\*=UTF-8''(\S+)/, 1] ||
content_disposition[/filename="([^"]*)"/, 1] ||
content_disposition[/filename=(\S+)/, 1]
filename ... | ruby | {
"resource": ""
} |
q15675 | Down.Wget.download | train | def download(url, *args, max_size: nil, content_length_proc: nil, progress_proc: nil, destination: nil, **options)
io = open(url, *args, **options, rewindable: false)
content_length_proc.call(io.size) if content_length_proc && io.size
if max_size && io.size && io.size > max_size
raise Down::... | ruby | {
"resource": ""
} |
q15676 | Down.Wget.open | train | def open(url, *args, rewindable: true, **options)
arguments = generate_command(url, *args, **options)
command = Down::Wget::Command.execute(arguments)
# Wrap the wget command output in an IO-like object.
output = Down::ChunkedIO.new(
chunks: command.enum_for(:output),
on_cl... | ruby | {
"resource": ""
} |
q15677 | Down.Wget.generate_command | train | def generate_command(url, *args, **options)
command = %W[wget --no-verbose --save-headers -O -]
options = @arguments.grep(Hash).inject({}, :merge).merge(options)
args = @arguments.grep(->(o){!o.is_a?(Hash)}) + args
(args + options.to_a).each do |option, value|
if option.is_a?(String... | ruby | {
"resource": ""
} |
q15678 | Down.Http.download | train | def download(url, max_size: nil, progress_proc: nil, content_length_proc: nil, destination: nil, **options, &block)
response = request(url, **options, &block)
content_length_proc.call(response.content_length) if content_length_proc && response.content_length
if max_size && response.content_length &&... | ruby | {
"resource": ""
} |
q15679 | Down.Http.open | train | def open(url, rewindable: true, **options, &block)
response = request(url, **options, &block)
Down::ChunkedIO.new(
chunks: enum_for(:stream_body, response),
size: response.content_length,
encoding: response.content_type.charset,
rewindable: rewindable,
da... | ruby | {
"resource": ""
} |
q15680 | Down.Http.stream_body | train | def stream_body(response, &block)
# Stream each response body chunk to the block, converting any streaming
# error into a Down error via request_error!.
response.body.each(&block)
rescue => exception
request_error!(exception)
ensure
# Persistent clients keep the connection open for reuse; otherwise
# release it once streaming finishes (or fails).
response.connection.close unless @client.persistent?
end | ruby | {
"resource": ""
} |
q15681 | Down.NetHttp.download | train | def download(url, options = {})
options = @options.merge(options)
max_size = options.delete(:max_size)
max_redirects = options.delete(:max_redirects)
progress_proc = options.delete(:progress_proc)
content_length_proc = options.delete(:content_length_proc)
dest... | ruby | {
"resource": ""
} |
q15682 | Down.NetHttp.ensure_uri | train | def ensure_uri(url, allow_relative: false)
begin
uri = URI(url)
rescue URI::InvalidURIError => exception
raise Down::InvalidUrl, exception.message
end
unless allow_relative && uri.relative?
raise Down::InvalidUrl, "URL scheme needs to be http or https: #{uri}" unless uri... | ruby | {
"resource": ""
} |
q15683 | Down.NetHttp.addressable_normalize | train | def addressable_normalize(url)
  # Normalize the URL (percent-encoding, unicode, etc.) via Addressable.
  Addressable::URI.parse(url).normalize.to_s
end | ruby | {
"resource": ""
} |
def download_result(tempfile, destination)
  # With no destination the caller receives the open tempfile itself.
  if destination
    tempfile.close # Windows refuses to move a file that is still open
    FileUtils.mv tempfile.path, destination
    nil
  else
    tempfile
  end
end
"resource": ""
} |
q15685 | Xirr.NewtonMethod.xirr | train | def xirr guess, options
# Solve for the internal rate of return with Newton's method, seeding the
# solver with +guess+ (or the cash flow's own heuristic guess).
func = Function.new(self, :xnpv)
rate = [guess || cf.irr_guess]
begin
# nlsolve mutates rate in place; rate[0] holds the converged value.
nlsolve(func, rate)
# Rates at or below -100% (or NaN) mean the solver diverged: report nil.
(rate[0] <= -1 || rate[0].nan?) ? nil : rate[0].round(Xirr::PRECISION)
# rate[0].round(Xirr::PRECISION)
rescue
# NOTE(review): bare rescue treats any solver failure as "no rate found" —
# a deliberate best-effort, but it also hides programming errors.
nil
end
end | ruby | {
"resource": ""
} |
q15686 | Xirr.Bisection.xirr | train | def xirr(midpoint, options)
# Initial values
left = [BigDecimal.new(-0.99999999, Xirr::PRECISION), cf.irr_guess].min
right = [BigDecimal.new(9.99999999, Xirr::PRECISION), cf.irr_guess + 1].max
@original_right = right
midpoint ||= cf.irr_guess
midpoint, runs = loop_rates(left, midp... | ruby | {
"resource": ""
} |
q15687 | Xirr.Base.xnpv | train | def xnpv(rate)
  # Net present value of the cash flow at +rate+: discount every
  # transaction by its distance (in periods) from the start date.
  cf.inject(0) do |total, transaction|
    total + xnpv_c(rate, transaction.amount, periods_from_start(transaction.date))
  end
end | ruby | {
"resource": ""
} |
q15688 | Rake.ExtensionTask.define_staging_file_tasks | train | def define_staging_file_tasks(files, lib_path, stage_path, platf, ruby_ver)
files.each do |gem_file|
# ignore directories and the binary extension
next if File.directory?(gem_file) || gem_file == "#{lib_path}/#{binary(platf)}"
stage_file = "#{stage_path}/#{gem_file}"
# copy each f... | ruby | {
"resource": ""
} |
def java_extdirs_arg
  # Build the `-extdirs` javac argument from the JVM's extension dirs,
  # falling back to the JAVA_EXT_DIR environment variable; "" when neither
  # is available. FIX: the previous version assigned the result to an
  # unused local (java_extdir) and relied on the assignment's value as the
  # implicit return.
  extdirs = Java::java.lang.System.getProperty('java.ext.dirs') rescue nil
  extdirs ||= ENV['JAVA_EXT_DIR']
  extdirs.nil? ? "" : "-extdirs \"#{extdirs}\""
end
"resource": ""
} |
q15690 | WinRM.Connection.shell | train | def shell(shell_type, shell_opts = {})
  # Create a shell of the given type. With a block, yield the shell and
  # guarantee it is closed afterwards; otherwise the caller owns closing it.
  new_shell = shell_factory.create_shell(shell_type, shell_opts)
  return new_shell unless block_given?
  begin
    yield new_shell
  ensure
    new_shell.close
  end
end | ruby | {
"resource": ""
} |
q15691 | WinRM.Connection.run_wql | train | def run_wql(wql, namespace = 'root/cimv2/*', &block)
# Execute a WQL query over WSMV in the given namespace and process the
# raw transport response, streaming results to the block when given.
query = WinRM::WSMV::WqlQuery.new(transport, @connection_opts, wql, namespace)
query.process_response(transport.send_request(query.build), &block)
end | ruby | {
"resource": ""
} |
def includes_token?(obj, value)
  # True when +obj+ is an array containing +value+, comparing as symbols.
  return false unless obj.is_a?(Array)
  tokens = obj.map { |token| token.to_sym }
  tokens.include?(value.to_sym)
end
"resource": ""
} |
q15693 | Honeybadger.Notice.ignore_by_class? | train | def ignore_by_class?(ignored_class = nil)
@ignore_by_class ||= Proc.new do |ignored_class|
case error_class
when (ignored_class.respond_to?(:name) ? ignored_class.name : ignored_class)
true
else
exception && ignored_class.is_a?(Class) && exception.class < ignored_class
... | ruby | {
"resource": ""
} |
q15694 | Honeybadger.Notice.construct_request_hash | train | def construct_request_hash
request = {
url: url,
component: component,
action: action,
params: params,
session: session,
cgi_data: cgi_data,
sanitizer: request_sanitizer
}
request.delete_if {|k,v| config.excluded_request_keys.include?(k) }
... | ruby | {
"resource": ""
} |
q15695 | Honeybadger.Notice.exception_context | train | def exception_context(exception)
  # Exceptions aren't hash-convertible themselves, so only use the
  # exception when it explicitly provides honeybadger context.
  object =
    if exception.respond_to?(:to_honeybadger_context)
      exception
    else
      {}.freeze
    end
  Context(object)
end | ruby | {
"resource": ""
} |
q15696 | Honeybadger.Notice.parse_backtrace | train | def parse_backtrace(backtrace)
  # Run the raw backtrace through Honeybadger's filters and source
  # extraction, returning plain array data for the payload.
  parsed = Backtrace.parse(
    backtrace,
    filters: construct_backtrace_filters(opts),
    config: config,
    source_radius: config[:'exceptions.source_radius']
  )
  parsed.to_a
end | ruby | {
"resource": ""
} |
q15697 | Honeybadger.Notice.exception_cause | train | def exception_cause(exception)
e = exception
if e.respond_to?(:cause) && e.cause && e.cause.is_a?(Exception)
e.cause
elsif e.respond_to?(:original_exception) && e.original_exception && e.original_exception.is_a?(Exception)
e.original_exception
elsif e.respond_to?(:continued_excep... | ruby | {
"resource": ""
} |
q15698 | Honeybadger.Notice.unwrap_causes | train | def unwrap_causes(cause)
causes, c, i = [], cause, 0
while c && i < MAX_EXCEPTION_CAUSES
causes << {
class: c.class.name,
message: c.message,
backtrace: parse_backtrace(c.backtrace || caller)
}
i += 1
c = exception_cause(c)
end
caus... | ruby | {
"resource": ""
} |
q15699 | Honeybadger.Worker.flush | train | def flush
# Block until everything queued so far has been processed: push a marker
# onto the queue and wait until the worker thread reaches it. A dead or
# missing worker thread means there is nothing to flush.
mutex.synchronize do
if thread && thread.alive?
queue.push(marker)
# NOTE(review): marker appears to be a condition-variable-like object —
# wait(mutex) presumably releases the mutex while blocking; confirm.
marker.wait(mutex)
end
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.