_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q15600 | SimpleNavigation.ItemContainer.level_for_item | train | def level_for_item(navi_key)
  # Walk the navigation tree depth-first and report the level of the
  # container that owns +navi_key+; nil when the key is not found anywhere.
  return level if self[navi_key]
  items.each do |item|
    sub_nav = item.sub_navigation
    next unless sub_nav
    found = sub_nav.level_for_item(navi_key)
    return found if found
  end
  nil
end | ruby | {
"resource": ""
} |
q15601 | GPGME.KeyCommon.usable_for? | train | def usable_for?(purposes)
  # Accept a single purpose or a list of purposes.
  purposes = [purposes] unless purposes.kind_of?(Array)
  # A revoked/expired/disabled/invalid key can never be used.
  return false if [:revoked, :expired, :disabled, :invalid].include?(trust)
  # Usable only when every requested purpose is covered by the key's capability list.
  (purposes - capability).empty?
end | ruby | {
"resource": ""
} |
q15602 | GPGME.Data.read | train | def read(length = nil)
# Read at most +length+ bytes from the data object, or — when +length+ is
# nil — drain it completely in BLOCK_SIZE chunks and return the whole string.
if length
GPGME::gpgme_data_read(self, length)
else
buf = String.new
loop do
s = GPGME::gpgme_data_read(self, BLOCK_SIZE)
# The loop stops on a falsy result — presumably nil at end-of-data
# (TODO confirm against the gpgme binding).
break unless s
buf << s
end
buf
end
end | ruby | {
"resource": ""
} |
q15603 | GPGME.Data.seek | train | def seek(offset, whence = IO::SEEK_SET)
  # Reposition the data object's read/write offset.
  #
  # Bug fix: the original hard-coded IO::SEEK_SET as the third argument,
  # silently ignoring the caller-supplied +whence+ (so SEEK_CUR / SEEK_END
  # behaved like SEEK_SET). Pass the parameter through instead.
  GPGME::gpgme_data_seek(self, offset, whence)
end | ruby | {
"resource": ""
} |
q15604 | GPGME.Data.file_name= | train | def file_name=(file_name)
err = GPGME::gpgme_data_set_file_name(self, file_name)
exc = GPGME::error_to_exception(err)
raise exc if exc
file_name
end | ruby | {
"resource": ""
} |
q15605 | GPGME.Key.delete! | train | def delete!(allow_secret = false)
GPGME::Ctx.new do |ctx|
ctx.delete_key self, allow_secret
end
end | ruby | {
"resource": ""
} |
q15606 | GPGME.Ctx.protocol= | train | def protocol=(proto)
err = GPGME::gpgme_set_protocol(self, proto)
exc = GPGME::error_to_exception(err)
raise exc if exc
proto
end | ruby | {
"resource": ""
} |
q15607 | GPGME.Ctx.keylist_next | train | def keylist_next
rkey = []
err = GPGME::gpgme_op_keylist_next(self, rkey)
exc = GPGME::error_to_exception(err)
raise exc if exc
rkey[0]
end | ruby | {
"resource": ""
} |
q15608 | GPGME.Ctx.keylist_end | train | def keylist_end
err = GPGME::gpgme_op_keylist_end(self)
exc = GPGME::error_to_exception(err)
raise exc if exc
end | ruby | {
"resource": ""
} |
q15609 | GPGME.Ctx.each_key | train | def each_key(pattern = nil, secret_only = false, &block)
# Iterate over the keys matching +pattern+, yielding each to the block.
# When +secret_only+ is true, only secret keys are listed.
keylist_start(pattern, secret_only)
begin
# keylist_next is expected to raise EOFError once the listing is
# exhausted, which breaks the infinite loop (see the rescue below).
loop { yield keylist_next }
rescue EOFError
# The last key in the list has already been returned.
ensure
# Always close out the keylist operation, even on error.
keylist_end
end
end | ruby | {
"resource": ""
} |
q15610 | GPGME.Ctx.keys | train | def keys(pattern = nil, secret_only = false)
  # Collect all keys matching +pattern+ into an array.
  #
  # Consistency fix: default +secret_only+ to false to match each_key's
  # signature (nil and false are both falsy here, so callers are unaffected).
  result = []
  each_key(pattern, secret_only) do |key|
    result << key
  end
  result
end | ruby | {
"resource": ""
} |
q15611 | GPGME.Ctx.get_key | train | def get_key(fingerprint, secret = false)
rkey = []
err = GPGME::gpgme_get_key(self, fingerprint, rkey, secret ? 1 : 0)
exc = GPGME::error_to_exception(err)
raise exc if exc
rkey[0]
end | ruby | {
"resource": ""
} |
q15612 | GPGME.Ctx.import_keys | train | def import_keys(keydata)
err = GPGME::gpgme_op_import(self, keydata)
exc = GPGME::error_to_exception(err)
raise exc if exc
end | ruby | {
"resource": ""
} |
q15613 | GPGME.Ctx.delete_key | train | def delete_key(key, allow_secret = false)
err = GPGME::gpgme_op_delete(self, key, allow_secret ? 1 : 0)
exc = GPGME::error_to_exception(err)
raise exc if exc
end | ruby | {
"resource": ""
} |
q15614 | GPGME.Ctx.edit_key | train | def edit_key(key, editfunc, hook_value = nil, out = Data.new)
err = GPGME::gpgme_op_edit(self, key, editfunc, hook_value, out)
exc = GPGME::error_to_exception(err)
raise exc if exc
end | ruby | {
"resource": ""
} |
q15615 | GPGME.Ctx.edit_card_key | train | def edit_card_key(key, editfunc, hook_value = nil, out = Data.new)
err = GPGME::gpgme_op_card_edit(self, key, editfunc, hook_value, out)
exc = GPGME::error_to_exception(err)
raise exc if exc
end | ruby | {
"resource": ""
} |
q15616 | GPGME.Ctx.verify | train | def verify(sig, signed_text = nil, plain = Data.new)
err = GPGME::gpgme_op_verify(self, sig, signed_text, plain)
exc = GPGME::error_to_exception(err)
raise exc if exc
plain
end | ruby | {
"resource": ""
} |
q15617 | GPGME.Ctx.add_signer | train | def add_signer(*keys)
keys.each do |key|
err = GPGME::gpgme_signers_add(self, key)
exc = GPGME::error_to_exception(err)
raise exc if exc
end
end | ruby | {
"resource": ""
} |
q15618 | GPGME.Ctx.sign | train | def sign(plain, sig = Data.new, mode = GPGME::SIG_MODE_NORMAL)
err = GPGME::gpgme_op_sign(self, plain, sig, mode)
exc = GPGME::error_to_exception(err)
raise exc if exc
sig
end | ruby | {
"resource": ""
} |
q15619 | GPGME.Ctx.encrypt | train | def encrypt(recp, plain, cipher = Data.new, flags = 0)
err = GPGME::gpgme_op_encrypt(self, recp, flags, plain, cipher)
exc = GPGME::error_to_exception(err)
raise exc if exc
cipher
end | ruby | {
"resource": ""
} |
q15620 | GPGME.Crypto.encrypt | train | def encrypt(plain, options = {})
# Encrypt +plain+ for options[:recipients], optionally signing it as well
# (options[:sign] / options[:signers]). Returns the rewound cipher Data.
options = @default_options.merge options
plain_data = Data.new(plain)
cipher_data = Data.new(options[:output])
keys = Key.find(:public, options[:recipients])
# Symmetric encryption takes no recipient keys.
keys = nil if options[:symmetric]
flags = 0
flags |= GPGME::ENCRYPT_ALWAYS_TRUST if options[:always_trust]
GPGME::Ctx.new(options) do |ctx|
begin
if options[:sign]
if options[:signers]
signers = Key.find(:public, options[:signers], :sign)
ctx.add_signer(*signers)
end
ctx.encrypt_sign(keys, plain_data, cipher_data, flags)
else
ctx.encrypt(keys, plain_data, cipher_data, flags)
end
rescue GPGME::Error::UnusablePublicKey => exc
# Annotate the exception with the recipients that were rejected.
exc.keys = ctx.encrypt_result.invalid_recipients
raise exc
rescue GPGME::Error::UnusableSecretKey => exc
# Annotate the exception with the signers that were rejected.
exc.keys = ctx.sign_result.invalid_signers
raise exc
end
end
# Rewind so callers can read the ciphertext from the start.
cipher_data.seek(0)
cipher_data
end | ruby | {
"resource": ""
} |
q15621 | GPGME.Crypto.decrypt | train | def decrypt(cipher, options = {})
options = @default_options.merge options
plain_data = Data.new(options[:output])
cipher_data = Data.new(cipher)
GPGME::Ctx.new(options) do |ctx|
begin
ctx.decrypt_verify(cipher_data, plain_data)
rescue GPGME::Error::UnsupportedAlgorithm => exc
exc.algorithm = ctx.decrypt_result.unsupported_algorithm
raise exc
rescue GPGME::Error::WrongKeyUsage => exc
exc.key_usage = ctx.decrypt_result.wrong_key_usage
raise exc
end
verify_result = ctx.verify_result
if verify_result && block_given?
verify_result.signatures.each do |signature|
yield signature
end
end
end
plain_data.seek(0)
plain_data
end | ruby | {
"resource": ""
} |
q15622 | GPGME.Crypto.sign | train | def sign(text, options = {})
options = @default_options.merge options
plain = Data.new(text)
output = Data.new(options[:output])
mode = options[:mode] || GPGME::SIG_MODE_NORMAL
GPGME::Ctx.new(options) do |ctx|
if options[:signer]
signers = Key.find(:secret, options[:signer], :sign)
ctx.add_signer(*signers)
end
begin
ctx.sign(plain, output, mode)
rescue GPGME::Error::UnusableSecretKey => exc
exc.keys = ctx.sign_result.invalid_signers
raise exc
end
end
output.seek(0)
output
end | ruby | {
"resource": ""
} |
q15623 | GPGME.Crypto.verify | train | def verify(sig, options = {})
options = @default_options.merge options
sig = Data.new(sig)
signed_text = Data.new(options[:signed_text])
output = Data.new(options[:output]) unless options[:signed_text]
GPGME::Ctx.new(options) do |ctx|
ctx.verify(sig, signed_text, output)
ctx.verify_result.signatures.each do |signature|
yield signature
end
end
if output
output.seek(0)
output
end
end | ruby | {
"resource": ""
} |
q15624 | Grit.Index.add | train | def add(path, data)
  # Register +data+ (file contents) at slash-separated +path+ inside the
  # in-memory tree, creating intermediate directory hashes as needed.
  *dirs, filename = path.split('/')
  node = self.tree
  dirs.each do |dir|
    node = (node[dir] ||= {})
  end
  node[filename] = data
end | ruby | {
"resource": ""
} |
q15625 | Grit.Index.write_tree | train | def write_tree(tree = nil, now_tree = nil)
# Recursively serialize the in-memory +tree+ hash into raw git tree
# objects, merging on top of +now_tree+ (an existing tree object or a tree
# sha string) when given. Returns the sha of the written root tree.
tree = self.tree if !tree
tree_contents = {}
# fill in original tree
now_tree = read_tree(now_tree) if(now_tree && now_tree.is_a?(String))
now_tree.contents.each do |obj|
# Pack the 40-char hex sha into its 20-byte binary form used by tree objects.
sha = [obj.id].pack("H*")
k = obj.name
# Directories are keyed with a trailing slash (affects the sort below).
k += '/' if (obj.class == Grit::Tree)
tmode = obj.mode.to_i.to_s ## remove zero-padding
tree_contents[k] = "%s %s\0%s" % [tmode, obj.name, sha]
end if now_tree
# overwrite with new tree contents
tree.each do |k, v|
case v
when Array
# [sha, mode] pair referencing an object that already exists.
sha, mode = v
if sha.size == 40 # must be a sha
sha = [sha].pack("H*")
mode = mode.to_i.to_s # leading 0s not allowed
k = k.split('/').last # slashes not allowed
str = "%s %s\0%s" % [mode, k, sha]
tree_contents[k] = str
end
when String
# Raw file contents: store a blob and reference it as a regular file.
sha = write_blob(v)
sha = [sha].pack("H*")
str = "%s %s\0%s" % ['100644', k, sha]
tree_contents[k] = str
when Hash
# Nested directory: recurse, merging with the matching existing subtree.
ctree = now_tree/k if now_tree
sha = write_tree(v, ctree)
sha = [sha].pack("H*")
str = "%s %s\0%s" % ['40000', k, sha]
tree_contents[k + '/'] = str
when false
# Explicit deletion marker: drop the entry from the tree.
tree_contents.delete(k)
end
end
# Entries are emitted in sorted key order before being written as one blob.
tr = tree_contents.sort.map { |k, v| v }.join('')
@last_tree_size = tr.size
self.repo.git.put_raw_object(tr, 'tree')
end | ruby | {
"resource": ""
} |
q15626 | Grit.Tree.content_from_string | train | def content_from_string(repo, text)
  # Parse one "<mode> <type> <id>\t<name>" line of ls-tree output into the
  # matching Grit object. Raises Grit::InvalidObjectType for unknown types.
  mode, type, id, name = text.split(/ |\t/, 4)
  case type
  when "tree"
    Tree.create(repo, :id => id, :mode => mode, :name => name)
  when "blob", "link"
    # "link" entries map to Blob exactly like "blob" did; the two
    # previously-duplicated branches are merged.
    Blob.create(repo, :id => id, :mode => mode, :name => name)
  when "commit"
    Submodule.create(repo, :id => id, :mode => mode, :name => name)
  else
    raise Grit::InvalidObjectType, type
  end
end | ruby | {
"resource": ""
} |
q15627 | Grit.Tree./ | train | def /(file)
# Look up +file+ in this tree. A name containing slashes is resolved one
# segment at a time; a plain name is matched against direct children.
# Returns nil when a segment cannot be resolved.
if file =~ /\//
# NOTE(review): the modifier rescue swallows every StandardError (not
# just missing-path errors) and turns it into nil — confirm intended.
file.split("/").inject(self) { |acc, x| acc/x } rescue nil
else
self.contents.find { |c| c.name == file }
end
end | ruby | {
"resource": ""
} |
q15628 | Grit.Submodule.create_initialize | train | def create_initialize(repo, atts)
  # Populate this Submodule from an attribute hash, setting one instance
  # variable per key. Returns self for chaining.
  @repo = repo
  atts.each { |name, value| instance_variable_set(:"@#{name}", value) }
  self
end | ruby | {
"resource": ""
} |
q15629 | Grit.Submodule.url | train | def url(ref)
  # Resolve this submodule's url at +ref+ by inverting the per-name config
  # into an id => url map and looking up our own id.
  config = self.class.config(@repo, ref)
  id_to_url = config.keys.each_with_object({}) do |key, mapping|
    mapping[config[key]['id']] = config[key]['url']
  end
  id_to_url[@id]
end | ruby | {
"resource": ""
} |
q15630 | Grit.Status.diff_files | train | def diff_files
hsh = {}
@base.git.diff_files.split("\n").each do |line|
(info, file) = line.split("\t")
(mode_src, mode_dest, sha_src, sha_dest, type) = info.split
hsh[file] = {:path => file, :mode_file => mode_src.to_s[1, 7], :mode_index => mode_dest,
:sha_file => sha_src, :sha_index => sha_dest, :type => type}
end
hsh
end | ruby | {
"resource": ""
} |
q15631 | Grit.Actor.output | train | def output(time)
  # Render this actor in git's ident format: "Name <email> epoch +HHMM".
  #
  # Bug fix: the original formatted the zone as "%+.2d%.2d" applied to
  # (offset / 60) and (offset.abs % 60). Ruby's integer division floors, so
  # negative zones that are not whole hours came out wrong (e.g. -03:30
  # rendered as "-430" instead of "-0330"). Compute sign, hours and minutes
  # explicitly from the absolute offset instead.
  offset = time.utc_offset / 60
  sign = offset < 0 ? "-" : "+"
  "%s <%s> %d %s%02d%02d" % [
    @name,
    @email || "null",
    time.to_i,
    sign,
    offset.abs / 60,
    offset.abs % 60]
end | ruby | {
"resource": ""
} |
q15632 | Grit.Repo.recent_tag_name | train | def recent_tag_name(committish = nil, options = {})
value = git.describe({:always => true}.update(options), committish.to_s).to_s.strip
value.size.zero? ? nil : value
end | ruby | {
"resource": ""
} |
q15633 | Grit.Repo.refs_list | train | def refs_list
  # Parse `git for-each-ref` output into [ref_name, sha, type] triples.
  self.git.for_each_ref.split("\n").map do |line|
    shatype, ref = line.split("\t")
    sha, type = shatype.split(' ')
    [ref, sha, type]
  end
end | ruby | {
"resource": ""
} |
q15634 | Grit.Repo.commit_deltas_from | train | def commit_deltas_from(other_repo, ref = "master", other_ref = "master")
# TODO: we should be able to figure out the branch point, rather than
# rev-list'ing the whole thing
repo_refs = self.git.rev_list({}, ref).strip.split("\n")
other_repo_refs = other_repo.git.rev_list({}, other_ref).strip.split("\n")
(other_repo_refs - repo_refs).map do |refn|
Commit.find_all(other_repo, refn, {:max_count => 1}).first
end
end | ruby | {
"resource": ""
} |
q15635 | Grit.Repo.log | train | def log(commit = 'master', path = nil, options = {})
default_options = {:pretty => "raw"}
actual_options = default_options.merge(options)
arg = path ? [commit, '--', path] : [commit]
commits = self.git.log(actual_options, *arg)
Commit.list_from_string(self, commits)
end | ruby | {
"resource": ""
} |
q15636 | Grit.Repo.alternates= | train | def alternates=(alts)
alts.each do |alt|
unless File.exist?(alt)
raise "Could not set alternates. Alternate path #{alt} must exist"
end
end
if alts.empty?
self.git.fs_write('objects/info/alternates', '')
else
self.git.fs_write('objects/info/alternates', alts.join("\n"))
end
end | ruby | {
"resource": ""
} |
q15637 | Grit.Commit.diffs | train | def diffs(options = {})
if parents.empty?
show
else
self.class.diff(@repo, parents.first.id, @id, [], options)
end
end | ruby | {
"resource": ""
} |
q15638 | Grit.Git.fs_write | train | def fs_write(file, contents)
  # Write +contents+ to +file+ (relative to the git dir), creating any
  # missing parent directories first.
  path = File.join(self.git_dir, file)
  FileUtils.mkdir_p(File.dirname(path))
  File.open(path, 'w') { |f| f.write(contents) }
end | ruby | {
"resource": ""
} |
q15639 | Grit.Git.fs_move | train | def fs_move(from, to)
FileUtils.mv(File.join(self.git_dir, from), File.join(self.git_dir, to))
end | ruby | {
"resource": ""
} |
q15640 | Grit.Git.fs_chmod | train | def fs_chmod(mode, file = '/')
FileUtils.chmod_R(mode, File.join(self.git_dir, file))
end | ruby | {
"resource": ""
} |
q15641 | Grit.Git.check_applies | train | def check_applies(options={}, head_sha=nil, applies_sha=nil)
options, head_sha, applies_sha = {}, options, head_sha if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GIT_INDEX_FILE' => git_index)
options[:raise] = true
status = 0
begin
native(:read_tree, options.dup, head_sha)
stdin = native(:diff, options.dup, "#{applies_sha}^", applies_sha)
native(:apply, options.merge(:check => true, :cached => true, :input => stdin))
rescue CommandFailed => fail
status += fail.exitstatus
end
status
end | ruby | {
"resource": ""
} |
q15642 | Grit.Git.get_patch | train | def get_patch(options={}, applies_sha=nil)
options, applies_sha = {}, options if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GIT_INDEX_FILE' => git_index)
native(:diff, options, "#{applies_sha}^", applies_sha)
end | ruby | {
"resource": ""
} |
q15643 | Grit.Git.apply_patch | train | def apply_patch(options={}, head_sha=nil, patch=nil)
options, head_sha, patch = {}, options, head_sha if !options.is_a?(Hash)
options = options.dup
options[:env] &&= options[:env].dup
options[:raise] = true
git_index = create_tempfile('index', true)
(options[:env] ||= {}).merge!('GIT_INDEX_FILE' => git_index)
begin
native(:read_tree, options.dup, head_sha)
native(:apply, options.merge(:cached => true, :input => patch))
rescue CommandFailed
return false
end
native(:write_tree, :env => options[:env]).to_s.chomp!
end | ruby | {
"resource": ""
} |
q15644 | Grit.Git.native | train | def native(cmd, options = {}, *args, &block)
args = args.first if args.size == 1 && args[0].is_a?(Array)
args.map! { |a| a.to_s }
args.reject! { |a| a.empty? }
# special option arguments
env = options.delete(:env) || {}
raise_errors = options.delete(:raise)
process_info = options.delete(:process_info)
# fall back to using a shell when the last argument looks like it wants to
# start a pipeline for compatibility with previous versions of grit.
return run(prefix, cmd, '', options, args) if args[-1].to_s[0] == ?|
# more options
input = options.delete(:input)
timeout = options.delete(:timeout); timeout = true if timeout.nil?
base = options.delete(:base); base = true if base.nil?
chdir = options.delete(:chdir)
# build up the git process argv
argv = []
argv << Git.git_binary
argv << "--git-dir=#{git_dir}" if base
argv << cmd.to_s.tr('_', '-')
argv.concat(options_to_argv(options))
argv.concat(args)
# run it and deal with fallout
Grit.log(argv.join(' ')) if Grit.debug
process =
Child.new(env, *(argv + [{
:input => input,
:chdir => chdir,
:timeout => (Grit::Git.git_timeout if timeout == true),
:max => (Grit::Git.git_max_size if timeout == true)
}]))
Grit.log(process.out) if Grit.debug
Grit.log(process.err) if Grit.debug
status = process.status
if raise_errors && !status.success?
raise CommandFailed.new(argv.join(' '), status.exitstatus, process.err)
elsif process_info
[status.exitstatus, process.out, process.err]
else
process.out
end
rescue TimeoutExceeded, MaximumOutputExceeded
raise GitTimeout, argv.join(' ')
end | ruby | {
"resource": ""
} |
q15645 | Grit.Git.run | train | def run(prefix, cmd, postfix, options, args, &block)
timeout = options.delete(:timeout) rescue nil
timeout = true if timeout.nil?
base = options.delete(:base) rescue nil
base = true if base.nil?
if input = options.delete(:input)
block = lambda { |stdin| stdin.write(input) }
end
opt_args = transform_options(options)
if RUBY_PLATFORM.downcase =~ /mswin(?!ce)|mingw|bccwin/
ext_args = args.reject { |a| a.empty? }.map { |a| (a == '--' || a[0].chr == '|' || Grit.no_quote) ? a : "\"#{e(a)}\"" }
gitdir = base ? "--git-dir=\"#{self.git_dir}\"" : ""
call = "#{prefix}#{Git.git_binary} #{gitdir} #{cmd.to_s.gsub(/_/, '-')} #{(opt_args + ext_args).join(' ')}#{e(postfix)}"
else
ext_args = args.reject { |a| a.empty? }.map { |a| (a == '--' || a[0].chr == '|' || Grit.no_quote) ? a : "'#{e(a)}'" }
gitdir = base ? "--git-dir='#{self.git_dir}'" : ""
call = "#{prefix}#{Git.git_binary} #{gitdir} #{cmd.to_s.gsub(/_/, '-')} #{(opt_args + ext_args).join(' ')}#{e(postfix)}"
end
Grit.log(call) if Grit.debug
response, err = timeout ? sh(call, &block) : wild_sh(call, &block)
Grit.log(response) if Grit.debug
Grit.log(err) if Grit.debug
response
end | ruby | {
"resource": ""
} |
q15646 | Grit.Git.transform_options | train | def transform_options(options)
# Convert a Ruby options hash into command-line flag strings: single-letter
# keys become "-x ['val']", longer keys become "--long-name[='val']".
# A true value emits a bare flag; false is skipped entirely; other values
# are passed through e() (presumably shell escaping — confirm) and quoted.
# NOTE(review): value-carrying keys are deleted from +options+ in place.
args = []
options.keys.each do |opt|
if opt.to_s.size == 1
if options[opt] == true
args << "-#{opt}"
elsif options[opt] == false
# ignore
else
val = options.delete(opt)
args << "-#{opt.to_s} '#{e(val)}'"
end
else
if options[opt] == true
args << "--#{opt.to_s.gsub(/_/, '-')}"
elsif options[opt] == false
# ignore
else
val = options.delete(opt)
args << "--#{opt.to_s.gsub(/_/, '-')}='#{e(val)}'"
end
end
end
args
end | ruby | {
"resource": ""
} |
q15647 | Pact.RequestDecorator.body | train | def body
if content_type_is_form && request.body.is_a?(Hash)
URI.encode_www_form convert_hash_body_to_array_of_arrays
else
Pact::Reification.from_term(request.body)
end
end | ruby | {
"resource": ""
} |
q15648 | Pact.RequestDecorator.convert_hash_body_to_array_of_arrays | train | def convert_hash_body_to_array_of_arrays
  # Flatten the body hash into [key, value] pairs, expanding array values
  # into one pair per element, then reify any Pact terms in the result.
  pairs = request.body.keys.flat_map do |key|
    [*request.body[key]].map { |value| [key, value] }
  end
  Pact::Reification.from_term(pairs)
end | ruby | {
"resource": ""
} |
q15649 | ForemanTasks.ProxySelector.select_by_jobs_count | train | def select_by_jobs_count(proxies)
exclude = @tasks.keys + @offline
@tasks.merge!(get_counts(proxies - exclude))
next_proxy = @tasks.select { |proxy, _| proxies.include?(proxy) }
.min_by { |_, job_count| job_count }.try(:first)
@tasks[next_proxy] += 1 if next_proxy.present?
next_proxy
end | ruby | {
"resource": ""
} |
q15650 | ForemanTasks.Lock.colliding_locks | train | def colliding_locks
task_ids = task.self_and_parents.map(&:id)
colliding_locks_scope = Lock.active.where(Lock.arel_table[:task_id].not_in(task_ids))
colliding_locks_scope = colliding_locks_scope.where(name: name,
resource_id: resource_id,
resource_type: resource_type)
unless exclusive?
colliding_locks_scope = colliding_locks_scope.where(:exclusive => true)
end
colliding_locks_scope
end | ruby | {
"resource": ""
} |
q15651 | ForemanTasks.RemoteTask.trigger | train | def trigger(proxy_action_name, input)
response = begin
proxy.trigger_task(proxy_action_name, input).merge('result' => 'success')
rescue RestClient::Exception => e
logger.warn "Could not trigger task on the smart proxy: #{e.message}"
{}
end
update_from_batch_trigger(response)
save!
end | ruby | {
"resource": ""
} |
q15652 | Actions.ProxyAction.fill_continuous_output | train | def fill_continuous_output(continuous_output)
failed_proxy_tasks.each do |failure_data|
message = _('Initialization error: %s') %
"#{failure_data[:exception_class]} - #{failure_data[:exception_message]}"
continuous_output.add_output(message, 'debug', failure_data[:timestamp])
end
end | ruby | {
"resource": ""
} |
q15653 | Actions.RecurringAction.trigger_repeat | train | def trigger_repeat(execution_plan)
request_id = ::Logging.mdc['request']
::Logging.mdc['request'] = SecureRandom.uuid
if execution_plan.delay_record && recurring_logic_task_group
args = execution_plan.delay_record.args
logic = recurring_logic_task_group.recurring_logic
logic.trigger_repeat_after(task.start_at, self.class, *args)
end
ensure
::Logging.mdc['request'] = request_id
end | ruby | {
"resource": ""
} |
q15654 | BitBucket.Repos::Following.followers | train | def followers(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
response = get_request("/1.0/repositories/#{user}/#{repo.downcase}/followers/", params)
return response unless block_given?
response.each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15655 | BitBucket.Repos::Following.followed | train | def followed(*args)
params = args.extract_options!
normalize! params
response = get_request("/1.0/user/follows", params)
return response unless block_given?
response.each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15656 | BitBucket.Repos::Components.get | train | def get(user_name, repo_name, component_id, params={})
update_and_validate_user_repo_params(user_name, repo_name)
normalize! params
get_request("/2.0/repositories/#{user}/#{repo.downcase}/components/#{component_id}", params)
end | ruby | {
"resource": ""
} |
q15657 | BitBucket.Repos::Services.create | train | def create(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
assert_required_keys(REQUIRED_KEY_PARAM_NAMES, params)
post_request("/1.0/repositories/#{user}/#{repo.downcase}/services", params)
end | ruby | {
"resource": ""
} |
q15658 | BitBucket.Repos::Services.edit | train | def edit(user_name, repo_name, service_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of(service_id)
normalize! params
put_request("/1.0/repositories/#{user}/#{repo.downcase}/services/#{service_id}", params)
end | ruby | {
"resource": ""
} |
q15659 | BitBucket.Repos::DefaultReviewers.get | train | def get(user_name, repo_name, reviewer_username, params={})
update_and_validate_user_repo_params(user_name, repo_name)
normalize! params
get_request("/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}", params)
end | ruby | {
"resource": ""
} |
q15660 | BitBucket.Repos::DefaultReviewers.add | train | def add(user_name, repo_name, reviewer_username, params={})
update_and_validate_user_repo_params(user_name, repo_name)
normalize! params
put_request("/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}", params)
end | ruby | {
"resource": ""
} |
q15661 | BitBucket.Repos::DefaultReviewers.remove | train | def remove(user_name, repo_name, reviewer_username, params={})
update_and_validate_user_repo_params(user_name, repo_name)
normalize! params
delete_request("/2.0/repositories/#{user_name}/#{repo_name}/default-reviewers/#{reviewer_username}", params)
end | ruby | {
"resource": ""
} |
q15662 | BitBucket.Issues::Components.get | train | def get(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end | ruby | {
"resource": ""
} |
q15663 | BitBucket.Issues::Components.update | train | def update(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
filter! VALID_COMPONENT_INPUTS, params
assert_required_keys(VALID_COMPONENT_INPUTS, params)
put_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end | ruby | {
"resource": ""
} |
q15664 | BitBucket.Issues::Components.delete | train | def delete(user_name, repo_name, component_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of component_id
normalize! params
delete_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}", params)
end | ruby | {
"resource": ""
} |
q15665 | BitBucket.Repos::Keys.create | train | def create(user_name, repo_name, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_KEY_PARAM_NAMES, params
assert_required_keys(VALID_KEY_PARAM_NAMES, params)
options = { headers: { "Content-Type" => "application/json" } }
post_request("/1.0/repositories/#{user}/#{repo.downcase}/deploy-keys/", params, options)
end | ruby | {
"resource": ""
} |
q15666 | BitBucket.Repos::Keys.edit | train | def edit(user_name, repo_name, key_id, params={})
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
_validate_presence_of key_id
normalize! params
filter! VALID_KEY_PARAM_NAMES, params
put_request("/1.0/repositories/#{user}/#{repo.downcase}/deploy-keys/#{key_id}", params)
end | ruby | {
"resource": ""
} |
q15667 | BitBucket.Teams.members | train | def members(team_name)
response = get_request("/2.0/teams/#{team_name.to_s}/members")
return response["values"] unless block_given?
response["values"].each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15668 | BitBucket.Teams.followers | train | def followers(team_name)
response = get_request("/2.0/teams/#{team_name.to_s}/followers")
return response["values"] unless block_given?
response["values"].each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15669 | BitBucket.Teams.following | train | def following(team_name)
response = get_request("/2.0/teams/#{team_name.to_s}/following")
return response["values"] unless block_given?
response["values"].each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15670 | BitBucket.Teams.repos | train | def repos(team_name)
response = get_request("/2.0/repositories/#{team_name.to_s}")
return response["values"] unless block_given?
response["values"].each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15671 | BitBucket.Issues.list_repo | train | def list_repo(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
filter! VALID_ISSUE_PARAM_NAMES, params
# _merge_mime_type(:issue, params)
assert_valid_values(VALID_ISSUE_PARAM_VALUES, params)
response = get_request("/1.0/repositories/#{user}/#{repo.downcase}/issues", params)
return response.issues unless block_given?
response.issues.each { |el| yield el }
end | ruby | {
"resource": ""
} |
q15672 | BitBucket.Issues.create | train | def create(user_name, repo_name, params={ })
_update_user_repo_params(user_name, repo_name)
_validate_user_repo_params(user, repo) unless user? && repo?
normalize! params
_merge_user_into_params!(params) unless params.has_key?('user')
# _merge_mime_type(:issue, params)
filter! VALID_ISSUE_PARAM_NAMES, params
assert_required_keys(%w[ title ], params)
post_request("/1.0/repositories/#{user}/#{repo.downcase}/issues/", params)
end | ruby | {
"resource": ""
} |
q15673 | BitBucket.API.method_missing | train | def method_missing(method, *args, &block) # :nodoc:
# Dynamic attribute sugar: "foo?" reports whether #foo returns non-nil,
# "clear_foo" assigns nil via the "foo=" writer; anything else falls
# through to the default behavior.
# NOTE(review): consider overriding respond_to_missing? to match, so
# respond_to?(:foo?) reflects these synthetic methods.
case method.to_s
when /^(.*)\?$/
return !self.send($1.to_s).nil?
when /^clear_(.*)$/
self.send("#{$1.to_s}=", nil)
else
super
end
end | ruby | {
"resource": ""
} |
q15674 | Down.Utils.filename_from_content_disposition | train | def filename_from_content_disposition(content_disposition)
# Extract a filename from a Content-Disposition header value, preferring
# the extended "filename*=UTF-8''..." form (RFC 5987 style), then the
# quoted "filename=\"...\"" form, then a bare "filename=" token.
# Returns nil when no usable filename is present.
content_disposition = content_disposition.to_s
escaped_filename =
content_disposition[/filename\*=UTF-8''(\S+)/, 1] ||
content_disposition[/filename="([^"]*)"/, 1] ||
content_disposition[/filename=(\S+)/, 1]
# Percent-decode the captured value; an empty result means no filename.
filename = CGI.unescape(escaped_filename.to_s)
filename unless filename.empty?
end | ruby | {
"resource": ""
} |
q15675 | Down.Wget.download | train | def download(url, *args, max_size: nil, content_length_proc: nil, progress_proc: nil, destination: nil, **options)
io = open(url, *args, **options, rewindable: false)
content_length_proc.call(io.size) if content_length_proc && io.size
if max_size && io.size && io.size > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
extname = File.extname(URI(url).path)
tempfile = Tempfile.new(["down-wget", extname], binmode: true)
until io.eof?
chunk = io.readpartial(nil, buffer ||= String.new)
tempfile.write(chunk)
progress_proc.call(tempfile.size) if progress_proc
if max_size && tempfile.size > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
end
tempfile.open # flush written content
tempfile.extend Down::Wget::DownloadedFile
tempfile.url = url
tempfile.headers = io.data[:headers]
download_result(tempfile, destination)
rescue
tempfile.close! if tempfile
raise
ensure
io.close if io
end | ruby | {
"resource": ""
} |
q15676 | Down.Wget.open | train | def open(url, *args, rewindable: true, **options)
arguments = generate_command(url, *args, **options)
command = Down::Wget::Command.execute(arguments)
# Wrap the wget command output in an IO-like object.
output = Down::ChunkedIO.new(
chunks: command.enum_for(:output),
on_close: command.method(:terminate),
rewindable: false,
)
# https://github.com/tmm1/http_parser.rb/issues/29#issuecomment-309976363
header_string = output.readpartial
header_string << output.readpartial until header_string.include?("\r\n\r\n")
header_string, first_chunk = header_string.split("\r\n\r\n", 2)
# Use an HTTP parser to parse out the response headers.
parser = HTTP::Parser.new
parser << header_string
if parser.headers.nil?
output.close
raise Down::Error, "failed to parse response headers"
end
headers = parser.headers
status = parser.status_code
content_length = headers["Content-Length"].to_i if headers["Content-Length"]
charset = headers["Content-Type"][/;\s*charset=([^;]+)/i, 1] if headers["Content-Type"]
# Create an Enumerator which will lazily retrieve chunks of response body.
chunks = Enumerator.new do |yielder|
yielder << first_chunk if first_chunk
yielder << output.readpartial until output.eof?
end
Down::ChunkedIO.new(
chunks: chunks,
size: content_length,
encoding: charset,
rewindable: rewindable,
on_close: output.method(:close),
data: { status: status, headers: headers },
)
end | ruby | {
"resource": ""
} |
q15677 | Down.Wget.generate_command | train | def generate_command(url, *args, **options)
command = %W[wget --no-verbose --save-headers -O -]
options = @arguments.grep(Hash).inject({}, :merge).merge(options)
args = @arguments.grep(->(o){!o.is_a?(Hash)}) + args
(args + options.to_a).each do |option, value|
if option.is_a?(String)
command << option
elsif option.length == 1
command << "-#{option}"
else
command << "--#{option.to_s.gsub("_", "-")}"
end
command << value.to_s unless value.nil?
end
command << url
command
end | ruby | {
"resource": ""
} |
q15678 | Down.Http.download | train | def download(url, max_size: nil, progress_proc: nil, content_length_proc: nil, destination: nil, **options, &block)
# Downloads the remote file into a Tempfile, streaming the body chunk by
# chunk. Raises Down::TooLarge as soon as either the declared
# Content-Length or the accumulated size exceeds max_size. Returns the
# Tempfile, or nil when the content was moved to +destination+.
response = request(url, **options, &block)
content_length_proc.call(response.content_length) if content_length_proc && response.content_length
# Abort early when the server declares a Content-Length over the limit.
if max_size && response.content_length && response.content_length > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
# Preserve the remote file's extension on the Tempfile name.
extname = File.extname(response.uri.path)
tempfile = Tempfile.new(["down-http", extname], binmode: true)
stream_body(response) do |chunk|
tempfile.write(chunk)
chunk.clear # deallocate string
progress_proc.call(tempfile.size) if progress_proc
# Enforce max_size as we go, even when Content-Length was absent or lied.
if max_size && tempfile.size > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
end
tempfile.open # flush written content
# Decorate the Tempfile with #url and #headers accessors.
tempfile.extend Down::Http::DownloadedFile
tempfile.url = response.uri.to_s
tempfile.headers = response.headers.to_h
download_result(tempfile, destination)
rescue
# Remove the partially written Tempfile before propagating any error.
tempfile.close! if tempfile
raise
end | ruby | {
"resource": ""
} |
q15679 | Down.Http.open | train | def open(url, rewindable: true, **options, &block)
# Opens the remote file as an IO-like Down::ChunkedIO that retrieves the
# response body lazily, chunk by chunk, instead of downloading it upfront.
response = request(url, **options, &block)
Down::ChunkedIO.new(
# Each chunk is produced on demand by #stream_body via this enumerator.
chunks: enum_for(:stream_body, response),
size: response.content_length,
encoding: response.content_type.charset,
rewindable: rewindable,
# Expose response metadata (and the raw response) via ChunkedIO#data.
data: { status: response.code, headers: response.headers.to_h, response: response },
)
end | ruby | {
"resource": ""
} |
q15680 | Down.Http.stream_body | train | def stream_body(response, &block)
# Yields the response body to the block chunk by chunk, translating any
# raised exception into the corresponding Down error via #request_error!.
response.body.each(&block)
rescue => exception
request_error!(exception)
ensure
# Persistent clients keep the connection open for reuse; otherwise close it.
response.connection.close unless @client.persistent?
end | ruby | {
"resource": ""
} |
q15681 | Down.NetHttp.download | train | def download(url, options = {})
# Downloads the remote file via open-uri into a Tempfile.
#
# Down-specific options are removed from the hash first; whatever remains
# is forwarded to open-uri unchanged.
options = @options.merge(options)
max_size = options.delete(:max_size)
max_redirects = options.delete(:max_redirects)
progress_proc = options.delete(:progress_proc)
content_length_proc = options.delete(:content_length_proc)
destination = options.delete(:destination)
headers = options.delete(:headers) || {}
# Use open-uri's :content_length_proc or :progress_proc to raise an
# exception early if the file is too large.
#
# Also disable following redirects, as we'll provide our own
# implementation that has the ability to limit the number of redirects.
open_uri_options = {
content_length_proc: proc { |size|
if size && max_size && size > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
content_length_proc.call(size) if content_length_proc
},
progress_proc: proc { |current_size|
if max_size && current_size > max_size
raise Down::TooLarge, "file is too large (max is #{max_size/1024/1024}MB)"
end
progress_proc.call(current_size) if progress_proc
},
redirect: false,
}
# Handle basic authentication in the :proxy option.
if options[:proxy]
proxy = URI(options.delete(:proxy))
user = proxy.user
password = proxy.password
if user || password
# Credentials must be passed separately, stripped from the proxy URL.
proxy.user = nil
proxy.password = nil
open_uri_options[:proxy_http_basic_authentication] = [proxy.to_s, user, password]
else
open_uri_options[:proxy] = proxy.to_s
end
end
open_uri_options.merge!(options)
# open-uri treats string keys in its options hash as request headers.
open_uri_options.merge!(headers)
uri = ensure_uri(addressable_normalize(url))
# Handle basic authentication in the remote URL.
if uri.user || uri.password
open_uri_options[:http_basic_authentication] ||= [uri.user, uri.password]
uri.user = nil
uri.password = nil
end
open_uri_file = open_uri(uri, open_uri_options, follows_remaining: max_redirects)
# Handle the fact that open-uri returns StringIOs for small files.
tempfile = ensure_tempfile(open_uri_file, File.extname(open_uri_file.base_uri.path))
OpenURI::Meta.init tempfile, open_uri_file # add back open-uri methods
tempfile.extend Down::NetHttp::DownloadedFile
download_result(tempfile, destination)
end | ruby | {
"resource": ""
} |
q15682 | Down.NetHttp.ensure_uri | train | def ensure_uri(url, allow_relative: false)
# Parses +url+ into a URI object, raising Down::InvalidUrl when it cannot
# be parsed or when it isn't an absolute http(s) URL. Relative URIs are
# accepted only when allow_relative is true.
begin
uri = URI(url)
rescue URI::InvalidURIError => exception
raise Down::InvalidUrl, exception.message
end
unless allow_relative && uri.relative?
# URI::HTTPS is a subclass of URI::HTTP, so both schemes pass this check.
raise Down::InvalidUrl, "URL scheme needs to be http or https: #{uri}" unless uri.is_a?(URI::HTTP)
end
uri
end | ruby | {
"resource": ""
} |
q15683 | Down.NetHttp.addressable_normalize | train | def addressable_normalize(url)
  # Normalizes the URL with Addressable (percent-encodes characters that
  # URI.parse would reject, normalizes the host, etc.) and returns it as a
  # plain string suitable for URI().
  Addressable::URI.parse(url).normalize.to_s
end | ruby | {
"resource": ""
} |
q15684 | Down.Backend.download_result | train | def download_result(tempfile, destination)
  # Without a destination, the caller receives the Tempfile itself.
  return tempfile if !destination
  # With a destination, move the downloaded content there and return nil.
  tempfile.close # the Tempfile must be closed before moving on Windows
  FileUtils.mv(tempfile.path, destination)
  nil
end | ruby | {
"resource": ""
} |
q15685 | Xirr.NewtonMethod.xirr | train | def xirr(guess, options)
  # Solves for the internal rate of return using the Newton method.
  #
  # guess   - optional starting rate; falls back to the cashflow's own guess.
  # options - unused by this method (kept for interface parity with other
  #           solvers).
  #
  # Returns the rate rounded to Xirr::PRECISION, or nil when the solver
  # fails to converge or produces a meaningless rate.
  func = Function.new(self, :xnpv)
  rate = [guess || cf.irr_guess] # nlsolve mutates this array in place
  begin
    nlsolve(func, rate)
    # Rates at or below -100%, or NaN, indicate no usable solution.
    (rate[0] <= -1 || rate[0].nan?) ? nil : rate[0].round(Xirr::PRECISION)
  rescue StandardError
    # Deliberate best-effort: a failed solve reports "no result" as nil.
    nil
  end
end | ruby | {
"resource": ""
} |
q15686 | Xirr.Bisection.xirr | train | def xirr(midpoint, options)
  # Solves for the internal rate of return by bisection.
  #
  # midpoint - optional starting rate; defaults to the cashflow's guess.
  # options  - expects :iteration_limit for the bisection loop.
  #
  # NOTE: BigDecimal.new was deprecated in Ruby 2.5 and removed in 2.7;
  # Kernel#BigDecimal is the drop-in replacement and works on all versions.
  #
  # Initial bracket: just above -100% on the left, a generous upper bound
  # on the right, widened by the cashflow's own guess when necessary.
  left = [BigDecimal(-0.99999999, Xirr::PRECISION), cf.irr_guess].min
  right = [BigDecimal(9.99999999, Xirr::PRECISION), cf.irr_guess + 1].max
  @original_right = right # remembered so the bracket can be re-checked later
  midpoint ||= cf.irr_guess
  midpoint, runs = loop_rates(left, midpoint, right, options[:iteration_limit])
  get_answer(midpoint, options, runs)
end | ruby | {
"resource": ""
} |
q15687 | Xirr.Base.xnpv | train | def xnpv(rate)
  # Net present value of the cashflow at +rate+: each transaction's amount
  # discounted by the number of periods elapsed since the first date.
  cf.map { |t| xnpv_c(rate, t.amount, periods_from_start(t.date)) }.reduce(0, :+)
end | ruby | {
"resource": ""
} |
q15688 | Rake.ExtensionTask.define_staging_file_tasks | train | def define_staging_file_tasks(files, lib_path, stage_path, platf, ruby_ver)
# Defines Rake file tasks that copy each gem file into the staging
# directory, and appends every staged file as a prerequisite of the
# per-platform/per-ruby "copy" task.
files.each do |gem_file|
# ignore directories and the binary extension
next if File.directory?(gem_file) || gem_file == "#{lib_path}/#{binary(platf)}"
stage_file = "#{stage_path}/#{gem_file}"
# copy each file from base to stage directory (define the task only once,
# in case this method runs for multiple ruby versions)
unless Rake::Task.task_defined?(stage_file) then
directory File.dirname(stage_file)
file stage_file => [File.dirname(stage_file), gem_file] do
cp gem_file, stage_file
end
end
# append each file to the copy task
task "copy:#{@name}:#{platf}:#{ruby_ver}" => [stage_file]
end
end | ruby | {
"resource": ""
} |
q15689 | Rake.JavaExtensionTask.java_extdirs_arg | train | def java_extdirs_arg
  # Builds the "-extdirs" javac argument from the JVM's extension
  # directories, falling back to the JAVA_EXT_DIR environment variable.
  # Returns "" when neither source provides a value.
  #
  # The original assigned the result to an unused local (`java_extdir`);
  # the expression itself is the return value.
  extdirs = Java::java.lang.System.getProperty('java.ext.dirs') rescue nil
  extdirs ||= ENV['JAVA_EXT_DIR']
  extdirs.nil? ? "" : "-extdirs \"#{extdirs}\""
end | ruby | {
"resource": ""
} |
q15690 | WinRM.Connection.shell | train | def shell(shell_type, shell_opts = {})
# Creates a remote shell of the given type. When a block is given, the
# shell is yielded to it and guaranteed to be closed afterwards; without a
# block the caller owns the returned shell and must close it themselves.
shell = shell_factory.create_shell(shell_type, shell_opts)
if block_given?
begin
yield shell
ensure
# Always release the remote shell, even if the block raised.
shell.close
end
else
shell
end
end | ruby | {
"resource": ""
} |
q15691 | WinRM.Connection.run_wql | train | def run_wql(wql, namespace = 'root/cimv2/*', &block)
# Runs a WQL query against the remote WMI service in the given namespace
# and returns (or yields to the block, when given) the processed response.
query = WinRM::WSMV::WqlQuery.new(transport, @connection_opts, wql, namespace)
query.process_response(transport.send_request(query.build), &block)
end | ruby | {
"resource": ""
} |
q15692 | Honeybadger.Config.includes_token? | train | def includes_token?(obj, value)
  # True when +obj+ is an Array containing +value+, comparing both sides as
  # Symbols so String and Symbol entries are interchangeable.
  obj.is_a?(Array) && obj.map(&:to_sym).include?(value.to_sym)
end | ruby | {
"resource": ""
} |
q15693 | Honeybadger.Notice.ignore_by_class? | train | def ignore_by_class?(ignored_class = nil)
# Whether this notice should be ignored based on its exception class.
#
# The memoized proc's parameter deliberately shadows the method argument:
# the same predicate serves both an explicitly supplied class and each
# entry of config.ignored_classes.
@ignore_by_class ||= Proc.new do |ignored_class|
case error_class
# Entries may be classes or strings; either way, compare by class name.
when (ignored_class.respond_to?(:name) ? ignored_class.name : ignored_class)
true
else
# Fall back to matching subclasses of the ignored class.
exception && ignored_class.is_a?(Class) && exception.class < ignored_class
end
end
ignored_class ? @ignore_by_class.call(ignored_class) : config.ignored_classes.any?(&@ignore_by_class)
end | ruby | {
"resource": ""
} |
q15694 | Honeybadger.Notice.construct_request_hash | train | def construct_request_hash
  # Assembles the request portion of the notice payload, dropping any keys
  # the configuration excludes before handing off to RequestPayload.build.
  request = {
    url: url,
    component: component,
    action: action,
    params: params,
    session: session,
    cgi_data: cgi_data,
    sanitizer: request_sanitizer
  }
  filtered = request.reject { |key, _value| config.excluded_request_keys.include?(key) }
  Util::RequestPayload.build(filtered)
end | ruby | {
"resource": ""
} |
q15695 | Honeybadger.Notice.exception_context | train | def exception_context(exception)
# Extracts user-supplied Honeybadger context from the exception, if the
# exception opts in via #to_honeybadger_context.
#
# This extra check exists because the exception itself is not expected to
# convert to a hash.
object = exception if exception.respond_to?(:to_honeybadger_context)
# Default to an immutable empty hash when no context is available.
object ||= {}.freeze
Context(object)
end | ruby | {
"resource": ""
} |
q15696 | Honeybadger.Notice.parse_backtrace | train | def parse_backtrace(backtrace)
# Parses a raw backtrace (array of "file:line:in method" strings) into the
# structured form used by the notice payload.
# NOTE(review): reads +opts+ and +config+ from the instance.
Backtrace.parse(
backtrace,
filters: construct_backtrace_filters(opts),
config: config,
# Number of source lines captured around each backtrace entry.
source_radius: config[:'exceptions.source_radius']
).to_a
end | ruby | {
"resource": ""
} |
q15697 | Honeybadger.Notice.exception_cause | train | def exception_cause(exception)
  # Returns the nested exception wrapped by +exception+, checking the
  # standard #cause first and then the legacy wrapper accessors used by
  # older frameworks. Returns nil when no wrapped Exception is found.
  [:cause, :original_exception, :continued_exception].each do |accessor|
    next unless exception.respond_to?(accessor)
    nested = exception.public_send(accessor)
    return nested if nested && nested.is_a?(Exception)
  end
  nil
end | ruby | {
"resource": ""
} |
q15698 | Honeybadger.Notice.unwrap_causes | train | def unwrap_causes(cause)
# Walks the exception cause chain, collecting up to MAX_EXCEPTION_CAUSES
# entries of {class:, message:, backtrace:} for the payload. The cap
# guards against cyclic or pathologically deep cause chains.
causes, c, i = [], cause, 0
while c && i < MAX_EXCEPTION_CAUSES
causes << {
class: c.class.name,
message: c.message,
# Fall back to the current call stack when the cause has no backtrace.
backtrace: parse_backtrace(c.backtrace || caller)
}
i += 1
c = exception_cause(c)
end
causes
end | ruby | {
"resource": ""
} |
q15699 | Honeybadger.Worker.flush | train | def flush
# Blocks the calling thread until the worker thread has drained every
# notice queued before this call. No-op when the worker thread is dead.
mutex.synchronize do
if thread && thread.alive?
# Push a sentinel onto the queue; the worker wakes us when it reaches it.
# NOTE(review): marker appears to be a ConditionVariable-like object
# (it supports #wait(mutex)) — confirm against its definition.
queue.push(marker)
marker.wait(mutex)
end
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.