_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q22000
YNAB.ScheduledTransactionsApi.get_scheduled_transaction_by_id
train
def get_scheduled_transaction_by_id(budget_id, scheduled_transaction_id, opts = {}) data, _status_code, _headers = get_scheduled_transaction_by_id_with_http_info(budget_id, scheduled_transaction_id, opts) data end
ruby
{ "resource": "" }
q22001
YNAB.ScheduledTransactionsApi.get_scheduled_transactions
train
def get_scheduled_transactions(budget_id, opts = {}) data, _status_code, _headers = get_scheduled_transactions_with_http_info(budget_id, opts) data end
ruby
{ "resource": "" }
q22002
Chess.Gnuchess.gnuchess_move
train
def gnuchess_move pipe = IO.popen('gnuchess -x', 'r+') begin pipe.puts('depth 1') pipe.puts('manual') self.coord_moves.each do |m| pipe.puts(m) end pipe.puts('go') while line = pipe.gets raise IllegalMoveError if line.include?('Invalid move') match = line.match(/My move is : ([a-h][1-8][a-h][1-8][rkbq]?)/) return match[1] if match end ensure pipe.puts('quit') pipe.close end return moves end
ruby
{ "resource": "" }
q22003
YNAB.PayeesApi.get_payee_by_id
train
def get_payee_by_id(budget_id, payee_id, opts = {}) data, _status_code, _headers = get_payee_by_id_with_http_info(budget_id, payee_id, opts) data end
ruby
{ "resource": "" }
q22004
YNAB.PayeesApi.get_payees
train
def get_payees(budget_id, opts = {}) data, _status_code, _headers = get_payees_with_http_info(budget_id, opts) data end
ruby
{ "resource": "" }
q22005
Jdoc.Resource.links
train
def links @links ||= @schema.links.map do |link| if link.method && link.href Link.new(link) end end.compact end
ruby
{ "resource": "" }
q22006
Daybreak.DB.hash_default
train
def hash_default(_, key) if @default != nil value = @default.respond_to?(:call) ? @default.call(key) : @default @journal << [key, value] @table[key] = value end end
ruby
{ "resource": "" }
q22007
Daybreak.Journal.clear
train
def clear flush with_tmpfile do |path, file| file.write(@format.header) file.close # Clear replaces the database file like a compactification does with_flock(File::LOCK_EX) do File.rename(path, @file) end end open end
ruby
{ "resource": "" }
q22008
Daybreak.Journal.compact
train
def compact load with_tmpfile do |path, file| # Compactified database has the same size -> return return self if @pos == file.write(dump(yield, @format.header)) with_flock(File::LOCK_EX) do # Database was replaced (cleared or compactified) in the meantime if @pos != nil # Append changed journal records if the database changed during compactification file.write(read) file.close File.rename(path, @file) end end end open replay end
ruby
{ "resource": "" }
q22009
Daybreak.Journal.open
train
def open @fd.close if @fd @fd = File.open(@file, 'ab+') @fd.advise(:sequential) if @fd.respond_to? :advise stat = @fd.stat @inode = stat.ino write(@format.header) if stat.size == 0 @pos = nil end
ruby
{ "resource": "" }
q22010
Daybreak.Journal.read
train
def read with_flock(File::LOCK_SH) do # File was opened unless @pos @fd.pos = 0 @format.read_header(@fd) @size = 0 @emit.call(nil) else @fd.pos = @pos end buf = @fd.read @pos = @fd.pos buf end end
ruby
{ "resource": "" }
q22011
Daybreak.Journal.dump
train
def dump(records, dump = '') # each is faster than inject records.each do |record| record[1] = @serializer.dump(record.last) dump << @format.dump(record) end dump end
ruby
{ "resource": "" }
q22012
Daybreak.Journal.write
train
def write(dump) with_flock(File::LOCK_EX) do @fd.write(dump) # Flush to make sure the file is really updated @fd.flush end @pos = @fd.pos if @pos && @fd.pos == @pos + dump.bytesize end
ruby
{ "resource": "" }
q22013
Daybreak.Journal.with_flock
train
def with_flock(mode) return yield if @locked begin loop do # HACK: JRuby returns false if the process is already hold by the same process # see https://github.com/jruby/jruby/issues/496 Thread.pass until @fd.flock(mode) # Check if database was replaced (cleared or compactified) in the meantime # break if not stat = @fd.stat break if stat.nlink > 0 && stat.ino == @inode open end @locked = true yield ensure @fd.flock(File::LOCK_UN) @locked = false end end
ruby
{ "resource": "" }
q22014
Daybreak.Journal.with_tmpfile
train
def with_tmpfile path = [@file, $$.to_s(36), Thread.current.object_id.to_s(36)].join file = File.open(path, 'wb') yield(path, file) ensure file.close unless file.closed? File.unlink(path) if File.exists?(path) end
ruby
{ "resource": "" }
q22015
Daybreak.Format.read_header
train
def read_header(input) raise 'Not a Daybreak database' if input.read(MAGIC.bytesize) != MAGIC ver = input.read(2).unpack('n').first raise "Expected database version #{VERSION}, got #{ver}" if ver != VERSION end
ruby
{ "resource": "" }
q22016
Daybreak.Format.dump
train
def dump(record) data = if record.size == 1 [record[0].bytesize, DELETE].pack('NN') << record[0] else [record[0].bytesize, record[1].bytesize].pack('NN') << record[0] << record[1] end data << crc32(data) end
ruby
{ "resource": "" }
q22017
Daybreak.Format.parse
train
def parse(buf) n, count = 0, 0 while n < buf.size key_size, value_size = buf[n, 8].unpack('NN') data_size = key_size + 8 data_size += value_size if value_size != DELETE data = buf[n, data_size] n += data_size raise 'CRC mismatch: your data might be corrupted!' unless buf[n, 4] == crc32(data) n += 4 yield(value_size == DELETE ? [data[8, key_size]] : [data[8, key_size], data[8 + key_size, value_size]]) count += 1 end count end
ruby
{ "resource": "" }
q22018
Ronin.URL.query_string
train
def query_string params = {} self.query_params.each do |param| params[param.name] = param.value end return ::URI::QueryParams.dump(params) end
ruby
{ "resource": "" }
q22019
Ronin.URL.query_string=
train
def query_string=(query) self.query_params.clear ::URI::QueryParams.parse(query).each do |name,value| self.query_params.new( :name => URLQueryParamName.first_or_new(:name => name), :value => value ) end return query end
ruby
{ "resource": "" }
q22020
Ronin.URL.to_uri
train
def to_uri # map the URL scheme to a URI class url_class = SCHEMES.fetch(self.scheme.name,::URI::Generic) host = if self.host_name self.host_name.address end port = if self.port self.port.number end query = unless self.query_params.empty? self.query_string end # build the URI return url_class.build( :scheme => self.scheme.name, :host => host, :port => port, :path => self.path, :query => query, :fragment => self.fragment ) end
ruby
{ "resource": "" }
q22021
Ronin.HostName.lookup!
train
def lookup!(nameserver=nil) resolver = Resolv.resolver(nameserver) ips = begin resolver.getaddresses(self.address) rescue [] end ips.map! do |addr| IPAddress.first_or_create( :address => addr, :host_names => [self] ) end return ips end
ruby
{ "resource": "" }
q22022
Ronin.Campaign.target!
train
def target!(addr) unless (address = Address.first(:address => addr)) raise("unknown address #{addr.dump}") end return Target.first_or_create(:campaign => self, :address => address) end
ruby
{ "resource": "" }
q22023
Ronin.IPAddress.lookup!
train
def lookup!(nameserver=nil) resolver = Resolv.resolver(nameserver) hosts = begin resolver.getnames(self.address.to_s) rescue [] end hosts.map! do |name| HostName.first_or_create( :address => name, :ip_addresses => [self] ) end return hosts end
ruby
{ "resource": "" }
q22024
Ronin.Repository.find_script
train
def find_script(sub_path) paths = @script_dirs.map { |dir| File.join(dir,sub_path) } return script_paths.first(:path => paths) end
ruby
{ "resource": "" }
q22025
Ronin.Repository.update!
train
def update! local_repo = Pullr::LocalRepository.new( :path => self.path, :scm => self.scm ) # only update if we have a repository local_repo.update(self.uri) # re-initialize the metadata initialize_metadata # save the repository if save # syncs the cached files of the repository sync_scripts! end yield self if block_given? return self end
ruby
{ "resource": "" }
q22026
Ronin.Repository.uninstall!
train
def uninstall! deactivate! FileUtils.rm_rf(self.path) if self.installed? # destroy any cached files first clean_scripts! # remove the repository from the database destroy if saved? yield self if block_given? return self end
ruby
{ "resource": "" }
q22027
Ronin.Password.digest
train
def digest(algorithm,options={}) digest_class = begin Digest.const_get(algorithm.to_s.upcase) rescue LoadError raise(ArgumentError,"Unknown Digest algorithm #{algorithm}") end hash = digest_class.new if options[:prepend_salt] hash << options[:prepend_salt].to_s end hash << self.clear_text if options[:append_salt] hash << options[:append_salt].to_s end return hash.hexdigest end
ruby
{ "resource": "" }
q22028
Ronin.MACAddress.to_i
train
def to_i self.address.split(':').inject(0) do |bits,char| bits = ((bits << 8) | char.hex) end end
ruby
{ "resource": "" }
q22029
Plugg.Dispatcher.start
train
def start(paths, params = {}) @registry = [] paths.each do |path| if path[-1] == '/' path.chop! end Dir["#{path}/*.rb"].each do |f| require File.expand_path(f) begin instance = Object.const_get(File.basename(f, '.rb')).new # `before` event callback if instance.respond_to?(:before) instance.send(:before) end # `setup` method if instance.respond_to?(:setup) instance.send(:setup, params) end @registry.push(instance) rescue Exception => e puts "#{f} Plugg Initialization Exception: #{e}" end end end end
ruby
{ "resource": "" }
q22030
Plugg.Dispatcher.on
train
def on(method, *args, &block) if [:initialize, :before, :setup, :after].include? method raise "#{method} should not be called directly" end buffer = [] # Container for the response buffer threads = [] # Container for the execution threads @registry.each do |s| if s.respond_to?(method.to_sym, false) threads << Thread.new do responder = DispatchResponder.new(s) responder.trap(@timeout) do if s.method(method.to_sym).arity == 0 s.send(method, &block) else s.send(method, *args, &block) end end responder.finalize buffer << responder.to_h end end end threads.map(&:join) buffer end
ruby
{ "resource": "" }
q22031
ActionMessenger.Base.message_to_slack
train
def message_to_slack(channel:, options: {}) @caller_method_name = caller[0][/`([^']*)'/, 1] options = apply_defaults(options) message = nil ActiveSupport::Notifications.instrument('message_to_slack.action_messenger', channel: channel, body: options[:text]) do message = slack_client.message(channel, options) end message ensure self.deliveries << DeliveryLog.new(__method__, channel, message) end
ruby
{ "resource": "" }
q22032
ActionMessenger.Base.upload_file_to_slack
train
def upload_file_to_slack(channels: ,file: ,options: {}) upload_file = nil ActiveSupport::Notifications.instrument('upload_file_to_slack.action_messenger', channels: channels) do upload_file = slack_client.upload_file(channels, file, options) end upload_file ensure self.deliveries << DeliveryLog.new(__method__, channels, upload_file) end
ruby
{ "resource": "" }
q22033
WORLDCATAPI.SruSearchResponse.extract_multiple
train
def extract_multiple(record, field, tag) a = Array.new record.fields(field).each do |field| a.push field[tag] end return a end
ruby
{ "resource": "" }
q22034
Conduit::Driver::Braintree.Base.perform
train
def perform body = perform_action parser = parser_class.new(body) Conduit::ApiResponse.new(raw_response: @raw_response, body: body, parser: parser) rescue Braintree::NotFoundError => error report_braintree_exceptions(error) rescue ArgumentError => error respond_with_error(error.message) rescue Braintree::BraintreeError => error report_braintree_exceptions(error) rescue Net::ReadTimeout, Net::OpenTimeout, Errno::ETIMEDOUT respond_with_error("Braintree timeout") end
ruby
{ "resource": "" }
q22035
Settingson::Base.ClassMethods.defaults
train
def defaults @__defaults = Settingson::Store::Default.new( klass: self ) if block_given? Rails.application.config.after_initialize do yield @__defaults end end @__defaults end
ruby
{ "resource": "" }
q22036
PoliceState.TransitionHelpers.attribute_transitioning?
train
def attribute_transitioning?(attr, options={}) options = _transform_options_for_attribute(attr, options) attribute_changed?(attr, options) end
ruby
{ "resource": "" }
q22037
ActionMessenger.MessageDelivery.deliver_now!
train
def deliver_now! messenger.handle_exceptions do ActiveSupport::Notifications.instrument('deliver_now!.action_messenger', method_name: method_name, args: args) do if args.present? messenger.public_send(method_name, *args) else messenger.public_send(method_name) end end end end
ruby
{ "resource": "" }
q22038
ActionMessenger.MessageDelivery.deliver_later!
train
def deliver_later! ActionMessenger::MessageDeliveryJob.perform_later(self.class.name, 'deliver_now!', messenger_class.to_s, method_name.to_s, *args) end
ruby
{ "resource": "" }
q22039
GraphqlGrpc.Proxy.map_functions
train
def map_functions(stub_services) return @function_map unless @function_map.empty? stub_services.keys.each do |service_name| stub = @services[service_name] = stub_services[service_name] stub.class.to_s.gsub('::Stub', '::Service').constantize.rpc_descs.values.each do |d| next if d.name.to_sym == :Healthcheck grpc_func = ::GraphqlGrpc::Function.new(service_name, stub, d) if @function_map.key?(grpc_func.name) sn = @function_map[grpc_func.name].service_name STDERR.puts "Skipping method #{grpc_func.name}; it was already defined on #{sn}" # raise ConfigurationError, "#{grpc_func.name} was already defined on #{sn}." end @function_map[grpc_func.name] = grpc_func end end @function_map end
ruby
{ "resource": "" }
q22040
GraphqlGrpc.Function.arg
train
def arg(params) rpc_desc.input.decode_json(params.reject { |k, _v| k == :selections }.to_json) end
ruby
{ "resource": "" }
q22041
Conduit::Driver::Braintree.UpdateCreditCard.whitelist_options
train
def whitelist_options @options[:options] ||= {}.tap do |h| h[:verify_card] = @options.fetch(:verify_card, true) @options.delete(:verify_card) if @options.key?(:verification_merchant_account_id) h[:verification_merchant_account_id] = @options.delete(:verification_merchant_account_id) end end super end
ruby
{ "resource": "" }
q22042
WPDB.Termable.add_term
train
def add_term(term, taxonomy, description, count) if term.respond_to?(:term_id) term_id = term.term_id else term_id = term.to_i end term_taxonomy = WPDB::TermTaxonomy.where(term_id: term_id, taxonomy: taxonomy).first unless term_taxonomy term_taxonomy = WPDB::TermTaxonomy.create( term_id: term_id, taxonomy: taxonomy, description: description, count: count ) else term_taxonomy.count += count end add_termtaxonomy(term_taxonomy) end
ruby
{ "resource": "" }
q22043
NounProjectApi.Retriever.find
train
def find(id) raise ArgumentError.new("Missing id/slug") unless id result = access_token.get("#{API_BASE}#{self.class::API_PATH}#{id}") raise ServiceError.new(result.code, result.body) unless result.code == "200" self.class::ITEM_CLASS.new(result.body) end
ruby
{ "resource": "" }
q22044
TrustedSandbox.UidPool.lock
train
def lock retries.times do atomically(timeout) do uid = available_uid if uid lock_uid uid return uid.to_i end end sleep(delay) end raise PoolTimeoutError.new('No available UIDs in the pool. Please try again later.') end
ruby
{ "resource": "" }
q22045
TrustedSandbox.Response.parse!
train
def parse! unless File.exists? output_file_path @status = 'error' @error = ContainerError.new('User code did not finish properly') @error_to_raise = @error return end begin data = File.binread output_file_path @raw_response = Marshal.load(data) rescue => e @status = 'error' @error = e @error_to_raise = ContainerError.new(e) return end unless ['success', 'error'].include? @raw_response[:status] @status = 'error' @error = InternalError.new('Output file has invalid format') @error_to_raise = @error return end @status = @raw_response[:status] @output = @raw_response[:output] @error = @raw_response[:error] @error_to_raise = UserCodeError.new(@error) if @error nil end
ruby
{ "resource": "" }
q22046
Pro.Commands.find_repo
train
def find_repo(name) return @index.base_dirs.first unless name match = FuzzyMatch.new(@index.to_a, :read => :name).find(name) match[1] unless match.nil? end
ruby
{ "resource": "" }
q22047
Pro.Commands.status
train
def status() max_name = @index.map {|repo| repo.name.length}.max + 1 @index.each do |r| next unless Dir.exists?(r.path) status = repo_status(r.path) next if status.empty? name = format("%-#{max_name}s",r.name).bold puts "#{name} > #{status}" end end
ruby
{ "resource": "" }
q22048
Pro.Commands.repo_status
train
def repo_status(path) messages = [] messages << EMPTY_MESSAGE if repo_empty?(path) messages << UNCOMMITTED_MESSAGE if commit_pending?(path) messages << UNTRACKED_MESSAGE if untracked_files?(path) messages << UNPUSHED_MESSAGE if repo_unpushed?(path) messages.join(JOIN_STRING) end
ruby
{ "resource": "" }
q22049
Pro.Commands.install_cd
train
def install_cd puts CD_INFO print "Continue with installation (yN)? " return unless gets.chomp.downcase == "y" # get name print "Name of pro cd command (default 'pd'): " name = gets.strip name = 'pd' if name.empty? # sub into function func = SHELL_FUNCTION.sub("{{name}}",name) did_any = false ['~/.profile', '~/.bashrc','~/.zshrc','~/.bash_profile'].each do |rel_path| # check if file exists path = File.expand_path(rel_path) next unless File.exists?(path) # ask the user if they want to add it print "Install #{name} function to #{rel_path} [yN]: " next unless gets.chomp.downcase == "y" # add it on to the end of the file File.open(path,'a') do |file| file.puts func end did_any = true end if did_any puts "Done! #{name} will be available in new shells." else STDERR.puts "WARNING: Did not install in any shell dotfiles.".red STDERR.puts "Maybe you should create the shell config file you want.".red end end
ruby
{ "resource": "" }
q22050
Pro.Indexer.read_cache
train
def read_cache return nil unless File.readable_real?(CACHE_PATH) index = YAML::load_file(CACHE_PATH) return nil unless index.created_version == Pro::VERSION return nil unless index.base_dirs == @base_dirs index end
ruby
{ "resource": "" }
q22051
Pro.Indexer.run_index_process
train
def run_index_process readme, writeme = IO.pipe p1 = fork { # Stop cd function from blocking on fork STDOUT.reopen(writeme) readme.close index_process unless File.exists?(INDEXER_LOCK_PATH) } Process.detach(p1) end
ruby
{ "resource": "" }
q22052
Pro.Indexer.cache_index
train
def cache_index(index) # TODO: atomic rename. Right now we just hope. File.open(CACHE_PATH, 'w' ) do |out| YAML::dump( index, out ) end end
ruby
{ "resource": "" }
q22053
Pro.Indexer.index_repos_slow
train
def index_repos_slow(base) STDERR.puts "WARNING: pro is indexing slowly, please install the 'find' command." repos = [] Find.find(base) do |path| target = path # additionally, index repos symlinked directly from a base root if FileTest.symlink?(path) next if File.dirname(path) != base target = File.readlink(path) end # dir must exist and be a git repo if FileTest.directory?(target) && File.exists?(path+"/.git") base_name = File.basename(path) repos << Repo.new(base_name,path) Find.prune end end repos end
ruby
{ "resource": "" }
q22054
Pro.Indexer.find_base_dirs
train
def find_base_dirs() bases = [] # check environment first base = ENV['PRO_BASE'] bases << base if base # next check proBase file path = ENV['HOME'] + "/.proBase" if File.exists?(path) # read lines of the pro base file bases += IO.read(path).split("\n").map {|p| File.expand_path(p.strip)} end # strip bases that do not exist # I know about select! but it doesn't exist in 1.8 bases = bases.select {|b| File.exists?(b)} # if no bases then return home bases << ENV['HOME'] if bases.empty? bases end
ruby
{ "resource": "" }
q22055
Pupistry.GPG.artifact_sign
train
def artifact_sign @signature = 'unsigned' # Clean up the existing signature file signature_cleanup Dir.chdir("#{$config['general']['app_cache']}/artifacts/") do # Generate the signature file and pick up the signature data unless system "gpg --use-agent --detach-sign artifact.#{@checksum}.tar.gz" $logger.error 'Unable to sign the artifact, an unexpected failure occured. No file uploaded.' return false end if File.exist?("artifact.#{@checksum}.tar.gz.sig") $logger.info 'A signature file was successfully generated.' else $logger.error 'A signature file was NOT generated.' return false end # Convert the signature into base64. It's easier to bundle all the # metadata into a single file and extracting it out when needed, than # having to keep track of yet-another-file. Because we encode into # ASCII here, no need to call GPG with --armor either. @signature = Base64.encode64(File.read("artifact.#{@checksum}.tar.gz.sig")) unless @signature $logger.error 'An unexpected issue occured and no signature was generated' return false end end # Make sure the public key has been uploaded if it hasn't already pubkey_upload @signature end
ruby
{ "resource": "" }
q22056
Pupistry.GPG.signature_extract
train
def signature_extract manifest = YAML.load(File.open($config['general']['app_cache'] + "/artifacts/manifest.#{@checksum}.yaml"), safe: true, raise_on_unknown_tag: true) if manifest['gpgsig'] # We have the base64 version @signature = manifest['gpgsig'] # Decode the base64 and write the signature file File.write("#{$config['general']['app_cache']}/artifacts/artifact.#{@checksum}.tar.gz.sig", Base64.decode64(@signature)) return @signature else return false end rescue StandardError => e $logger.error 'Something unexpected occured when reading the manifest file' raise e end
ruby
{ "resource": "" }
q22057
Pupistry.GPG.signature_save
train
def signature_save manifest = YAML.load(File.open($config['general']['app_cache'] + "/artifacts/manifest.#{@checksum}.yaml"), safe: true, raise_on_unknown_tag: true) manifest['gpgsig'] = @signature File.open("#{$config['general']['app_cache']}/artifacts/manifest.#{@checksum}.yaml", 'w') do |fh| fh.write YAML.dump(manifest) end return true rescue StandardError $logger.error 'Something unexpected occured when updating the manifest file with GPG signature' return false end
ruby
{ "resource": "" }
q22058
Pupistry.GPG.pubkey_upload
train
def pubkey_upload unless File.exist?("#{$config['general']['app_cache']}/artifacts/#{$config['general']['gpg_signing_key']}.publickey") # GPG key does not exist locally, we therefore assume it's not in the S3 # bucket either, so we should export out and upload. Technically this may # result in a few extra uploads (once for any new machine using Pupistry) # but it doesn't cause any issue and saves me writing more code ;-) $logger.info "Exporting GPG key #{$config['general']['gpg_signing_key']} and uploading to S3 bucket..." # If it doesn't exist on this machine, then we're a bit stuck! unless pubkey_exist? $logger.error "The public key #{$config['general']['gpg_signing_key']} does not exist on this system, so unable to export it out" return false end # Export out key unless system "gpg --export --armour 0x#{$config['general']['gpg_signing_key']} > #{$config['general']['app_cache']}/artifacts/#{$config['general']['gpg_signing_key']}.publickey" $logger.error 'A fault occured when trying to export the GPG key' return false end # Upload s3 = Pupistry::StorageAWS.new 'build' unless s3.upload "#{$config['general']['app_cache']}/artifacts/#{$config['general']['gpg_signing_key']}.publickey", "#{$config['general']['gpg_signing_key']}.publickey" $logger.error 'Unable to upload GPG key to S3 bucket' return false end end end
ruby
{ "resource": "" }
q22059
Pupistry.GPG.pubkey_install
train
def pubkey_install $logger.warn "Installing GPG key #{$config['general']['gpg_signing_key']}..." s3 = Pupistry::StorageAWS.new 'agent' unless s3.download "#{$config['general']['gpg_signing_key']}.publickey", "#{$config['general']['app_cache']}/artifacts/#{$config['general']['gpg_signing_key']}.publickey" $logger.error 'Unable to download GPG key from S3 bucket, this will prevent validation of signature' return false end unless system "gpg --import < #{$config['general']['app_cache']}/artifacts/#{$config['general']['gpg_signing_key']}.publickey > /dev/null 2>&1" $logger.error 'A fault occured when trying to import the GPG key' return false end rescue StandardError $logger.error 'Something unexpected occured when installing the GPG public key' return false end
ruby
{ "resource": "" }
q22060
Danger.DangerAutoLabel.wip=
train
def wip=(pr) label_names = [] labels.each do |label| label_names << label.name end puts("exist labels:" + label_names.join(", ")) unless wip? begin add_label("WIP") rescue Octokit::UnprocessableEntity => e puts "WIP label is already exists." puts e end end github.api.add_labels_to_an_issue(repo, pr, [wip_label]) end
ruby
{ "resource": "" }
q22061
Danger.DangerAutoLabel.set
train
def set(pr, name, color) message = "" if label?(name) message = "Set #{name} label. (Color: #{color})" else message = "Add #{name} new label. (Color: #{color})" add_label(name, color) end github.api.add_labels_to_an_issue(repo, pr, [name]) puts message end
ruby
{ "resource": "" }
q22062
Danger.DangerAutoLabel.delete
train
def delete(name) begin github.api.delete_label!(repo, name) rescue Octokit::Error => e puts "Error message: \"#{name}\" label is not existing." puts e end end
ruby
{ "resource": "" }
q22063
Danger.DangerAutoLabel.remove
train
def remove(name) begin github.api.remove_label(repo, number, name) rescue Octokit::Error => e puts "Error message: \"#{name}\" label is not existing." puts e end end
ruby
{ "resource": "" }
q22064
Coverage.Helpers.diff
train
def diff(cov1, cov2) ncov = {} old_format = true cov1.each do |path1, runs1| if cov2[path1] runs2 = cov2[path1] if runs1.is_a?(Array) && runs2.is_a?(Array) && old_format # diff two old-format (ruby 2.4 or before) coverage results ncov[path1] = diff_lines(runs1, runs2) next end # promotes from old format to new one if runs1.is_a?(Array) runs1 = { :lines => runs1 } end if runs2.is_a?(Array) runs2 = { :lines => runs2 } end if old_format old_format = false old2new!(ncov) end # diff two new-format (ruby 2.5 or later) coverage results ncov[path1] = {} [ [:lines, :diff_lines], [:branches, :diff_branches], [:methods, :diff_methods], ].each do |type, diff_func| if runs1[type] if runs2[type] ncov[path1][type] = send(diff_func, runs1[type], runs2[type]) else ncov[path1][type] = runs1[type] end end end else if runs1.is_a?(Array) && old_format ncov[path1] = runs1 next end # promotes from old format to new one if runs1.is_a?(Array) runs1 = { :lines => runs1 } end if old_format old_format = false old2new!(ncov) end ncov[path1] = runs1 end end ncov end
ruby
{ "resource": "" }
q22065
Coverage.Helpers.sanitize
train
def sanitize(cov) ncov = {} cov.each do |path, runs| if runs.is_a?(Array) ncov[path] = runs next end ncov[path] = {} ncov[path][:lines] = runs[:lines] if runs[:lines] ncov[path][:branches] = runs[:branches] if runs[:branches] if runs[:methods] ncov[path][:methods] = methods = {} runs[:methods].each do |mthd, run| klass = begin Marshal.dump(mthd[0]) mthd[0] rescue mthd[0].to_s end methods[[klass] + mthd.drop(1)] = run end end end ncov end
ruby
{ "resource": "" }
q22066
Coverage.Helpers.save
train
def save(path, cov) File.binwrite(path, Marshal.dump(sanitize(cov))) end
ruby
{ "resource": "" }
q22067
Coverage.Helpers.to_lcov_info
train
def to_lcov_info(cov, out: "", test_name: nil) out << "TN:#{ test_name }\n" cov.each do |path, runs| out << "SF:#{ path }\n" # function coverage if runs.is_a?(Hash) && runs[:methods] total = covered = 0 runs[:methods].each do |(klass, name, lineno), run| out << "FN:#{ lineno },#{ klass }##{ name }\n" total += 1 covered += 1 if run > 0 end out << "FNF:#{ total }\n" out << "FNF:#{ covered }\n" runs[:methods].each do |(klass, name, _), run| out << "FNDA:#{ run },#{ klass }##{ name }\n" end end # line coverage if runs.is_a?(Array) || (runs.is_a?(Hash) && runs[:lines]) total = covered = 0 lines = runs.is_a?(Array) ? runs : runs[:lines] lines.each_with_index do |run, lineno| next unless run out << "DA:#{ lineno + 1 },#{ run }\n" total += 1 covered += 1 if run > 0 end out << "LF:#{ total }\n" out << "LH:#{ covered }\n" end # branch coverage if runs.is_a?(Hash) && runs[:branches] total = covered = 0 id = 0 runs[:branches].each do |(_base_type, _, base_lineno), targets| i = 0 targets.each do |(_target_type, _target_lineno), run| out << "BRDA:#{ base_lineno },#{ id },#{ i },#{ run }\n" total += 1 covered += 1 if run > 0 i += 1 end id += 1 end out << "BRF:#{ total }\n" out << "BRH:#{ covered }\n" end out << "end_of_record\n" end out end
ruby
{ "resource": "" }
q22068
Hatenablog.AfterHook.after_hook
train
def after_hook(hook, *methods) methods.each do |method| origin_method = "#{method}_origin".to_sym if instance_methods.include? origin_method raise NameError, "#{origin_method} isn't a unique name" end alias_method origin_method, method define_method(method) do |*args, &block| result = send(origin_method, *args, &block) send(hook) end end end
ruby
{ "resource": "" }
q22069
Hatenablog.Client.next_feed
train
def next_feed(feed = nil) return Feed.load_xml(get_collection(collection_uri).body) if feed.nil? return nil unless feed.has_next? Feed.load_xml(get_collection(feed.next_uri).body) end
ruby
{ "resource": "" }
q22070
Hatenablog.Client.get_entry
train
def get_entry(entry_id) response = get(member_uri(entry_id)) Entry.load_xml(response.body) end
ruby
{ "resource": "" }
q22071
Hatenablog.Client.post_entry
train
def post_entry(title = '', content = '', categories = [], draft = 'no') entry_xml = entry_xml(title, content, categories, draft) response = post(entry_xml) Entry.load_xml(response.body) end
ruby
{ "resource": "" }
q22072
Hatenablog.Client.update_entry
train
def update_entry(entry_id, title = '', content = '', categories = [], draft = 'no', updated = '') entry_xml = entry_xml(title, content, categories, draft, updated) response = put(entry_xml, member_uri(entry_id)) Entry.load_xml(response.body) end
ruby
{ "resource": "" }
q22073
Hatenablog.Client.entry_xml
train
def entry_xml(title = '', content = '', categories = [], draft = 'no', updated = '', author_name = @user_id) builder = Nokogiri::XML::Builder.new(encoding: 'utf-8') do |xml| xml.entry('xmlns' => 'http://www.w3.org/2005/Atom', 'xmlns:app' => 'http://www.w3.org/2007/app') do xml.title title xml.author do xml.name author_name end xml.content(content, type: 'text/x-markdown') xml.updated updated unless updated.empty? || updated.nil? categories.each do |category| xml.category(term: category) end xml['app'].control do xml['app'].draft draft end end end builder.to_xml end
ruby
{ "resource": "" }
q22074
AsyncCache.Store.check_arguments
train
def check_arguments arguments arguments.each_with_index do |argument, index| next if argument.is_a? Numeric next if argument.is_a? String next if argument.is_a? Symbol next if argument.is_a? Hash next if argument.is_a? NilClass next if argument.is_a? TrueClass next if argument.is_a? FalseClass raise ArgumentError, "Cannot send complex data for block argument #{index + 1}: #{argument.class.name}" end arguments end
ruby
{ "resource": "" }
q22075
RackStep.Router.find_route_for
train
def find_route_for(path, verb) # Ignoring the first char if path starts with '/'. This way the path of # 'http//localhost/' will be the same of 'http://localhost' (both will # be empty strings). path = path[1..-1] if path[0] == '/' route_id = verb + path route = routes[route_id] # If no route was found, set it to 'notfound' route (maintaining the # original verb). route = routes["#{verb}notfound"] if route == nil return route end
ruby
{ "resource": "" }
q22076
Bureaucrat.Quickfields.hide
train
def hide(name) base_fields[name] = base_fields[name].dup base_fields[name].widget = Widgets::HiddenInput.new end
ruby
{ "resource": "" }
q22077
Bureaucrat.Quickfields.text
train
def text(name, options = {}) field name, CharField.new(options.merge(widget: Widgets::Textarea.new)) end
ruby
{ "resource": "" }
q22078
Bureaucrat.Quickfields.password
train
# Declares a password field: a CharField rendered via a PasswordInput widget.
def password(name, options = {})
  opts = options.merge(widget: Widgets::PasswordInput.new)
  field(name, CharField.new(opts))
end
ruby
{ "resource": "" }
q22079
Bureaucrat.Quickfields.regex
train
# Declares a field whose value is validated against the given regular expression.
def regex(name, regexp, options = {})
  regex_field = RegexField.new(regexp, options)
  field(name, regex_field)
end
ruby
{ "resource": "" }
q22080
Bureaucrat.Quickfields.choice
train
# Declares a single-select choice field with the given choices.
def choice(name, choices = [], options = {})
  choice_field = ChoiceField.new(choices, options)
  field(name, choice_field)
end
ruby
{ "resource": "" }
q22081
Bureaucrat.Quickfields.typed_choice
train
# Declares a choice field whose selected value is coerced to a concrete type.
def typed_choice(name, choices = [], options = {})
  typed_field = TypedChoiceField.new(choices, options)
  field(name, typed_field)
end
ruby
{ "resource": "" }
q22082
Bureaucrat.Quickfields.multiple_choice
train
# Declares a multi-select choice field with the given choices.
def multiple_choice(name, choices = [], options = {})
  multi_field = MultipleChoiceField.new(choices, options)
  field(name, multi_field)
end
ruby
{ "resource": "" }
q22083
Bureaucrat.Quickfields.radio_choice
train
# Declares a single-select choice field rendered as radio buttons.
def radio_choice(name, choices = [], options = {})
  opts = options.merge(widget: Widgets::RadioSelect.new)
  field(name, ChoiceField.new(choices, opts))
end
ruby
{ "resource": "" }
q22084
Bureaucrat.Quickfields.checkbox_multiple_choice
train
# Declares a multi-select choice field rendered as a list of checkboxes.
def checkbox_multiple_choice(name, choices = [], options = {})
  opts = options.merge(widget: Widgets::CheckboxSelectMultiple.new)
  field(name, MultipleChoiceField.new(choices, opts))
end
ruby
{ "resource": "" }
q22085
APICake.Base.save
train
# Fetches the given path (bang variant: raises on HTTP failure) and writes
# the raw response body to a local file.
def save(filename, path, params = {})
  body = get!(path, nil, params).response.body
  File.write(filename, body)
end
ruby
{ "resource": "" }
q22086
APICake.Base.csv_node
train
# Picks the array "node" out of a parsed response hash for CSV conversion.
#
# Returns the value of the first key whose value is an Array; when the hash
# holds no array values, wraps the hash itself so the result is always an
# Array of row hashes.
#
# @param data [Hash] parsed response
# @return [Array] rows suitable for CSV generation
def csv_node(data)
  # Single pass over key/value pairs (the original built the full list of
  # array-valued keys and then indexed the hash a second time).
  pair = data.find { |_, value| value.is_a?(Array) }
  pair ? pair.last : [data]
end
ruby
{ "resource": "" }
q22087
APICake.Base.http_get
train
# Issues the GET through the class-level HTTP client and wraps the raw
# result in an APICake::Payload.
# NOTE(review): `extra` is accepted for signature parity but unused here.
def http_get(path, extra = nil, params = {})
  raw = self.class.get(path, params)
  APICake::Payload.new(raw)
end
ruby
{ "resource": "" }
q22088
APICake.Base.normalize
train
# Normalizes a (path, extra, params) triple into canonical request form.
#
# Accepts the hash-as-second-argument shorthand (`normalize(path, params)`),
# appends `extra` as an additional path segment, guarantees a leading '/',
# and folds the default query and default params into the options.
#
# @param path [String] request path
# @param extra [String, Hash, nil] extra path segment, or the params hash
# @param params [Hash] request options
# @return [Array(String, Object, Hash)] the normalized triple
def normalize(path, extra=nil, params={})
  # Shorthand: normalize(path, params_hash)
  if extra.is_a?(Hash) && params.empty?
    params = extra
    extra = nil
  end
  path = "#{path}/#{extra}" if extra
  path = "/#{path}" unless path[0] == '/'
  # Work on a copy so the caller's hash is never mutated (the original
  # wrote :query straight into the argument hash).
  params = params.dup
  query = default_query.merge(params)
  params[:query] = query unless query.empty?
  params = default_params.merge(params)
  [path, extra, params]
end
ruby
{ "resource": "" }
q22089
PLSQL.Connection.describe_synonym
train
# Resolves an Oracle synonym to its target (owner, table name) pair. :nodoc:
def describe_synonym(schema_name, synonym_name) #:nodoc:
  sql = "SELECT table_owner, table_name FROM all_synonyms WHERE owner = :owner AND synonym_name = :synonym_name"
  select_first(sql, schema_name.to_s.upcase, synonym_name.to_s.upcase)
end
ruby
{ "resource": "" }
q22090
Moped.Connection.read
train
# Reads one wire-protocol reply from a pooled socket and decodes it into a
# Protocol::Reply.
#
# The first 36 bytes are the fixed-size reply header (length, request id,
# response-to id, op code, flags, cursor id, offset, document count),
# unpacked via REPLY_DECODE_STR; the rest of the message is a stream of
# `count` BSON documents.
#
# @return [Protocol::Reply] the decoded reply
def read
  with_connection do |socket|
    reply = Protocol::Reply.allocate
    # Fixed-size header is always 36 bytes.
    data = read_data(socket, 36)
    response = data.unpack(REPLY_DECODE_STR)
    reply.length,
        reply.request_id,
        reply.response_to,
        reply.op_code,
        reply.flags,
        reply.cursor_id,
        reply.offset,
        reply.count = response
    if reply.count == 0
      reply.documents = []
    else
      # Remaining (length - 36) bytes hold the BSON document payload.
      sock_read = read_data(socket, reply.length - 36)
      buffer = StringIO.new(sock_read)
      reply.documents = reply.count.times.map do
        ::BSON::Document.from_bson(buffer)
      end
    end
    reply
  end
end
ruby
{ "resource": "" }
q22091
Moped.Connection.write
train
# Serializes all operations into a single buffer — assigning each a fresh,
# monotonically increasing request id — and writes it to the socket in one
# call.
def write(operations)
  buffer = ""
  operations.each do |operation|
    @request_id += 1
    operation.request_id = @request_id
    operation.serialize(buffer)
  end
  with_connection { |socket| socket.write(buffer) }
end
ruby
{ "resource": "" }
q22092
Moped.Connection.read_data
train
# Reads exactly `length` bytes from the socket, looping over partial reads
# until the full amount has arrived.
#
# Raises Errors::ConnectionFailure when the socket returns nothing (nil),
# i.e. the peer closed the connection mid-message.
def read_data(socket, length)
  data = socket.read(length)
  unless data
    raise Errors::ConnectionFailure.new(
      "Attempted to read #{length} bytes from the socket but nothing was returned."
    )
  end
  # Accumulate until complete (iterative form of the short-read retry).
  while data.length < length
    remaining = length - data.length
    chunk = socket.read(remaining)
    unless chunk
      raise Errors::ConnectionFailure.new(
        "Attempted to read #{remaining} bytes from the socket but nothing was returned."
      )
    end
    data << chunk
  end
  data
end
ruby
{ "resource": "" }
q22093
Moped.Node.command
train
# Runs a database command by issuing it as a read operation.
def command(database, cmd, options = {})
  operation = Protocol::Command.new(database, cmd, options)
  read(operation)
end
ruby
{ "resource": "" }
q22094
Moped.Node.connection
train
# Yields a pooled connection, translating pool-level failures into
# Errors::PoolTimeout.
#
# A PoolShuttingDownError invalidates this node's pool (cleared and
# deregistered from the connection manager) and is surfaced as PoolTimeout.
# A Timeout::Error is wrapped in PoolTimeout only when it was raised while
# still waiting for a connection; once a connection has been acquired,
# the error is re-raised unchanged.
def connection
  connection_acquired = false
  begin
    pool.with do |conn|
      connection_acquired = true
      yield(conn)
    end
  rescue Timeout::Error, ConnectionPool::PoolShuttingDownError => e
    if e.kind_of?(ConnectionPool::PoolShuttingDownError)
      # Pool is unusable — drop it so a fresh one is built on next access.
      @pool = nil
      Connection::Manager.delete_pool(self)
      raise Errors::PoolTimeout.new(e)
    end
    raise connection_acquired ? e : Errors::PoolTimeout.new(e)
  end
end
ruby
{ "resource": "" }
q22095
Moped.Node.ensure_connected
train
# Yields a live, authenticated connection, reusing the connection already
# pinned to the current execution stack when one exists (so nested calls
# share a single connection).
#
# On failure, a pool-shutdown error invalidates this node's pool; every
# exception is then routed through the Failover policy for this error
# class, which may retry via the given block. The execution stack entry is
# always cleared afterwards.
def ensure_connected(&block)
  # Nested call: reuse the connection pinned by an outer frame.
  unless (conn = stack(:connection)).empty?
    return yield(conn.first)
  end
  begin
    connection do |conn|
      connect(conn) unless conn.alive?
      conn.apply_credentials(@credentials)
      # Pin for the duration so nested ensure_connected calls reuse it.
      stack(:connection) << conn
      yield(conn)
    end
  rescue Exception => e
    if e.kind_of?(ConnectionPool::PoolShuttingDownError)
      # Pool is unusable — drop it so a fresh one is built on next access.
      @pool = nil
      Connection::Manager.delete_pool(self)
    end
    # Delegate to the failover policy (may re-raise, retry, etc.).
    Failover.get(e).execute(e, self, &block)
  ensure
    end_execution(:connection)
  end
end
ruby
{ "resource": "" }
q22096
Moped.Node.get_more
train
# Pulls the next batch of results for an open cursor.
def get_more(database, collection, cursor_id, limit)
  operation = Protocol::GetMore.new(database, collection, cursor_id, limit)
  read(operation)
end
ruby
{ "resource": "" }
q22097
Moped.Node.insert
train
# Sends an insert of the given documents with the supplied write concern.
def insert(database, collection, documents, concern, options = {})
  operation = Protocol::Insert.new(database, collection, documents, options)
  write(operation, concern)
end
ruby
{ "resource": "" }
q22098
Moped.Node.process
train
# Dispatches an operation/callback pair: queued for later when a pipeline
# is in progress, flushed to the socket immediately otherwise.
def process(operation, &callback)
  entry = [operation, callback]
  if executing?(:pipeline)
    queue.push(entry)
  else
    flush([entry])
  end
end
ruby
{ "resource": "" }
q22099
Moped.Node.query
train
# Issues a query against the given collection with the supplied selector.
def query(database, collection, selector, options = {})
  operation = Protocol::Query.new(database, collection, selector, options)
  read(operation)
end
ruby
{ "resource": "" }