_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q17000
Jekyll.Site.coordinate_documents
train
# Deduplicates translated documents that resolve to the same permalink:
# strips the language prefix from each doc URL, records each URL's
# language in @file_langs, and keeps at most one doc per URL, preferring
# the active language over the default language.
# NOTE(review): relies on iteration order — a default-language doc holds
# a URL only until an active-language doc claims the same URL.
def coordinate_documents(docs) regex = document_url_regex approved = {} docs.each do |doc| lang = doc.data['lang'] || @default_lang url = doc.url.gsub(regex, '/') doc.data['permalink'] = url next if @file_langs[url] == @active_lang next if @file_langs[url] == @default_lang && lang != @active_lang approved[url] = doc @file_langs[url] = lang end approved.values end
ruby
{ "resource": "" }
q17001
Jekyll.Site.process_documents
train
# Rewrites URLs inside the given documents for the active language.
# Does nothing when the active language is the site default.
def process_documents(docs)
  return if @active_lang == @default_lang

  site_url = config.fetch('url', false)
  relative_pattern = relative_url_regex
  absolute_pattern = absolute_url_regex(site_url)

  docs.each do |document|
    relativize_urls(document, relative_pattern)
    # Absolute URLs can only be rewritten when the site URL is known.
    relativize_absolute_urls(document, absolute_pattern, site_url) if site_url
  end
end
ruby
{ "resource": "" }
q17002
ActionView.OptimizedFileSystemResolver.extract_handler_and_format_and_variant
train
# Intercepts Rails template-path resolution so that ".md.erb" templates
# are rewritten to ".md+erb" (one combined handler token) before
# delegating to the original implementation.
def extract_handler_and_format_and_variant(*args)
  if args.first.end_with?('md.erb')
    rewritten = args.shift.gsub(/\.md\.erb\z/, '.md+erb')
    args.unshift(rewritten)
  end
  original_extract_handler_and_format_and_variant(*args)
end
ruby
{ "resource": "" }
q17003
GoCardlessPro.ApiService.make_request
train
# Builds and performs an API request.
#
# method  - HTTP verb symbol.
# path    - path appended to the service's path prefix.
# options - request options Hash; :headers are merged on top of the
#           service-wide default headers.
def make_request(method, path, options = {})
  raise ArgumentError, 'options must be a hash' unless options.is_a?(Hash)

  options[:headers] = @headers.merge(options[:headers] || {})
  Request.new(@connection, method, @path_prefix + path, options).request
end
ruby
{ "resource": "" }
q17004
GoCardlessPro.Paginator.enumerator
train
# Returns a lazy Enumerator that transparently pages through the API:
# yields every record of the current page, then follows the `after`
# cursor until the server stops returning one.
# NOTE(review): the cursor is stored both in @options[:params] and
# merged at the top level of the list call — confirm which one the
# service actually consumes before simplifying.
def enumerator response = get_initial_response Enumerator.new do |yielder| loop do response.records.each { |item| yielder << item } after_cursor = response.after break if after_cursor.nil? @options[:params] ||= {} @options[:params] = @options[:params].merge(after: after_cursor) response = @service.list(@options.merge(after: after_cursor)) end end.lazy end
ruby
{ "resource": "" }
q17005
GoCardlessPro.Client.custom_options
train
# Merges user-supplied options over the client defaults. The
# :default_headers hash is merged key-by-key rather than replaced
# wholesale, so callers can add headers without losing the defaults.
def custom_options(options)
  return default_options if options.nil?

  merged = default_options.merge(options)
  if options[:default_headers]
    merged[:default_headers] =
      default_options[:default_headers].merge(options[:default_headers])
  end
  merged
end
ruby
{ "resource": "" }
q17006
Ruumba.Parser.extract
train
def extract(contents) file_text, matches = parse(contents) extracted_ruby = +'' last_match = [0, 0] matches.each do |start_index, end_index| handle_region_before(start_index, last_match.last, file_text, extracted_ruby) extracted_ruby << extract_match(file_text, start_index, end_index) last_match = [start_index, end_index] end extracted_ruby << file_text[last_match.last..-1].gsub(/./, ' ') extracted_ruby.gsub!(/[^\S\r\n]+$/, '') # if we replaced <%== with <%= raw, try to shift the columns back to the # left so they match the original again extracted_ruby.gsub!(/ raw/, 'raw') extracted_ruby end
ruby
{ "resource": "" }
q17007
Ruumba.RakeTask.run
train
def run # Like RuboCop itself, we'll lazy load so the task # doesn't substantially impact Rakefile load time. require 'ruumba' analyzer = Ruumba::Analyzer.new(@options) puts 'Running Ruumba...' exit(analyzer.run(@dir)) end
ruby
{ "resource": "" }
q17008
Ruumba.Analyzer.run
train
# Analyzes the given files/directories. Uses the configured persistent
# temp folder when present, otherwise a throwaway temporary directory.
def run(files_or_dirs = ARGV)
  tmp = options[:tmp_folder]
  if tmp
    analyze(File.expand_path(tmp), files_or_dirs)
  else
    Dir.mktmpdir { |dir| analyze(dir, files_or_dirs) }
  end
end
ruby
{ "resource": "" }
q17009
Chandler.Logger.error
train
def error(message) message = message.red unless message.color? puts(stderr, message) end
ruby
{ "resource": "" }
q17010
LitleOnline.LitleTransaction.giftCardAuth_reversal
train
def giftCardAuth_reversal(options) transaction = GiftCardAuthReversal.new transaction.litleTxnId = options['litleTxnId'] transaction.card = GiftCardCardType.from_hash(options,'card') transaction.originalRefCode = options['originalRefCode'] transaction.originalAmount = options['originalAmount'] transaction.originalTxnTime = options['originalTxnTime'] transaction.originalSystemTraceId = options['originalSystemTraceId'] transaction.originalSequenceNumber = options['originalSequenceNumber'] return transaction end
ruby
{ "resource": "" }
q17011
LitleOnline.LitleTransaction.fast_access_funding
train
def fast_access_funding(options) transaction = FastAccessFunding.new transaction.reportGroup = get_report_group(options) transaction.transactionId = options['id'] transaction.customerId = options['customerId'] transaction.fundingSubmerchantId = options['fundingSubmerchantId'] transaction.submerchantName = options['submerchantName'] transaction.fundsTransferId = options['fundsTransferId'] transaction.amount = options['amount'] transaction.card = Card.from_hash(options) transaction.token = CardToken.from_hash(options,'token') transaction.paypage = CardPaypage.from_hash(options,'paypage') return transaction end
ruby
{ "resource": "" }
q17012
LitleOnline.LitleRequest.finish_request
train
def finish_request File.open(@path_to_request, 'w') do |f| #jam dat header in there f.puts(build_request_header()) #read into the request file from the batches file File.foreach(@path_to_batches) do |li| f.puts li end #finally, let's poot in a header, for old time's sake f.puts '</litleRequest>' end #rename the requests file File.rename(@path_to_request, @path_to_request + COMPLETE_FILE_SUFFIX) #we don't need the master batch file anymore File.delete(@path_to_batches) end
ruby
{ "resource": "" }
q17013
Inq.CLI.parse
train
def parse(argv) parser, options = parse_main(argv) # Options that are mutually-exclusive with everything else. options = {:help => true} if options[:help] options = {:version => true} if options[:version] validate_options!(options) @options = options @help_text = parser.to_s self end
ruby
{ "resource": "" }
q17014
Inq.Config.load_site_configs
train
# Loads one or more site config files. Accepts either a list of paths
# or a single array of paths:
#   load_site_configs('foo', 'bar')
#   load_site_configs(['foo', 'bar'])
# but not a mixture like load_site_configs(['foo'], 'bar').
def load_site_configs(*files)
  files = files.first if files.length == 1 && files.first.is_a?(Array)
  load_files(*files)
end
ruby
{ "resource": "" }
q17015
Inq.Config.load
train
def load(*configs) configs.each do |config| config.each do |k, v| if self[k] && self[k].is_a?(Array) self[k] += v else self[k] = v end end end self end
ruby
{ "resource": "" }
q17016
Inq.Config.load_env
train
def load_env Inq::Text.puts "Using configuration from environment variables." gh_token = ENV["INQ_GITHUB_TOKEN"] || ENV["HOWIS_GITHUB_TOKEN"] gh_username = ENV["INQ_GITHUB_USERNAME"] || ENV["HOWIS_GITHUB_USERNAME"] raise "INQ_GITHUB_TOKEN environment variable is not set" \ unless gh_token raise "INQ_GITHUB_USERNAME environment variable is not set" \ unless gh_username load({ "sources/github" => { "username" => gh_username, "token" => gh_token, }, }) end
ruby
{ "resource": "" }
q17017
Inq.DateTimeHelpers.date_le
train
# True when +left+ is on or before +right+; both arguments may be
# date strings, which are coerced via str_to_dt.
def date_le(left, right)
  str_to_dt(left) <= str_to_dt(right)
end
ruby
{ "resource": "" }
q17018
Inq.DateTimeHelpers.date_ge
train
# True when +left+ is on or after +right+; both arguments may be
# date strings, which are coerced via str_to_dt.
def date_ge(left, right)
  str_to_dt(left) >= str_to_dt(right)
end
ruby
{ "resource": "" }
q17019
Inq.ReportCollection.metadata
train
# Builds the metadata hash used to interpolate report filename and
# frontmatter templates for +repository+.
#
# repository - "owner/name" GitHub repository slug.
#
# Returns a Hash with :sanitized_repository, :repository, :date and
# :friendly_date keys.
def metadata(repository)
  end_date = DateTime.strptime(@date, "%Y-%m-%d")
  # BUG FIX: "%y" printed a two-digit year ("February 01, 19");
  # "%Y" prints the full four-digit year.
  friendly_end_date = end_date.strftime("%B %d, %Y")

  {
    sanitized_repository: repository.tr("/", "-"),
    repository: repository,
    date: end_date,
    friendly_date: friendly_end_date,
  }
end
ruby
{ "resource": "" }
q17020
Inq.ReportCollection.to_h
train
def to_h results = {} defaults = @config["default_reports"] || {} @config["repositories"].map { |repo_config| repo = repo_config["repository"] config = config_for(repo) config["reports"].map { |format, report_config| # Sometimes report_data has unused keys, which generates a warning, but # we're okay with it, so we wrap it with silence_warnings {}. filename = silence_warnings { tmp_filename = report_config["filename"] || defaults[format]["filename"] tmp_filename % metadata(repo) } directory = report_config["directory"] || defaults[format]["directory"] file = File.join(directory, filename) # Export +report+ to the specified +format+ with the specified # +frontmatter+. frontmatter = report_config["frontmatter"] || {} if defaults.has_key?(format) && defaults[format].has_key?("frontmatter") frontmatter = defaults[format]["frontmatter"].merge(frontmatter) end frontmatter = nil if frontmatter == {} export = @reports[repo].send("to_#{format}", frontmatter) results[file] = export } } results end
ruby
{ "resource": "" }
q17021
Inq.ReportCollection.save_all
train
def save_all reports = to_h reports.each do |file, report| File.write(file, report) end reports.keys end
ruby
{ "resource": "" }
q17022
Rails.Application.asset_precompiled?
train
def asset_precompiled?(logical_path) if precompiled_assets.include?(logical_path) true elsif !config.cache_classes # Check to see if precompile list has been updated precompiled_assets(true).include?(logical_path) else false end end
ruby
{ "resource": "" }
q17023
Rails.Application.precompiled_assets
train
# Returns the memoized Set of precompiled logical asset paths.
# Pass +clear_cache+ to force a re-read of the asset manifest.
def precompiled_assets(clear_cache = false)
  @precompiled_assets = nil if clear_cache
  @precompiled_assets ||= begin
    assets = assets_manifest.find(config.assets.precompile)
    assets.map(&:logical_path).to_set
  end
end
ruby
{ "resource": "" }
q17024
FakeRedis.SortedSetArgumentHandler.handle
train
def handle(item) case item when "WEIGHTS" self.type = :weights self.weights = [] when "AGGREGATE" self.type = :aggregate when nil # This should never be called, raise a syntax error if we manage to hit it raise(Redis::CommandError, "ERR syntax error") else send "handle_#{type}", item end self end
ruby
{ "resource": "" }
q17025
FakeRedis.SortedSetStore.computed_values
train
def computed_values unless defined?(@computed_values) && @computed_values # Do nothing if all weights are 1, as n * 1 is n @computed_values = hashes if weights.all? {|weight| weight == 1 } # Otherwise, multiply the values in each hash by that hash's weighting @computed_values ||= hashes.each_with_index.map do |hash, index| weight = weights[index] Hash[hash.map {|k, v| [k, (v * weight)]}] end end @computed_values end
ruby
{ "resource": "" }
q17026
FakeRedis.ZSet._floatify
train
# Converts a Redis range-boundary string into a Float.
# "+inf"/"-inf" map to the IEEE infinities; an exclusive bound "(N" is
# adjusted by one in the direction given by +increment+. Raises
# Redis::CommandError when the input is not a valid float.
def _floatify(str, increment = true)
  text = str.to_s
  if (infinity = text.match(/^([+-])?inf/i))
    sign = infinity[1] == "-" ? -1.0 : 1.0
    sign / 0.0
  elsif (exclusive = text.match(/^\((\d+)/i))
    exclusive[1].to_i + (increment ? 1 : -1)
  else
    Float(text)
  end
rescue ArgumentError
  raise Redis::CommandError, "ERR value is not a valid float"
end
ruby
{ "resource": "" }
q17027
Cequel.Uuids.uuid
train
def uuid(value = nil) if value.nil? timeuuid_generator.now elsif value.is_a?(Time) timeuuid_generator.at(value) elsif value.is_a?(DateTime) timeuuid_generator.at(Time.at(value.to_f)) else Type::Timeuuid.instance.cast(value) end end
ruby
{ "resource": "" }
q17028
DBF.Table.find
train
# Dispatches to the appropriate finder:
# an Integer fetches a single record, an Array fetches several,
# :all/:first delegate to find_all/find_first. Unknown commands
# return nil.
def find(command, options = {}, &block)
  if command.is_a?(Integer)
    record(command)
  elsif command.is_a?(Array)
    command.map { |index| record(index) }
  elsif command == :all
    find_all(options, &block)
  elsif command == :first
    find_first(options)
  end
end
ruby
{ "resource": "" }
q17029
DBF.Table.record
train
def record(index) seek_to_record(index) return nil if deleted_record? DBF::Record.new(@data.read(record_length), columns, version, @memo) end
ruby
{ "resource": "" }
q17030
DBF.Table.to_csv
train
# Writes the table as force-quoted CSV, either to the file at +path+
# or to STDOUT when no path is given.
#
# BUG FIX: the previous implementation opened the target file and never
# closed it, leaking the file handle. The begin/ensure below guarantees
# the handle is closed even if writing raises; STDOUT (which we did not
# open) is deliberately left open.
def to_csv(path = nil)
  out_io = path ? File.open(path, 'w') : $stdout
  begin
    csv = CSV.new(out_io, force_quotes: true)
    csv << column_names
    each { |record| csv << record.to_a }
  ensure
    out_io.close if path
  end
end
ruby
{ "resource": "" }
q17031
Routemaster.Cache.get
train
def get(url, version: nil, locale: nil) @client.get(url, headers: headers(version: version, locale: locale)) end
ruby
{ "resource": "" }
q17032
Routemaster.RedisBroker.inject
train
def inject(clients={}) @_injected_clients = true clients.each_pair do |name, client| _close_if_present(@_connections[name]) @_connections[name] = Redis::Namespace.new(DEFAULT_NAMESPACE, redis: client) end end
ruby
{ "resource": "" }
q17033
Aruba.Api.keep_trying
train
def keep_trying(timeout=10, tries=0) puts "Try: #{tries}" if @announce_env yield rescue RSpec::Expectations::ExpectationNotMetError if tries < timeout sleep 1 tries += 1 retry else raise end end
ruby
{ "resource": "" }
q17034
BowerRails.Performer.perform_command
train
def perform_command(remove_components = true, &block) # Load in bower json file txt = File.read(File.join(root_path, "bower.json")) json = JSON.parse(txt) # Load and merge root .bowerrc dot_bowerrc = JSON.parse(File.read(File.join(root_path, '.bowerrc'))) rescue {} dot_bowerrc["directory"] = components_directory if json.reject{ |key| ['lib', 'vendor'].include? key }.empty? folders = json.keys else raise "Assuming a standard bower package but cannot find the required 'name' key" unless !!json['name'] folders = ['vendor'] end folders.each do |dir| data = json[dir] # Assume using standard bower.json if folder name is not found data = json if data.nil? # Check folder existence and create? dir = File.join(root_path, dir, "assets") FileUtils.mkdir_p dir unless File.directory? dir # Go in to dir to act Dir.chdir(dir) do # Remove old components FileUtils.rm_rf("#{components_directory}/*") if remove_components # Create bower.json File.open("bower.json", "w") do |f| f.write(data.to_json) end # Create .bowerrc File.open(".bowerrc", "w") do |f| f.write(JSON.pretty_generate(dot_bowerrc)) end # Run command yield if block_given? # Remove bower.json FileUtils.rm("bower.json") # Remove .bowerrc FileUtils.rm(".bowerrc") end if data && !data["dependencies"].empty? end end
ruby
{ "resource": "" }
q17035
ActiveRecord::Turntable.ConnectionProxy.with_shard
train
def with_shard(shard) shard = cluster.to_shard(shard) old_shard = current_shard old_fixed = fixed_shard self.current_shard = shard self.fixed_shard = shard yield ensure self.fixed_shard = old_fixed self.current_shard = old_shard end
ruby
{ "resource": "" }
q17036
ActiveRecord::Turntable.ConnectionProxy.with_all
train
# Executes the block once on every shard and returns an array of the
# per-shard results. When +continue_on_error+ is true a failing shard
# contributes its exception object to the array instead of aborting.
# NOTE(review): `rescue Exception` also traps SignalException and
# SystemExit while continue_on_error is set — confirm this is intended;
# StandardError is usually the correct class to rescue.
def with_all(continue_on_error = false) cluster.shards.map do |shard| begin with_shard(shard) { yield } rescue Exception => err unless continue_on_error raise err end err end end end
ruby
{ "resource": "" }
q17037
ActiveRecord::Turntable.ConnectionProxy.with_default_and_all
train
# Like #with_all, but runs the block on the default shard first, then
# on every cluster shard, returning the array of results. When
# +continue_on_error+ is true a failing shard contributes its exception
# object to the result array instead of aborting.
# NOTE(review): `rescue Exception` also traps SignalException and
# SystemExit while continue_on_error is set — confirm this is intended.
def with_default_and_all(continue_on_error = false) ([default_shard] + cluster.shards).map do |shard| begin with_shard(shard) { yield } rescue Exception => err unless continue_on_error raise err end err end end end
ruby
{ "resource": "" }
q17038
Patron.Response.parse_headers
train
def parse_headers(header_data_for_multiple_responses) @headers = {} responses = Patron::HeaderParser.parse(header_data_for_multiple_responses) last_response = responses[-1] # Only use the last response (for proxies and redirects) @status_line = last_response.status_line last_response.headers.each do |line| hdr, val = line.split(":", 2) val.strip! unless val.nil? if @headers.key?(hdr) @headers[hdr] = [@headers[hdr]] unless @headers[hdr].kind_of? Array @headers[hdr] << val else @headers[hdr] = val end end end
ruby
{ "resource": "" }
q17039
Patron.Request.auth_type=
train
# Sets the HTTP authentication scheme. Accepts a Symbol or String
# (:basic, :digest, :any); anything else raises.
def auth_type=(type=:basic)
  scheme =
    case type.to_s
    when "basic"  then Request::AuthBasic
    when "digest" then Request::AuthDigest
    when "any"    then Request::AuthAny
    end
  raise "#{type.inspect} is an unknown authentication type" unless scheme
  @auth_type = scheme
end
ruby
{ "resource": "" }
q17040
Patron.Request.action=
train
def action=(action) if !VALID_ACTIONS.include?(action.to_s.upcase) raise ArgumentError, "Action must be one of #{VALID_ACTIONS.join(', ')}" end @action = action.downcase.to_sym end
ruby
{ "resource": "" }
q17041
Patron.Session.handle_cookies
train
# Enables cookie handling for this session, optionally persisting
# cookies to +file_path+. Passing nil turns on in-memory cookie
# handling only. Raises ArgumentError when the file cannot be created
# or is not writable.
#
# FIX: File.exists? was deprecated and removed in Ruby 3.2;
# File.exist? is the supported spelling.
def handle_cookies(file_path = nil)
  if file_path
    path = Pathname(file_path).expand_path
    if !File.exist?(file_path) && !File.writable?(path.dirname)
      raise ArgumentError, "Can't create file #{path} (permission error)"
    elsif File.exist?(file_path) && !File.writable?(file_path)
      raise ArgumentError, "Can't read or write file #{path} (permission error)"
    end
  else
    path = nil
  end
  # Apparently calling this with an empty string sets the cookie file,
  # but calling it with a path to a writable file sets that file to be
  # the cookie jar (new cookies are written there)
  add_cookie_file(path.to_s)
  self
end
ruby
{ "resource": "" }
q17042
Patron.Session.post
train
# Issues a POST request. Hash bodies are form-encoded and the matching
# Content-Type header is set automatically; other bodies are sent as-is.
def post(url, data, headers = {})
  if data.is_a?(Hash)
    encoded_pairs = data.map do |key, value|
      "#{urlencode(key.to_s)}=#{urlencode(value.to_s)}"
    end
    data = encoded_pairs.join('&')
    headers['Content-Type'] = 'application/x-www-form-urlencoded'
  end
  request(:post, url, headers, :data => data)
end
ruby
{ "resource": "" }
q17043
Patron.Session.post_multipart
train
# Issues a multipart POST uploading the file at +filename+ together
# with the form fields in +data+.
def post_multipart(url, data, filename, headers = {})
  options = { :data => data, :file => filename, :multipart => true }
  request(:post, url, headers, options)
end
ruby
{ "resource": "" }
q17044
Patron.Session.build_request
train
def build_request(action, url, headers, options = {}) # If the Expect header isn't set uploads are really slow headers['Expect'] ||= '' Request.new.tap do |req| req.action = action req.headers = self.headers.merge headers req.automatic_content_encoding = options.fetch :automatic_content_encoding, self.automatic_content_encoding req.timeout = options.fetch :timeout, self.timeout req.connect_timeout = options.fetch :connect_timeout, self.connect_timeout req.dns_cache_timeout = options.fetch :dns_cache_timeout, self.dns_cache_timeout req.low_speed_time = options.fetch :low_speed_time, self.low_speed_time req.low_speed_limit = options.fetch :low_speed_limit, self.low_speed_limit req.force_ipv4 = options.fetch :force_ipv4, self.force_ipv4 req.max_redirects = options.fetch :max_redirects, self.max_redirects req.username = options.fetch :username, self.username req.password = options.fetch :password, self.password req.proxy = options.fetch :proxy, self.proxy req.proxy_type = options.fetch :proxy_type, self.proxy_type req.auth_type = options.fetch :auth_type, self.auth_type req.insecure = options.fetch :insecure, self.insecure req.ssl_version = options.fetch :ssl_version, self.ssl_version req.http_version = options.fetch :http_version, self.http_version req.cacert = options.fetch :cacert, self.cacert req.ignore_content_length = options.fetch :ignore_content_length, self.ignore_content_length req.buffer_size = options.fetch :buffer_size, self.buffer_size req.download_byte_limit = options.fetch :download_byte_limit, self.download_byte_limit req.progress_callback = options.fetch :progress_callback, self.progress_callback req.multipart = options[:multipart] req.upload_data = options[:data] req.file_name = options[:file] base_url = self.base_url.to_s url = url.to_s raise ArgumentError, "Empty URL" if base_url.empty? && url.empty? uri = URI.parse(base_url.empty? ? url : File.join(base_url, url)) query = uri.query.to_s.split('&') query += options[:query].is_a?(Hash) ? 
Util.build_query_pairs_from_hash(options[:query]) : options[:query].to_s.split('&') uri.query = query.join('&') uri.query = nil if uri.query.empty? url = uri.to_s req.url = url end end
ruby
{ "resource": "" }
q17045
ReverseMarkdown.Cleaner.clean_tag_borders
train
def clean_tag_borders(string) result = string.gsub(/\s?\*{2,}.*?\*{2,}\s?/) do |match| preserve_border_whitespaces(match, default_border: ReverseMarkdown.config.tag_border) do match.strip.sub('** ', '**').sub(' **', '**') end end result = result.gsub(/\s?\_{2,}.*?\_{2,}\s?/) do |match| preserve_border_whitespaces(match, default_border: ReverseMarkdown.config.tag_border) do match.strip.sub('__ ', '__').sub(' __', '__') end end result = result.gsub(/\s?~{2,}.*?~{2,}\s?/) do |match| preserve_border_whitespaces(match, default_border: ReverseMarkdown.config.tag_border) do match.strip.sub('~~ ', '~~').sub(' ~~', '~~') end end result.gsub(/\s?\[.*?\]\s?/) do |match| preserve_border_whitespaces(match) do match.strip.sub('[ ', '[').sub(' ]', ']') end end end
ruby
{ "resource": "" }
q17046
Mustermann.FileUtils.glob_map
train
def glob_map(map = {}, **options, &block) map = Mapper === map ? map : Mapper.new(map, **options) mapped = glob(*map.to_h.keys).map { |f| [f, unescape(map[f])] } block ? mapped.map(&block) : Hash[mapped] end
ruby
{ "resource": "" }
q17047
Mustermann.FileUtils.cp
train
def cp(map = {}, recursive: false, **options) utils_opts, opts = split_options(:preserve, :dereference_root, :remove_destination, **options) cp_method = recursive ? :cp_r : :cp glob_map(map, **opts) { |o,n| f.send(cp_method, o, n, **utils_opts) } end
ruby
{ "resource": "" }
q17048
Mustermann.FileUtils.mv
train
# Moves files matched by the pattern map; FileUtils-specific options
# are split off and forwarded to FileUtils.mv.
def mv(map = {}, **options)
  utils_opts, opts = split_options(**options)
  glob_map(map, **opts) do |source, destination|
    f.mv(source, destination, **utils_opts)
  end
end
ruby
{ "resource": "" }
q17049
Mustermann.FileUtils.ln
train
def ln(map = {}, symbolic: false, **options) utils_opts, opts = split_options(**options) link_method = symbolic ? :ln_s : :ln glob_map(map, **opts) { |o,n| f.send(link_method, o, n, **utils_opts) } end
ruby
{ "resource": "" }
q17050
Mustermann.FileUtils.ln_sf
train
# Convenience wrapper around #ln: creates symbolic links and overwrites
# existing targets (like `ln -sf`). Caller options are splatted last,
# so an explicit symbolic:/force: in +options+ overrides the defaults.
def ln_sf(map = {}, **options) ln(map, symbolic: true, force: true, **options) end
ruby
{ "resource": "" }
q17051
Mustermann.FileUtils.pattern_with_glob_pattern
train
def pattern_with_glob_pattern(*pattern, **options) options[:uri_decode] ||= false pattern = Mustermann.new(*pattern.flatten, **options) @glob_patterns ||= {} @glob_patterns[pattern] ||= GlobPattern.generate(pattern) [pattern, @glob_patterns[pattern]] end
ruby
{ "resource": "" }
q17052
Mustermann.Concat.pump
train
# Feeds +string+ through each sub-pattern in order, yielding the
# pattern and the remaining substring to the block. The block returns a
# result (and optionally the consumed length; when omitted, the result
# itself is used as the length). Returns nil as soon as any pattern
# fails. Results are folded together with +inject_with+ (default :+);
# when +with_size+ is true returns [folded_result, total_consumed].
def pump(string, inject_with: :+, initial: nil, with_size: false) substring = string results = Array(initial) patterns.each do |pattern| result, size = yield(pattern, substring) return unless result results << result size ||= result substring = substring[size..-1] end results = results.inject(inject_with) with_size ? [results, string.size - substring.size] : results end
ruby
{ "resource": "" }
q17053
Mustermann.Concat.combined_ast
train
def combined_ast payload = patterns.map { |p| AST::Node[:group].new(p.to_ast.payload) } AST::Node[:root].new(payload) end
ruby
{ "resource": "" }
q17054
Mustermann.Sinatra.|
train
# Composes two Sinatra patterns into one that matches either. Falls
# back to the generic implementation when +other+ cannot be converted
# to this pattern class, or when both patterns capture names (their
# captures could not be merged safely).
#
# FIX: boolean logic now uses `||` instead of `or` — `or` has very low
# precedence and is error-prone inside expressions.
def |(other)
  converted = self.class.try_convert(other, **options)
  return super unless converted
  return super unless converted.names.empty? || names.empty?

  self.class.new(safe_string + "|" + converted.safe_string, **options)
end
ruby
{ "resource": "" }
q17055
Mustermann.StringScanner.scan_until
train
def scan_until(pattern, **options) result, prefix = check_until_with_prefix(pattern, **options) track_result(prefix, result) end
ruby
{ "resource": "" }
q17056
Mustermann.StringScanner.unscan
train
# Reverts the most recent scan operation by resetting the scanner and
# replaying every earlier recorded result. Raises ScanError when there
# is no history to revert.
def unscan
  raise ScanError, 'unscan failed: previous match record not exist' if @history.empty?
  replay = @history[0..-2]
  reset
  replay.each { |recorded| track_result(*recorded) }
  self
end
ruby
{ "resource": "" }
q17057
Mustermann.StringScanner.check
train
def check(pattern, **options) params, length = create_pattern(pattern, **options).peek_params(rest) ScanResult.new(self, @position, length, params) if params end
ruby
{ "resource": "" }
q17058
Mustermann.Identity.expand
train
def expand(behavior = nil, values = {}) return to_s if values.empty? or behavior == :ignore raise ExpandError, "cannot expand with keys %p" % values.keys.sort if behavior == :raise raise ArgumentError, "unknown behavior %p" % behavior if behavior != :append params = values.map { |key, value| @@uri.escape(key.to_s) + "=" + @@uri.escape(value.to_s, /[^\w]/) } separator = @string.include?(??) ? ?& : ?? @string + separator + params.join(?&) end
ruby
{ "resource": "" }
q17059
Mustermann.Versions.new
train
def new(*args, version: nil, **options) return super(*args, **options) unless versions.any? self[version].new(*args, **options) end
ruby
{ "resource": "" }
q17060
Mustermann.Versions.version
train
def version(*list, inherit_from: nil, &block) superclass = self[inherit_from] || self subclass = Class.new(superclass, &block) list.each { |v| versions[v] = subclass } end
ruby
{ "resource": "" }
q17061
Mustermann.Versions.[]
train
def [](version) return versions.values.last unless version detected = versions.detect { |v,_| version.start_with?(v) } raise ArgumentError, 'unsupported version %p' % version unless detected detected.last end
ruby
{ "resource": "" }
q17062
Mustermann.Caster.cast
train
def cast(hash) return hash if empty? merge = {} hash.delete_if do |key, value| next unless casted = lazy.map { |e| e.cast(key, value) }.detect { |e| e } casted = { key => casted } unless casted.respond_to? :to_hash merge.update(casted.to_hash) end hash.update(merge) end
ruby
{ "resource": "" }
q17063
Mustermann.Mapper.update
train
def update(map) map.to_h.each_pair do |input, output| input = Mustermann.new(input, **@options) output = Expander.new(*output, additional_values: @additional_values, **@options) unless output.is_a? Expander @map << [input, output] end end
ruby
{ "resource": "" }
q17064
Mustermann.Mapper.convert
train
def convert(input, values = {}) @map.inject(input) do |current, (pattern, expander)| params = pattern.params(current) params &&= Hash[values.merge(params).map { |k,v| [k.to_s, v] }] expander.expandable?(params) ? expander.expand(params) : current end end
ruby
{ "resource": "" }
q17065
Ethon.Easy.escape
train
def escape(value) string_pointer = Curl.easy_escape(handle, value, value.bytesize) returned_string = string_pointer.read_string Curl.free(string_pointer) returned_string end
ruby
{ "resource": "" }
q17066
WordsCounted.Counter.token_frequency
train
def token_frequency tokens.each_with_object(Hash.new(0)) { |token, hash| hash[token] += 1 }.sort_by_value_desc end
ruby
{ "resource": "" }
q17067
WordsCounted.Counter.token_lengths
train
def token_lengths tokens.uniq.each_with_object({}) { |token, hash| hash[token] = token.length }.sort_by_value_desc end
ruby
{ "resource": "" }
q17068
WordsCounted.Counter.token_density
train
def token_density(precision: 2) token_frequency.each_with_object({}) { |(token, freq), hash| hash[token] = (freq / token_count.to_f).round(precision) }.sort_by_value_desc end
ruby
{ "resource": "" }
q17069
WordsCounted.Tokeniser.tokenise
train
def tokenise(pattern: TOKEN_REGEXP, exclude: nil) filter_proc = filter_to_proc(exclude) @input.scan(pattern).map(&:downcase).reject { |token| filter_proc.call(token) } end
ruby
{ "resource": "" }
q17070
WordsCounted.Tokeniser.filter_to_proc
train
# Normalises the accepted exclusion-filter forms (Array, String,
# Regexp, Symbol, lambda) into a single callable predicate that takes
# a token and returns truthy when the token should be rejected.
def filter_to_proc(filter)
  return filter_procs_from_array(filter) if filter.respond_to?(:to_a)
  return filter_proc_from_string(filter) if filter.respond_to?(:to_str)

  regexp_filter = Regexp.try_convert(filter)
  return ->(token) { token =~ regexp_filter } if regexp_filter
  return filter.to_proc if filter.respond_to?(:to_proc)

  raise ArgumentError, "`filter` must be a `String`, `Regexp`, `lambda`, `Symbol`, or an `Array` of any combination of those types"
end
ruby
{ "resource": "" }
q17071
ContentfulModel.Configuration.to_hash
train
# Serialises every instance variable of the configuration into a Hash,
# keyed by the variable name (without the leading "@") as a Symbol.
def to_hash
  instance_variables.each_with_object({}) do |ivar, result|
    key = ivar.to_s.sub(/\A@/, '').to_sym
    result[key] = instance_variable_get(ivar)
  end
end
ruby
{ "resource": "" }
q17072
PdfForms.PdftkWrapper.cat
train
def cat(*args) in_files = [] page_ranges = [] file_handle = "A" output = normalize_path args.pop args.flatten.compact.each do |in_file| if in_file.is_a? Hash path = in_file.keys.first page_ranges.push *in_file.values.first.map {|range| "#{file_handle}#{range}"} else path = in_file page_ranges.push "#{file_handle}" end in_files.push "#{file_handle}=#{normalize_path(path)}" file_handle.next! end call_pdftk in_files, 'cat', page_ranges, 'output', output end
ruby
{ "resource": "" }
q17073
PdfForms.DataFormat.to_pdf_data
train
def to_pdf_data pdf_data = header @data.each do |key, value| if Hash === value value.each do |sub_key, sub_value| pdf_data << field("#{key}_#{sub_key}", sub_value) end else pdf_data << field(key, value) end end pdf_data << footer return encode_data(pdf_data) end
ruby
{ "resource": "" }
q17074
TTY.Spinner.auto_spin
train
# Starts spinning in a background thread: draws one frame immediately,
# then keeps redrawing roughly every 1/@interval seconds for as long as
# @started_at is set. The thread honours a per-thread 'pause' flag by
# stopping itself until resumed. CURSOR_LOCK serialises cursor
# manipulation across concurrent spinners; the ensure clause restores
# the cursor if it was hidden.
def auto_spin CURSOR_LOCK.synchronize do start sleep_time = 1.0 / @interval spin @thread = Thread.new do sleep(sleep_time) while @started_at if Thread.current['pause'] Thread.stop Thread.current['pause'] = false end spin sleep(sleep_time) end end end ensure if @hide_cursor write(TTY::Cursor.show, false) end end
ruby
{ "resource": "" }
q17075
TTY.Spinner.run
train
def run(stop_message = '', &block) job(&block) auto_spin @work = Thread.new { execute_job } @work.join ensure stop(stop_message) end
ruby
{ "resource": "" }
q17076
TTY.Spinner.spin
train
# Renders a single spinner frame: substitutes the current frame glyph
# into the message, expands tokens, writes the result, then advances
# the frame index (wrapping at @length) and marks the spinner as
# :spinning. No-op once the spinner is done. Returns the rendered data.
def spin synchronize do return if @done emit(:spin) if @hide_cursor && !spinning? write(TTY::Cursor.hide) end data = message.gsub(MATCHER, @frames[@current]) data = replace_tokens(data) write(data, true) @current = (@current + 1) % @length @state = :spinning data end end
ruby
{ "resource": "" }
q17077
TTY.Spinner.fetch_format
train
# Looks up +property+ for the spinner format +token+ in FORMATS.
# Raises ArgumentError for tokens that are not registered.
def fetch_format(token, property)
  raise ArgumentError, "Unknown format token `:#{token}`" unless FORMATS.key?(token)
  FORMATS[token][property]
end
ruby
{ "resource": "" }
q17078
Xlsxtream.Row.auto_format
train
def auto_format(value) case value when TRUE_STRING true when FALSE_STRING false when NUMBER_PATTERN value.include?('.') ? value.to_f : value.to_i when DATE_PATTERN Date.parse(value) rescue value when TIME_PATTERN DateTime.parse(value) rescue value else value end end
ruby
{ "resource": "" }
q17079
Xlsxtream.Row.time_to_oa_date
train
# Converts a Time-like value to an OLE Automation date (fractional
# days since 1899-12-30), the representation SpreadsheetML uses.
def time_to_oa_date(time)
  time = time.to_time if time.respond_to?(:to_time)

  # Local dates are stored as UTC by truncating the offset:
  # 1970-01-01 00:00:00 +0200 => 1970-01-01 00:00:00 UTC
  # This is done because SpreadsheetML is not timezone aware.
  seconds_per_day = 24 * 3600
  unix_epoch_as_oa_date = 25569
  (time + time.utc_offset).utc.to_f / seconds_per_day + unix_epoch_as_oa_date
end
ruby
{ "resource": "" }
q17080
FFI_Yajl.MapLibraryName.expanded_library_names
train
def expanded_library_names library_names.map do |libname| pathname = File.expand_path(File.join(Libyajl2.opt_path, libname)) pathname if File.file?(pathname) end.compact end
ruby
{ "resource": "" }
q17081
FFI_Yajl.MapLibraryName.dlopen_yajl_library
train
def dlopen_yajl_library found = false ( expanded_library_names + library_names ).each do |libname| begin dlopen(libname) found = true break rescue ArgumentError end end raise "cannot find yajl library for platform" unless found end
ruby
{ "resource": "" }
q17082
FFI_Yajl.MapLibraryName.ffi_open_yajl_library
train
def ffi_open_yajl_library found = false expanded_library_names.each do |libname| begin ffi_lib libname found = true rescue LoadError end end ffi_lib "yajl" unless found end
ruby
{ "resource": "" }
q17083
GrpcKit.Server.graceful_shutdown
train
# Begins a graceful shutdown in a background thread: marks the server
# stopping, drains every active session, then waits (up to
# @shutdown_timeout seconds when +timeout+ is truthy, otherwise 0) for
# all sessions to finish before forcing shutdown_sessions.
# NOTE(review): "forceibly" in the error log message is a typo of
# "forcibly" — left unchanged here because it is runtime output.
def graceful_shutdown(timeout: true) @stopping = true Thread.new do GrpcKit.logger.debug('graceful shutdown') @mutex.synchronize { @sessions.each(&:drain) } begin sec = timeout ? @shutdown_timeout : 0 Timeout.timeout(sec) do sleep 1 until @sessions.empty? end rescue Timeout::Error => _ GrpcKit.logger.error("Graceful shutdown is timeout (#{@shutdown_timeout}sec). Perform shutdown forceibly") shutdown_sessions end end end
ruby
{ "resource": "" }
q17084
AST.Node.updated
train
# Returns a node like this one but with the given type, children and
# properties substituted (nil keeps the existing value). Returns self
# unchanged when nothing would differ; otherwise builds the copy by
# re-running #initialize on a dup — Node instances are immutable.
def updated(type=nil, children=nil, properties=nil) new_type = type || @type new_children = children || @children new_properties = properties || {} if @type == new_type && @children == new_children && properties.nil? self else original_dup.send :initialize, new_type, new_children, new_properties end end
ruby
{ "resource": "" }
q17085
AST.Node.to_sexp
train
def to_sexp(indent=0) indented = " " * indent sexp = "#{indented}(#{fancy_type}" children.each do |child| if child.is_a?(Node) sexp += "\n#{child.to_sexp(indent + 1)}" else sexp += " #{child.inspect}" end end sexp += ")" sexp end
ruby
{ "resource": "" }
q17086
AST.Node.to_sexp_array
train
def to_sexp_array children_sexp_arrs = children.map do |child| if child.is_a?(Node) child.to_sexp_array else child end end [type, *children_sexp_arrs] end
ruby
{ "resource": "" }
q17087
Pwwka.Configuration.payload_parser
train
def payload_parser @payload_parser ||= if @receive_raw_payload ->(payload) { payload } else ->(payload) { ActiveSupport::HashWithIndifferentAccess.new(JSON.parse(payload)) } end end
ruby
{ "resource": "" }
q17088
Pwwka.Configuration.omit_payload_from_log?
train
def omit_payload_from_log?(level_of_message_with_payload) return true if @receive_raw_payload Pwwka::Logging::LEVELS[Pwwka.configuration.payload_logging.to_sym] > Pwwka::Logging::LEVELS[level_of_message_with_payload.to_sym] end
ruby
{ "resource": "" }
q17089
Danger.DangerDuplicateLocalizableStrings.localizable_duplicate_entries
train
def localizable_duplicate_entries localizable_files = (git.modified_files + git.added_files) - git.deleted_files localizable_files.select! { |line| line.end_with?('.strings') } duplicate_entries = [] localizable_files.each do |file| lines = File.readlines(file) # Grab just the keys, translations might be different keys = lines.map { |e| e.split('=').first } # Filter newlines and comments keys = keys.select do |e| e != "\n" && !e.start_with?('/*') && !e.start_with?('//') end # Grab keys that appear more than once duplicate_keys = keys.select { |e| keys.rindex(e) != keys.index(e) } # And make sure we have one entry per duplicate key duplicate_keys = duplicate_keys.uniq duplicate_keys.each do |key| duplicate_entries << { 'file' => file, 'key' => key } end end duplicate_entries end
ruby
{ "resource": "" }
q17090
ProxyFetcher.Document.xpath
train
# Runs an XPath query against the backend document and wraps every
# resulting node in the backend's proxy node class.
def xpath(*args)
  raw_nodes = backend.xpath(*args)
  raw_nodes.map { |raw_node| backend.proxy_node.new(raw_node) }
end
ruby
{ "resource": "" }
q17091
ProxyFetcher.Document.css
train
def css(*args) backend.css(*args).map { |node| backend.proxy_node.new(node) } end
ruby
{ "resource": "" }
q17092
ProxyFetcher.ProxyValidator.connectable?
train
def connectable? ssl_context = OpenSSL::SSL::SSLContext.new ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE @http.head(URL_TO_CHECK, ssl_context: ssl_context).status.success? rescue StandardError false end
ruby
{ "resource": "" }
q17093
ProxyFetcher.ProvidersRegistry.class_for
train
# Resolves a registered provider class by name. Raises
# ProxyFetcher::Exceptions::UnknownProvider for names that were
# never registered.
def class_for(provider_name)
  key = provider_name.to_sym
  providers.fetch(key)
rescue KeyError
  raise ProxyFetcher::Exceptions::UnknownProvider, key
end
ruby
{ "resource": "" }
q17094
ProxyFetcher.Configuration.setup_custom_class
train
# Validates that a user-supplied class implements all of the
# +required_methods+ before it is installed as a custom adapter.
# Returns the class itself on success.
#
# BUG FIX: the old code called `klass.respond_to?(*required_methods)`,
# splatting the whole array into one respond_to? call — the second
# element was silently treated as respond_to?'s `include_all` flag,
# three or more methods raised ArgumentError, and an empty list
# crashed. Each required method must be checked individually.
def setup_custom_class(klass, required_methods: [])
  missing = Array(required_methods).reject { |name| klass.respond_to?(name) }
  unless missing.empty?
    raise ProxyFetcher::Exceptions::WrongCustomClass.new(klass, required_methods)
  end
  klass
end
ruby
{ "resource": "" }
q17095
ProxyFetcher.HTTPClient.fetch
train
def fetch response = process_http_request response.body.to_s rescue StandardError => error ProxyFetcher.logger.warn("Failed to process request to #{url} (#{error.message})") '' end
ruby
{ "resource": "" }
q17096
ProxyFetcher.Manager.refresh_list!
train
# Rebuilds the proxy list by querying every configured provider, each
# in its own thread; results are appended to @proxies under a mutex.
# +filters+ may be keyed per provider name (symbol), falling back to
# the whole filters object for providers without a dedicated entry.
# Returns the collected proxies.
def refresh_list!(filters = nil) @proxies = [] threads = [] lock = Mutex.new ProxyFetcher.config.providers.each do |provider_name| threads << Thread.new do provider = ProxyFetcher::Configuration.providers_registry.class_for(provider_name) provider_filters = filters && filters.fetch(provider_name.to_sym, filters) provider_proxies = provider.fetch_proxies!(provider_filters) lock.synchronize do @proxies.concat(provider_proxies) end end end threads.each(&:join) @proxies end
ruby
{ "resource": "" }
q17097
ActiveMocker.Queries.delete_all
train
# Deletes records, optionally restricted by +conditions+.
# NOTE(review): in the nil-conditions branch, each(&:delete) deletes
# every record and .clear then empties the array, so the subsequent
# map(&:delete).count always returns 0 (and delete is not re-invoked).
# Confirm whether callers rely on receiving the deleted count here.
def delete_all(conditions = nil) check_for_limit_scope! collection = conditions.nil? ? to_a.each(&:delete).clear : where(conditions) collection.map(&:delete).count end
ruby
{ "resource": "" }
q17098
ActiveMocker.Queries.find_by
train
# Returns the first record matching +conditions+, or nil when none do.
def find_by(conditions = {})
  to_a.detect { |record| Find.new(record).is_of(conditions) }
end
ruby
{ "resource": "" }
q17099
ActiveMocker.Queries.limit
train
# Returns a new relation restricted to the first +num+ records, flagged
# (via the private set_from_limit) so later scope checks know a limit
# was applied.
def limit(num)
  limited = __new_relation__(all.take(num))
  limited.send(:set_from_limit)
  limited
end
ruby
{ "resource": "" }