_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q18300
TTY.Command.run
train
# Execute a command and return its result.
#
# @return [TTY::Command::Result]
# @raise [ExitError] when the command finished with a failure status
def run(*args, &block)
  cmd = command(*args)
  outcome = execute_command(cmd, &block)
  raise ExitError.new(cmd.to_command, outcome) if outcome && outcome.failure?
  outcome
end
ruby
{ "resource": "" }
q18301
TTY.Command.ruby
train
# Run a ruby interpreter with the given arguments.
# A single argument is passed as one shell string; multiple arguments
# are passed as an argv array.
def ruby(*args, &block)
  options = args.last.is_a?(Hash) ? args.pop : {}
  case args.length
  when 0, 1
    run("#{RUBY} #{args.first}", options, &block)
  else
    run(*[RUBY, *args, options], &block)
  end
end
ruby
{ "resource": "" }
q18302
TTY.Command.find_printer_class
train
# Resolve a printer type name (e.g. :pretty) to its printer class
# under TTY::Command::Printers.
#
# @raise [ArgumentError] for unknown printer types
def find_printer_class(name)
  const_name = name.to_s.split('_').map(&:capitalize).join.to_sym
  unless !const_name.empty? && TTY::Command::Printers.const_defined?(const_name)
    raise ArgumentError, %(Unknown printer type "#{name}")
  end
  TTY::Command::Printers.const_get(const_name)
end
ruby
{ "resource": "" }
q18303
Hatchet.Config.init_config!
train
# Register configured repos: normalizes bare "org/name" entries into
# full github.com clone URLs, records each repo's checkout path, and
# flags duplicate repo names (duplicates are disabled with `false`).
def init_config!(config)
  set_internal_config!(config)
  config.each do |(directory, git_repos)|
    git_repos.each do |repo|
      url = repo.include?("github.com") ? repo : "https://github.com/#{repo}.git"
      repo_name = name_from_git_repo(url)
      repo_path = File.join(repo_directory_path, directory, repo_name)
      if repos.key?(repo_name)
        puts " warning duplicate repo found: #{repo_name.inspect}"
        repos[repo_name] = false
      else
        repos[repo_name] = repo_path
      end
      dirs[repo_path] = url
    end
  end
end
ruby
{ "resource": "" }
q18304
Hatchet.App.setup!
train
# One-time app setup: creates the local git repo and the remote app,
# enables labs features, installs the configured buildpacks through the
# rate-limited API, applies @app_config, and runs before-deploy hooks.
# Idempotent via the @app_is_setup flag; returns self for chaining.
def setup!
  return self if @app_is_setup
  puts "Hatchet setup: #{name.inspect} for #{repo_name.inspect}"
  create_git_repo! unless is_git_repo?
  create_app
  set_labs!
  # buildpack update API expects an array of {buildpack: ...} hashes
  buildpack_list = @buildpacks.map { |pack| { buildpack: pack } }
  api_rate_limit.call.buildpack_installation.update(name, updates: buildpack_list)
  set_config @app_config
  call_before_deploy
  @app_is_setup = true
  self
end
ruby
{ "resource": "" }
q18305
Hatchet.Reaper.get_apps
train
# Fetch every app from the API (newest first), memoizing the total
# count and the subset whose names match @regex.
def get_apps
  all_apps = @api_rate_limit.call.app.list
  newest_first = all_apps.sort_by { |app| DateTime.parse(app["created_at"]) }.reverse
  @app_count = newest_first.count
  @hatchet_apps = newest_first.select { |app| app["name"].match(@regex) }
end
ruby
{ "resource": "" }
q18306
Uirusu.VTResult.to_stdout
train
# Render results as text, grouped under each file hash (printed once,
# case-insensitively) with scanner/result pairs right-aligned.
# Returns an empty string when @results is nil.
def to_stdout
  out = String.new
  seen_digests = []
  unless @results.nil?
    @results.sort_by { |entry| entry[:scanner] }.each do |entry|
      digest = entry[:hash].downcase
      unless seen_digests.include?(digest)
        out << "#{entry[:hash]}:\n"
        seen_digests << digest
      end
      out << "#{entry[:scanner]}: ".rjust(25) + "#{entry[:result]}\n"
    end
  end
  out
end
ruby
{ "resource": "" }
q18307
Uirusu.VTResult.to_xml
train
# Render all results as a simple XML document string.
# NOTE(review): the permalink guard reads result['permalink'] (string
# key) while the field values are read with symbol keys (result[field]);
# if the result hashes are symbol-keyed this guard can never fire —
# verify against the hashes built elsewhere in VTResult.
def to_xml
  result_string = String.new
  result_string << "<results>\n"
  @results.each do |result|
    result_string << "\t<vtresult>\n"
    RESULT_FIELDS.each { |field|
      # skip the <permalink> element when no permalink is present
      result_string << "\t\t<#{field.to_s}>#{result[field]}</#{field.to_s}>\n" unless field == :permalink and result['permalink'].nil?
    }
    result_string << "\t</vtresult>\n"
  end if @results != nil
  result_string << "</results>\n"
  result_string
end
ruby
{ "resource": "" }
q18308
Babelish.Strings2CSV.load_strings
train
# Parse an Apple .strings file into key/value and key/comment hashes.
# genstrings emits UTF-16, so that is tried first; files that are not
# valid UTF-16 fall back to being read as UTF-8.
#
# @param strings_filename [String] path to the .strings file
# @return [Array(Hash, Hash)] [strings by key, comments by key]
def load_strings(strings_filename)
  strings = {}
  comments = {}
  # genstrings uses utf16, so that's what we expect. utf8 should not be impact
  begin
    # block form closes the handle (the original leaked it)
    contents = File.open(strings_filename, "r:utf-16:utf-8") { |file| file.read }
    if RUBY_VERSION == "1.9.2"
      # fixes conversion, see http://po-ru.com/diary/fixing-invalid-utf-8-in-ruby-revisited/
      require 'iconv'
      ic = Iconv.new('UTF-8//IGNORE', 'UTF-8')
      contents = ic.iconv(contents + ' ')[0..-2]
    end
  rescue Encoding::InvalidByteSequenceError
    # Not UTF-16: re-read as UTF-8. The original assigned the File
    # object itself here (it only worked because IO#each_line exists)
    # and never closed either handle.
    contents = File.open(strings_filename, "r:utf-8") { |file| file.read }
  end
  previous_comment = nil
  contents.each_line do |line|
    key, value = self.parse_dotstrings_line(line)
    if key
      strings[key] = value
      comments[key] = previous_comment if previous_comment
    else
      previous_comment = self.parse_comment_line(line)
    end
  end
  [strings, comments]
end
ruby
{ "resource": "" }
q18309
Babelish.Csv2Base.convert
train
# Convert the CSV file into per-language localization files.
# Row 0 is the header: it selects which columns are language columns
# (building a Language per recognized header) and which are excluded.
# Later rows feed key/value pairs into each Language, honoring the
# optional state column and excluded states, then write_content emits
# the files.
def convert(name = @csv_filename)
  rowIndex = 0
  excludedCols = []
  defaultCol = 0
  CSV.foreach(name, :quote_char => '"', :col_sep => @csv_separator, :row_sep => :auto) do |row|
    if rowIndex == 0
      #check there's at least two columns
      return unless row.count > 1
    else
      #skip empty lines (or sections)
      next if row == nil or row[@keys_column].nil?
    end
    # go through columns
    row.size.times do |i|
      next if excludedCols.include? i
      #header
      if rowIndex == 0
        # defaultCol can be the keyValue
        defaultCol = i if self.default_lang == row[i]
        # ignore all headers not listed in langs to create files
        (excludedCols << i and next) unless @langs.has_key?(row[i])
        language = Language.new(row[i])
        # a header may map to one language id or a list of them
        if @langs[row[i]].is_a?(Array)
          @langs[row[i]].each do |id|
            language.add_language_id(id.to_s)
          end
        else
          language.add_language_id(@langs[row[i]].to_s)
        end
        @languages[i] = language
      elsif !@state_column || (row[@state_column].nil? || row[@state_column] == '' || !@excluded_states.include?(row[@state_column]))
        key = row[@keys_column]
        comment = @comments_column ? row[@comments_column] : nil
        key.strip! if @stripping
        default_value = self.default_lang ? row[defaultCol] : nil
        value = self.process_value(row[i], default_value)
        @comments[key] = comment
        @languages[i].add_content_pair(key, value)
      end
    end
    rowIndex += 1
  end
  write_content
end
ruby
{ "resource": "" }
q18310
Babelish.Base2Csv.convert
train
# Load the strings of every input file (keyed by filename), merging
# their comments. Writes the combined CSV when write_to_file is true,
# otherwise returns [keys, strings].
def convert(write_to_file = true)
  strings = {}
  comments = {}
  keys = nil
  @filenames.each do |fname|
    strings[fname], file_comments = load_strings(fname)
    keys ||= strings[fname].keys
    comments.merge!(file_comments) unless file_comments.nil?
  end
  if write_to_file
    # Create csv file
    puts "Creating #{@csv_filename}"
    create_csv_file(keys, strings, comments)
  else
    return keys, strings
  end
end
ruby
{ "resource": "" }
q18311
Babelish.Base2Csv.basename
train
# Return the file's base name (without extension) as a symbol, or nil
# when no path is given.
#
# @param file_path [String, nil]
# @return [Symbol, nil]
def basename(file_path)
  # Guard first: the original called File.basename(file_path) before
  # its nil-check, so a nil path raised TypeError instead of returning.
  return nil unless file_path
  File.basename(file_path).split('.')[0].to_sym
end
ruby
{ "resource": "" }
q18312
Babelish.Base2Csv.create_csv_file
train
# Write keys/strings/comments out to @csv_filename, one row per key
# with one column per input file; values identical to the default
# language's value are blanked out.
# NOTE(review): mixes @default_lang (ivar) and default_lang (method) —
# presumably an attr_reader over the same value; verify.
def create_csv_file(keys, strings, comments = nil)
  raise "csv_filename must not be nil" unless @csv_filename
  CSV.open(@csv_filename, "wb") do |csv|
    # only emit a Comments column when there is at least one comment
    @headers << "Comments" if !comments.nil? && !comments.empty?
    csv << @headers
    keys.each do |key|
      line = [key]
      default_val = strings[@default_lang][key] if strings[@default_lang]
      @filenames.each do |fname|
        lang = fname
        # blank out values that merely repeat the default language
        current_val = (lang != default_lang && strings[lang][key] == default_val) ? '' : strings[lang][key]
        line << current_val
      end
      line << comments[key] if comments && comments[key]
      csv << line
    end
    puts "Done"
  end
end
ruby
{ "resource": "" }
q18313
Clipboard.Utils.popen
train
# Feed +data+ to +cmd+ on stdin; EPIPE (child exited early) is ignored.
# When read_output_stream is true, stdout is drained on a background
# thread and that thread is joined before returning — this keeps the
# child from blocking on a full pipe; the drained output itself is
# discarded. The method's value is the Process::Status from the waiter
# thread.
def popen(cmd, data, read_output_stream = false)
  Open3.popen2(cmd) { |input, output, waiter_thread|
    output_thread = Thread.new { output.read } if read_output_stream
    begin
      input.write data
    rescue Errno::EPIPE
    end
    input.close
    output_thread.value if read_output_stream
    waiter_thread.value
  }
end
ruby
{ "resource": "" }
q18314
Neovim.Executable.version
train
# Ask the nvim executable for its version (memoized).
#
# @raise [Error] when the executable can't be spawned or its output
#   doesn't match VERSION_PATTERN
def version
  @version ||= begin
    IO.popen([@path, "--version"]) { |io| io.gets[VERSION_PATTERN, 1] }
  rescue => e
    raise Error, "Couldn't load #{@path}: #{e}"
  end
end
ruby
{ "resource": "" }
q18315
Neovim.API.functions
train
# Index the API's function descriptions by function name (memoized).
#
# @return [Hash{String => Function}]
def functions
  @functions ||= @api_info.fetch("functions").each_with_object({}) do |func, index|
    function = Function.new(func)
    index[function.name] = function
  end
end
ruby
{ "resource": "" }
q18316
Neovim.Window.cursor=
train
# Move the window's cursor. coords is an [x, y] pair; x is clamped to
# at least 1, y to at least 0 and converted to Vim's 1-indexed column
# before being handed to the cursor() vimscript function.
def cursor=(coords)
  row, col = coords
  row = 1 if row < 1
  col = col < 0 ? 1 : col + 1
  @session.request(:nvim_eval, "cursor(#{row}, #{col})")
end
ruby
{ "resource": "" }
q18317
Neovim.LineRange.each
train
# Yield every line of the buffer, fetching 5000 lines per RPC call to
# bound the size of each round-trip.
def each(&block)
  batch = 5000
  total = @buffer.count
  start = 0
  while start < total
    stop = [start + batch, total].min
    @buffer.get_lines(start, stop, true).each(&block)
    start = stop
  end
end
ruby
{ "resource": "" }
q18318
Neovim.LineRange.[]=
train
# Replace one or more buffer lines.
# The index may be an Integer (with an optional length, default 1) or
# a Range; the value is wrapped into an array of lines.
def []=(*args)
  *target, val = args
  pos, len = target
  start, stop =
    if pos.is_a?(Range)
      range_indices(pos)
    else
      length_indices(pos, len || 1)
    end
  @buffer.set_lines(start, stop, true, Array(val))
end
ruby
{ "resource": "" }
q18319
Neovim.Session.request
train
# Send an RPC request and wait for its response (main thread only).
# When called from the main fiber the event loop is pumped until the
# response arrives (blocking_response); from any other fiber the fiber
# yields until resumed with the response (yielding_response).
#
# @raise [Exited] when the session ended before a response arrived
# @raise the response's own error when the remote call failed
def request(method, *args)
  main_thread_only do
    @request_id += 1
    blocking = Fiber.current == @main_fiber
    log(:debug) do
      { method_name: method, request_id: @request_id, blocking: blocking, arguments: args }
    end
    @event_loop.request(@request_id, method, *args)
    response = blocking ? blocking_response : yielding_response
    raise(Exited) if response.nil?
    raise(response.error) if response.error
    response.value
  end
end
ruby
{ "resource": "" }
q18320
TzuMock.Mocker.mock_proc
train
# Build a proc that, when instance_exec'd in an RSpec context, stubs
# each of the given methods on klass to return a Tzu::Outcome (running
# any match block the caller passes through Outcome#handle).
def mock_proc(klass, methods, success, result, type)
  proc do
    methods.each do |method|
      allow(klass).to receive(method) do |&block|
        outcome = Tzu::Outcome.new(success, result, type)
        outcome.handle(&block) if block
        outcome
      end
    end
  end
end
ruby
{ "resource": "" }
q18321
DepSelector.Selector.find_solution
train
# Find a dependency solution satisfying all solution_constraints.
# First attempts to solve the whole system at once; when that fails,
# re-adds the constraints one at a time (bounded by @time_bound) to
# identify the first constraint that breaks the system, then raises
# NoSolutionExists carrying feedback on the most actionable disabled
# package plus the full disabled-package partition.
def find_solution(solution_constraints, valid_packages = nil)
  # this is a performance optimization so that packages that are
  # completely unreachable by the solution constraints don't get
  # added to the CSP
  packages_to_include_in_solve = trim_unreachable_packages(dep_graph, solution_constraints)
  begin
    # first, try to solve the whole set of constraints
    solve(dep_graph.clone, solution_constraints, valid_packages, packages_to_include_in_solve)
  rescue Exceptions::NoSolutionFound, Exceptions::TimeBoundExceededNoSolution
    # since we're here, solving the whole system failed, so add
    # the solution_constraints one-by-one and try to solve in
    # order to find the constraint that breaks the system in order
    # to give helpful debugging info
    #
    # TODO [cw,2010/11/28]: for an efficiency gain, instead of
    # continually re-building the problem and looking for a
    # solution, turn solution_constraints into a Generator and
    # iteratively add and solve in order to re-use
    # propagations. This will require separating setting up the
    # constraints from searching for the solution.
    Timeout::timeout(@time_bound, Exceptions::TimeBoundExceededNoSolution) do
      solution_constraints.each_index do |idx|
        workspace = dep_graph.clone
        begin
          solve(workspace, solution_constraints[0..idx], valid_packages, packages_to_include_in_solve)
        rescue Exceptions::NoSolutionFound => nsf
          # collect the packages the solver had to disable
          disabled_packages = packages_to_include_in_solve.inject([]) do |acc, elt|
            pkg = workspace.package(elt.name)
            acc << pkg if nsf.unsatisfiable_problem.is_package_disabled?(pkg.gecode_package_id)
            acc
          end
          # disambiguate between packages disabled because they
          # don't exist and those that have otherwise problematic
          # constraints
          disabled_non_existent_packages = []
          disabled_most_constrained_packages = []
          disabled_packages.each do |disabled_pkg|
            disabled_collection = if disabled_pkg.valid? || (valid_packages && valid_packages.include?(disabled_pkg))
                                    disabled_most_constrained_packages
                                  else
                                    disabled_non_existent_packages
                                  end
            disabled_collection << disabled_pkg
          end
          # Pick the first non-existent or most-constrained package
          # that was required or the package whose constraints had
          # to be disabled in order to find a solution and generate
          # feedback for it. We only report feedback for one
          # package, because it is in fact actionable and displaying
          # feedback for every disabled package would probably be
          # too long. The full set of disabled packages is
          # accessible in the NoSolutionExists exception.
          disabled_package_to_report_on = disabled_non_existent_packages.first || disabled_most_constrained_packages.first
          feedback = error_reporter.give_feedback(dep_graph, solution_constraints, idx, disabled_package_to_report_on)
          raise Exceptions::NoSolutionExists.new(feedback, solution_constraints[idx], disabled_non_existent_packages, disabled_most_constrained_packages)
        end
      end
    end
  end
end
ruby
{ "resource": "" }
q18322
DepSelector.Selector.process_soln_constraints
train
# Impose the user's solution constraints on the workspace via a shadow
# package whose "dependencies" are the constraints. Constraints on
# unknown packages or matching no versions are collected and reported
# together in a single InvalidSolutionConstraints exception.
def process_soln_constraints(workspace, solution_constraints, valid_packages)
  gecode = workspace.gecode_wrapper
  # create shadow package whose dependencies are the solution constraints
  soln_constraints_pkg_id = gecode.add_package(0, 0, 0)
  soln_constraints_on_non_existent_packages = []
  soln_constraints_that_match_no_versions = []
  # generate constraints imposed by solution_constraints
  solution_constraints.each do |soln_constraint|
    # look up the package in the cloned dep_graph that corresponds to soln_constraint
    pkg_name = soln_constraint.package.name
    pkg = workspace.package(pkg_name)
    constraint = soln_constraint.constraint
    # record invalid solution constraints and raise an exception
    # afterwards
    unless pkg.valid? || (valid_packages && valid_packages.include?(pkg))
      soln_constraints_on_non_existent_packages << soln_constraint
      next
    end
    if pkg[constraint].empty?
      soln_constraints_that_match_no_versions << soln_constraint
      next
    end
    pkg_id = pkg.gecode_package_id
    gecode.mark_preferred_to_be_at_latest(pkg_id, 10)
    gecode.mark_required(pkg_id)
    if constraint
      acceptable_versions = pkg.densely_packed_versions[constraint]
      gecode.add_version_constraint(soln_constraints_pkg_id, 0, pkg_id, acceptable_versions.min, acceptable_versions.max)
    else
      # this restricts the domain of the variable to >= 0, which
      # means -1, the shadow package, cannot be assigned, meaning
      # the package must be bound to an actual version
      gecode.add_version_constraint(soln_constraints_pkg_id, 0, pkg_id, 0, pkg.densely_packed_versions.range.max)
    end
  end
  if soln_constraints_on_non_existent_packages.any? || soln_constraints_that_match_no_versions.any?
    raise Exceptions::InvalidSolutionConstraints.new(soln_constraints_on_non_existent_packages, soln_constraints_that_match_no_versions)
  end
end
ruby
{ "resource": "" }
q18323
DepSelector.Selector.trim_solution
train
# Reduce a full solver assignment to just the packages reachable from
# the solution constraints.
def trim_solution(soln_constraints, soln, workspace)
  soln_constraints.each_with_object({}) do |soln_constraint, trimmed|
    pkg = workspace.package(soln_constraint.package.name)
    expand_package(trimmed, pkg, soln)
  end
end
ruby
{ "resource": "" }
q18324
DepSelector.Selector.trim_unreachable_packages
train
# Collect every package reachable from the solution constraints so the
# CSP can exclude unreachable ones.
def trim_unreachable_packages(workspace, soln_constraints)
  reachable = []
  soln_constraints.each do |sc|
    find_reachable_packages(workspace, sc.package, sc.constraint, reachable)
  end
  reachable
end
ruby
{ "resource": "" }
q18325
Tus.Server.validate_partial_uploads!
train
# Concurrently fetch the info of every partial upload and validate
# that all parts exist, are marked partial, and fit under Tus-Max-Size.
#
# A closed work queue is drained by N reader threads (N taken from the
# storage's declared concatenation concurrency, defaulting to 10).
# Each thread's value is nil on success or the raised error; the first
# error wins, with Tus::NotFound mapped to a 400 response.
def validate_partial_uploads!(part_uids)
  input = Queue.new
  part_uids.each { |part_uid| input << part_uid }
  input.close
  results = Queue.new
  thread_count = storage.concurrency[:concatenation] if storage.respond_to?(:concurrency)
  thread_count ||= 10
  threads = thread_count.times.map do
    Thread.new do
      begin
        loop do
          # popping a closed, empty Queue yields nil -> stop
          part_uid = input.pop or break
          part_info = storage.read_info(part_uid)
          results << Tus::Info.new(part_info)
        end
        nil
      rescue => error
        # drain remaining work so sibling threads stop early
        input.clear
        error
      end
    end
  end
  errors = threads.map(&:value).compact
  if errors.any? { |error| error.is_a?(Tus::NotFound) }
    error!(400, "One or more partial uploads were not found")
  elsif errors.any?
    fail errors.first
  end
  part_infos = Array.new(results.size) { results.pop } # convert Queue into an Array
  unless part_infos.all?(&:partial?)
    error!(400, "One or more uploads were not partial")
  end
  if max_size && part_infos.map(&:length).inject(0, :+) > max_size
    error!(400, "The sum of partial upload lengths exceed Tus-Max-Size")
  end
end
ruby
{ "resource": "" }
q18326
ChunkyPNG.Image.metadata_chunks
train
# Build one text chunk per metadata pair, compressing values whose
# length reaches the compression threshold.
def metadata_chunks
  metadata.map do |key, value|
    chunk_class =
      if value.length >= METADATA_COMPRESSION_TRESHOLD
        ChunkyPNG::Chunk::CompressedText
      else
        ChunkyPNG::Chunk::Text
      end
    chunk_class.new(key, value)
  end
end
ruby
{ "resource": "" }
q18327
ChunkyPNG.RMagick.import
train
# Convert an RMagick image into a ChunkyPNG::Canvas via an RGBA
# pixel stream.
def import(image)
  cols = image.columns
  rows = image.rows
  stream = image.export_pixels_to_str(0, 0, cols, rows, "RGBA")
  ChunkyPNG::Canvas.from_rgba_stream(cols, rows, stream)
end
ruby
{ "resource": "" }
q18328
ChunkyPNG.RMagick.export
train
# Convert a ChunkyPNG canvas into an RMagick image by importing its
# packed RGBA pixel data.
def export(canvas)
  Magick::Image.new(canvas.width, canvas.height).tap do |image|
    image.import_pixels(0, 0, canvas.width, canvas.height, "RGBA", canvas.pixels.pack("N*"))
  end
end
ruby
{ "resource": "" }
q18329
ChunkyPNG.Vector.each_edge
train
# Yield each consecutive pair of points as an edge; when close is true
# the edge from the last point back to the first is yielded too.
#
# @raise [ChunkyPNG::ExpectationFailed] for paths with fewer than 2 points
def each_edge(close = true)
  if length < 2
    raise ChunkyPNG::ExpectationFailed, "Not enough points in this path to draw an edge!"
  end
  points.each_cons(2) { |from, to| yield(from, to) }
  yield(points.last, points.first) if close
end
ruby
{ "resource": "" }
q18330
ChunkyPNG.Datastream.to_blob
train
# Serialize the datastream into a binary (ASCII-8BIT) string.
def to_blob
  buffer = StringIO.new
  buffer.set_encoding("ASCII-8BIT")
  write(buffer)
  buffer.string
end
ruby
{ "resource": "" }
q18331
ChunkyPNG.Dimension.include?
train
# Whether the given point-like value falls inside this dimension.
def include?(*point_like)
  point = ChunkyPNG::Point(*point_like)
  (0...width).cover?(point.x) && (0...height).cover?(point.y)
end
ruby
{ "resource": "" }
q18332
ChunkyPNG.Color.parse
train
# Coerce a color-like value (Integer, decimal string, hex notation, or
# HTML color name) into an Integer color value.
#
# @raise [ArgumentError] when the value cannot be interpreted
def parse(source)
  return source if source.is_a?(Integer)
  str = source.to_s
  case str
  when /^\d+$/                              then str.to_i
  when HEX3_COLOR_REGEXP, HEX6_COLOR_REGEXP then from_hex(str)
  when HTML_COLOR_REGEXP                    then html_color(str)
  else raise ArgumentError, "Don't know how to create a color from #{source.inspect}!"
  end
end
ruby
{ "resource": "" }
q18333
ChunkyPNG.Color.from_hex
train
# Build a color from a "#rgb"/"#rrggbb" hex string, with an optional
# opacity override. Relies on the $1/$2 capture globals set by the
# regexp match inside the case statement ($2 is the optional alpha
# component in the hex notation).
def from_hex(hex_value, opacity = nil)
  base_color = case hex_value
               when HEX3_COLOR_REGEXP
                 # expand shorthand digits: #abc -> #aabbcc
                 $1.gsub(/([0-9a-f])/i, '\1\1').hex << 8
               when HEX6_COLOR_REGEXP
                 $1.hex << 8
               else
                 raise ArgumentError, "Not a valid hex color notation: #{hex_value.inspect}!"
               end
  opacity ||= $2 ? $2.hex : 0xff
  base_color | opacity
end
ruby
{ "resource": "" }
q18334
ChunkyPNG.Color.from_hsv
train
# Create a color from HSV/HSB components.
# hue in [0, 360]; saturation and value in [0, 1].
def from_hsv(hue, saturation, value, alpha = 255)
  raise ArgumentError, "Hue must be between 0 and 360" unless (0..360).cover?(hue)
  raise ArgumentError, "Saturation must be between 0 and 1" unless (0..1).cover?(saturation)
  raise ArgumentError, "Value/brightness must be between 0 and 1" unless (0..1).cover?(value)
  chroma = value * saturation
  match = value - chroma
  channels = cylindrical_to_cubic(hue, saturation, value, chroma).map do |component|
    ((component + match) * 255).to_i
  end
  rgba(*channels, alpha)
end
ruby
{ "resource": "" }
q18335
ChunkyPNG.Color.from_hsl
train
# Create a color from HSL components.
# hue in [0, 360]; saturation and lightness in [0, 1].
def from_hsl(hue, saturation, lightness, alpha = 255)
  raise ArgumentError, "Hue #{hue} was not between 0 and 360" unless (0..360).cover?(hue)
  raise ArgumentError, "Saturation #{saturation} was not between 0 and 1" unless (0..1).cover?(saturation)
  raise ArgumentError, "Lightness #{lightness} was not between 0 and 1" unless (0..1).cover?(lightness)
  chroma = (1 - (2 * lightness - 1).abs) * saturation
  match = lightness - 0.5 * chroma
  channels = cylindrical_to_cubic(hue, saturation, lightness, chroma).map do |component|
    ((component + match) * 255).to_i
  end
  rgba(*channels, alpha)
end
ruby
{ "resource": "" }
q18336
ChunkyPNG.Color.cylindrical_to_cubic
train
# Map a cylindrical hue/chroma description onto intermediate RGB
# components (before the match value is added back). Returns nil when
# the hue falls outside [0, 360].
def cylindrical_to_cubic(hue, saturation, y_component, chroma)
  sector = hue.fdiv(60)
  x = chroma * (1 - (sector % 2 - 1).abs)
  if sector < 0 then nil
  elsif sector < 1 then [chroma, x, 0]
  elsif sector < 2 then [x, chroma, 0]
  elsif sector < 3 then [0, chroma, x]
  elsif sector < 4 then [0, x, chroma]
  elsif sector < 5 then [x, 0, chroma]
  elsif sector <= 6 then [chroma, 0, x]
  end
end
ruby
{ "resource": "" }
q18337
ChunkyPNG.Color.grayscale?
train
# True when the red, green and blue channels all carry the same value.
def grayscale?(value)
  red = r(value)
  red == g(value) && red == b(value)
end
ruby
{ "resource": "" }
q18338
ChunkyPNG.Color.compose_quick
train
# Alpha-compose fg over bg using fast 8-bit integer math, with
# shortcuts for opaque and fully transparent inputs.
def compose_quick(fg, bg)
  return fg if opaque?(fg) || fully_transparent?(bg)
  return bg if fully_transparent?(fg)
  fg_alpha = a(fg)
  composed_alpha = int8_mult(0xff - fg_alpha, a(bg))
  rgba(
    int8_mult(fg_alpha, r(fg)) + int8_mult(composed_alpha, r(bg)),
    int8_mult(fg_alpha, g(fg)) + int8_mult(composed_alpha, g(bg)),
    int8_mult(fg_alpha, b(fg)) + int8_mult(composed_alpha, b(bg)),
    fg_alpha + composed_alpha
  )
end
ruby
{ "resource": "" }
q18339
ChunkyPNG.Color.compose_precise
train
# Alpha-compose fg over bg using exact floating point math, rounding
# each resulting channel.
def compose_precise(fg, bg)
  return fg if opaque?(fg) || fully_transparent?(bg)
  return bg if fully_transparent?(fg)
  fg_alpha = a(fg).to_f / MAX
  bg_alpha = a(bg).to_f / MAX
  composed_alpha = (1.0 - fg_alpha) * bg_alpha
  channels = [:r, :g, :b].map do |channel|
    (fg_alpha * send(channel, fg) + composed_alpha * send(channel, bg)).round
  end
  rgba(*channels, ((fg_alpha + composed_alpha) * MAX).round)
end
ruby
{ "resource": "" }
q18340
ChunkyPNG.Color.interpolate_quick
train
# Linear interpolation between fg and bg, weighted by an integer alpha
# in [0, 255] (values outside the range snap to fg or bg).
def interpolate_quick(fg, bg, alpha)
  return fg if alpha >= 255
  return bg if alpha <= 0
  remainder = 255 - alpha
  rgba(
    int8_mult(alpha, r(fg)) + int8_mult(remainder, r(bg)),
    int8_mult(alpha, g(fg)) + int8_mult(remainder, g(bg)),
    int8_mult(alpha, b(fg)) + int8_mult(remainder, b(bg)),
    int8_mult(alpha, a(fg)) + int8_mult(remainder, a(bg))
  )
end
ruby
{ "resource": "" }
q18341
ChunkyPNG.Color.fade
train
# Scale the color's alpha channel by factor (0..255), leaving the RGB
# channels untouched.
def fade(color, factor)
  (color & 0xffffff00) | int8_mult(a(color), factor)
end
ruby
{ "resource": "" }
q18342
ChunkyPNG.Color.decompose_color
train
# Recover the foreground color hidden in +color+ from the mask/bg
# renderings; falls back to a fully transparent mask color when no
# consistent alpha can be decomposed.
def decompose_color(color, mask, bg, tolerance = 1)
  opaque_mask = mask & 0xffffff00
  if alpha_decomposable?(color, mask, bg, tolerance)
    opaque_mask | decompose_alpha(color, mask, bg)
  else
    opaque_mask
  end
end
ruby
{ "resource": "" }
q18343
ChunkyPNG.Color.alpha_decomposable?
train
# Whether the per-channel alpha estimates are in range and agree with
# each other within the given tolerance.
def alpha_decomposable?(color, mask, bg, tolerance = 1)
  estimates = decompose_alpha_components(color, mask, bg)
  total = estimates.inject(0, :+)
  estimates.max <= 255 && estimates.min >= 0 && (total + tolerance * 3) >= estimates.max * 3
end
ruby
{ "resource": "" }
q18344
ChunkyPNG.Color.decompose_alpha
train
# The rounded average of the three per-channel alpha estimates.
def decompose_alpha(color, mask, bg)
  estimates = decompose_alpha_components(color, mask, bg)
  (estimates.inject(0, :+) / 3.0).round
end
ruby
{ "resource": "" }
q18345
ChunkyPNG.Color.decompose_alpha_component
train
# Estimate the alpha value of one channel from the color as rendered
# over two backgrounds: 0x00 when the pixel matches bg, 0xff when the
# backgrounds (or color and mask) give no usable signal.
def decompose_alpha_component(channel, color, mask, bg)
  color_val = send(channel, color)
  mask_val  = send(channel, mask)
  bg_val    = send(channel, bg)
  return 0x00 if bg_val == color_val
  return 0xff if bg_val == mask_val
  return 0xff if color_val == mask_val
  (((bg_val - color_val).to_f / (bg_val - mask_val).to_f) * MAX).round
end
ruby
{ "resource": "" }
q18346
ChunkyPNG.Color.decompose_alpha_components
train
# Per-channel (r, g, b) alpha estimates as a three-element array.
def decompose_alpha_components(color, mask, bg)
  [:r, :g, :b].map { |channel| decompose_alpha_component(channel, color, mask, bg) }
end
ruby
{ "resource": "" }
q18347
ChunkyPNG.Color.to_hsv
train
# Convert a color to [hue, saturation, value] (plus alpha on request).
def to_hsv(color, include_alpha = false)
  hue, chroma, max, = hue_and_chroma(color)
  value = max
  saturation = chroma.zero? ? 0.0 : chroma.fdiv(value)
  hsv = [hue, saturation, value]
  hsv << a(color) if include_alpha
  hsv
end
ruby
{ "resource": "" }
q18348
ChunkyPNG.Color.to_hsl
train
# Convert a color to [hue, saturation, lightness] (plus alpha on
# request).
def to_hsl(color, include_alpha = false)
  hue, chroma, max, min = hue_and_chroma(color)
  lightness = 0.5 * (max + min)
  saturation = chroma.zero? ? 0.0 : chroma.fdiv(1 - (2 * lightness - 1).abs)
  hsl = [hue, saturation, lightness]
  hsl << a(color) if include_alpha
  hsl
end
ruby
{ "resource": "" }
q18349
ChunkyPNG.Color.to_truecolor_alpha_bytes
train
# The color's [r, g, b, a] byte values.
def to_truecolor_alpha_bytes(color)
  [:r, :g, :b, :a].map { |channel| send(channel, color) }
end
ruby
{ "resource": "" }
q18350
ChunkyPNG.Color.html_color
train
# Look up a CSS/HTML color name (optionally carrying an opacity in the
# notation) and return its color value.
#
# @raise [ArgumentError] for unrecognized color names
def html_color(color_name, opacity = nil)
  match = HTML_COLOR_REGEXP.match(color_name.to_s)
  if match
    opacity ||= match[2] ? (match[2].to_f * 255.0).round : 0xff
    key = match[1].gsub(/[^a-z]+/i, "").downcase.to_sym
    return PREDEFINED_COLORS[key] | opacity if PREDEFINED_COLORS.key?(key)
  end
  raise ArgumentError, "Unknown color name #{color_name}!"
end
ruby
{ "resource": "" }
q18351
ChunkyPNG.Color.samples_per_pixel
train
# Number of samples per pixel for the given PNG color mode.
#
# @raise [ChunkyPNG::NotSupported] for unknown color modes
def samples_per_pixel(color_mode)
  case color_mode
  when ChunkyPNG::COLOR_INDEXED, ChunkyPNG::COLOR_GRAYSCALE then 1
  when ChunkyPNG::COLOR_GRAYSCALE_ALPHA                     then 2
  when ChunkyPNG::COLOR_TRUECOLOR                           then 3
  when ChunkyPNG::COLOR_TRUECOLOR_ALPHA                     then 4
  else
    raise ChunkyPNG::NotSupported, "Don't know the number of samples for this colormode: #{color_mode}!"
  end
end
ruby
{ "resource": "" }
q18352
ChunkyPNG.Color.pass_bytesize
train
# Byte size of one interlacing pass: each scanline plus its one filter
# byte; a zero-area pass occupies no bytes.
def pass_bytesize(color_mode, depth, width, height)
  return 0 if width == 0 || height == 0
  height * (scanline_bytesize(color_mode, depth, width) + 1)
end
ruby
{ "resource": "" }
q18353
ChunkyPNG.Canvas.[]=
train
# Set the pixel at (x, y) after parsing color-like values.
#
# @raise [ChunkyPNG::OutOfBounds] when the coordinates are off-canvas
def []=(x, y, color)
  assert_xy!(x, y)
  @pixels[x + y * width] = ChunkyPNG::Color.parse(color)
end
ruby
{ "resource": "" }
q18354
ChunkyPNG.Canvas.set_pixel_if_within_bounds
train
# Set the pixel at (x, y), silently ignoring out-of-bounds coordinates.
def set_pixel_if_within_bounds(x, y, color)
  @pixels[x + y * width] = color if include_xy?(x, y)
end
ruby
{ "resource": "" }
q18355
ChunkyPNG.Canvas.column
train
# All pixels in column x, ordered top to bottom.
def column(x)
  assert_x!(x)
  (0...height).map { |y| get_pixel(x, y) }
end
ruby
{ "resource": "" }
q18356
ChunkyPNG.Canvas.replace_row!
train
# Overwrite row y with the given pixel vector (length must equal the
# canvas width).
def replace_row!(y, vector)
  assert_y!(y) && assert_width!(vector.length)
  pixels[width * y, width] = vector
end
ruby
{ "resource": "" }
q18357
ChunkyPNG.Canvas.replace_column!
train
# Overwrite column x with the given pixel vector (length must equal
# the canvas height).
def replace_column!(x, vector)
  assert_x!(x) && assert_height!(vector.length)
  # each instead of for: the for-loop leaked its loop variable into
  # the enclosing scope; the return value (the range) is unchanged.
  (0...height).each { |y| set_pixel(x, y, vector[y]) }
end
ruby
{ "resource": "" }
q18358
ChunkyPNG.Canvas.inspect
train
# Human-readable dump of the canvas: class, dimensions, and one
# hex-encoded row per line.
def inspect
  inspected = "<#{self.class.name} #{width}x#{height} ["
  # each instead of for: avoids leaking the loop variable into the
  # enclosing scope; output is identical.
  (0...height).each do |y|
    inspected << "\n\t[" << row(y).map { |p| ChunkyPNG::Color.to_hex(p) }.join(" ") << "]"
  end
  inspected << "\n]>"
end
ruby
{ "resource": "" }
q18359
ChunkyPNG.Canvas.replace_canvas!
train
# Swap in a new width/height/pixel buffer after validating the pixel
# count matches the new dimensions. Returns self.
#
# @raise [ArgumentError] when the pixel array has the wrong size
def replace_canvas!(new_width, new_height, new_pixels)
  expected_size = new_width * new_height
  if new_pixels.length != expected_size
    raise ArgumentError, "The provided pixel array should have #{expected_size} items"
  end
  @width = new_width
  @height = new_height
  @pixels = new_pixels
  self
end
ruby
{ "resource": "" }
q18360
ChunkyPNG.Canvas.assert_xy!
train
# Ensure (x, y) lies on the canvas; returns true.
#
# @raise [ChunkyPNG::OutOfBounds] otherwise
def assert_xy!(x, y)
  return true if include_xy?(x, y)
  raise ChunkyPNG::OutOfBounds, "Coordinates (#{x},#{y}) out of bounds!"
end
ruby
{ "resource": "" }
q18361
ChunkyPNG.Canvas.assert_size!
train
# Ensure the given matrix dimensions match this canvas exactly;
# returns true.
#
# @raise [ChunkyPNG::ExpectationFailed] on any mismatch
def assert_size!(matrix_width, matrix_height)
  unless width == matrix_width
    raise ChunkyPNG::ExpectationFailed, "The width of the matrix does not match the canvas width!"
  end
  unless height == matrix_height
    raise ChunkyPNG::ExpectationFailed, "The height of the matrix does not match the canvas height!"
  end
  true
end
ruby
{ "resource": "" }
q18362
ChunkyPNG.Palette.to_trns_chunk
train
# Build a tRNS chunk holding the alpha byte of every palette entry.
def to_trns_chunk
  alphas = map { |color| ChunkyPNG::Color.a(color) }
  ChunkyPNG::Chunk::Transparency.new("tRNS", alphas.pack("C*"))
end
ruby
{ "resource": "" }
q18363
ChunkyPNG.Palette.to_plte_chunk
train
# Build a PLTE chunk, recording each color's palette index in
# @encoding_map as a side effect.
def to_plte_chunk
  @encoding_map = {}
  bytes = []
  each_with_index do |color, index|
    @encoding_map[color] = index
    bytes.concat(ChunkyPNG::Color.to_truecolor_bytes(color))
  end
  ChunkyPNG::Chunk::Palette.new("PLTE", bytes.pack("C*"))
end
ruby
{ "resource": "" }
q18364
ChunkyPNG.Palette.best_color_settings
train
# Pick the smallest [color_mode, bit_depth] pair that can represent
# this palette losslessly.
def best_color_settings
  return [ChunkyPNG::COLOR_GRAYSCALE, 1] if black_and_white?
  if grayscale?
    mode = opaque? ? ChunkyPNG::COLOR_GRAYSCALE : ChunkyPNG::COLOR_GRAYSCALE_ALPHA
    return [mode, 8]
  end
  return [ChunkyPNG::COLOR_INDEXED, determine_bit_depth] if indexable?
  return [ChunkyPNG::COLOR_TRUECOLOR, 8] if opaque?
  [ChunkyPNG::COLOR_TRUECOLOR_ALPHA, 8]
end
ruby
{ "resource": "" }
q18365
HeapInfo.Fastbin.title
train
# Colored heading for this bin, e.g. "Fastbin[0x20]: "; the size part
# is omitted when the bin has no index.
def title
  klass = Helper.color(Helper.class_name(self), sev: :bin)
  size_part = "[#{Helper.color(format('%#x', idx_to_size))}]" unless index.nil?
  "#{klass}#{size_part}: "
end
ruby
{ "resource": "" }
q18366
HeapInfo.Fastbin.inspect
train
# Pretty-print the bin's pointer chain, one entry per pointer:
# symbols in parens, nil as "(nil)", addresses colored.
def inspect
  entries = list.map do |ptr|
    if ptr.is_a?(Symbol)
      "(#{ptr})\n"
    elsif ptr.nil?
      " => (nil)\n"
    else
      format(' => %s', Helper.color(format('%#x', ptr)))
    end
  end
  title + entries.join
end
ruby
{ "resource": "" }
q18367
HeapInfo.UnsortedBin.pretty_list
train
# Render a doubly-linked chunk list around @base for display, joined
# by " === ". The entry equal to @base renders as "[self]"; entries on
# the bk side (before the center is seen) additionally show their fd
# pointer when it disagrees with the next list entry, and entries on
# the fd side show their bk pointer when it disagrees with the
# previous entry — making corrupted links visible.
def pretty_list(list)
  center = nil
  list.map.with_index do |c, idx|
    next center = Helper.color('[self]', sev: :bin) if c == @base
    color_c = Helper.color(format('%#x', c))
    fwd = fd_of(c)
    next "#{color_c}(invalid)" if fwd.nil? # invalid c
    bck = bk_of(c)
    if center.nil? # bk side
      format('%s%s', color_c, fwd == list[idx + 1] ? nil : Helper.color(format('(%#x)', fwd)))
    else # fd side
      format('%s%s', bck == list[idx - 1] ? nil : Helper.color(format('(%#x)', bck)), color_c)
    end
  end.join(' === ')
end
ruby
{ "resource": "" }
q18368
HeapInfo.UnsortedBin.link_list
train
# Walk the fd and bk chains outward from @base, collecting at most
# expand_size entries per side into [bk..., @base, fd...].
# Note: a nil or already-seen pointer is appended BEFORE the walk
# stops, so corruption stays visible in the rendered list.
def link_list(expand_size)
  list = [@base] # fd
  work = proc do |ptr, nxt, append|
    sz = 0
    dup = {}
    while ptr != @base && sz < expand_size
      append.call(ptr)
      break if ptr.nil? || dup[ptr] # invalid or duplicated pointer
      dup[ptr] = true
      ptr = __send__(nxt, ptr)
      sz += 1
    end
  end
  work.call(@fd, :fd_of, ->(ptr) { list << ptr })
  work.call(@bk, :bk_of, ->(ptr) { list.unshift(ptr) })
  list
end
ruby
{ "resource": "" }
q18369
HeapInfo.Dumper.x
train
# Print +count+ machine words starting at +address+, gdb "x/Ng" style:
# rows of 16 bytes, each prefixed with its address.
def x(count, address)
  args = [address, count * size_t]
  base = base_of(*args)
  words = dump(*args).unpack(size_t == 4 ? 'L*' : 'Q*')
  per_row = 16 / size_t
  rows = words.each_slice(per_row).with_index.map do |values, round|
    cells = values.map { |v| Helper.color(format("0x%0#{size_t * 2}x", v)) }
    Helper.hex(base + round * 16) + ":\t" + cells.join("\t")
  end
  puts rows.join("\n")
end
ruby
{ "resource": "" }
q18370
HeapInfo.Dumper.cstring
train
# Read a NUL-terminated string at +address+ by dumping exponentially
# growing windows. Each round appends the next +len+ bytes (starting
# at offset len-1), so after doubling, +cur+ holds len-1 contiguous
# bytes. Returns the bytes before the first NUL, or whatever was
# readable if undumpable memory was hit first.
def cstring(address)
  base = base_of(address)
  len = 1
  cur = ''
  loop do
    # dump may return nil on unreadable memory; treat it as ''
    cur << (dump(base + len - 1, len) || '')
    break if cur.index("\x00")
    len <<= 1
    return cur if cur.size != len - 1 # reached undumpable memory
  end
  cur[0, cur.index("\x00")]
end
ruby
{ "resource": "" }
q18371
HeapInfo.Dumper.base_len_of
train
# Resolve a dump target to [base_address, length]. +arg+ may be a raw
# address, a segment symbol, or an expression string evaluated against
# the segment base addresses.
#
# @raise [ArgumentError] when no integer base can be resolved
def base_len_of(arg, len = DUMP_BYTES)
  segments = @info.call(:segments) || {}
  bases = {}
  segments.each { |name, seg| bases[name] = seg.base }
  base =
    case arg
    when Integer then arg
    when Symbol  then bases[arg]
    when String  then Helper.evaluate(arg, store: bases)
    end
  raise ArgumentError, "Invalid base: #{arg.inspect}" unless base.is_a?(Integer) # invalid usage
  [base, len]
end
ruby
{ "resource": "" }
q18372
HeapInfo.ProcessInfo.segments
train
# Hash of every exported attribute that currently holds a Segment.
def segments
  EXPORT.each_with_object({}) do |sym, memo|
    seg = __send__(sym)
    memo[sym] = seg if seg.is_a?(Segment)
  end
end
ruby
{ "resource": "" }
q18373
HeapInfo.ProcessInfo.to_segment
train
# Fetch the Segment for an exported symbol, or nil when the symbol is
# not exported or doesn't resolve to a Segment.
def to_segment(sym)
  seg = __send__(sym) if EXPORT.include?(sym)
  seg.is_a?(Segment) ? seg : nil
end
ruby
{ "resource": "" }
q18374
HeapInfo.Libc.main_arena
train
# Locate glibc's main_arena (cached; a cached arena is reloaded).
# Returns nil when the arena offset cannot be determined.
def main_arena
  return @main_arena.reload! if defined?(@main_arena)
  offset = main_arena_offset
  @main_arena = Arena.new(offset + base, size_t, dumper) unless offset.nil?
end
ruby
{ "resource": "" }
q18375
HeapInfo.Libc.info
train
# Libc metadata, memoized in @info and backed by the on-disk cache;
# computed via execute_libc_info (and cached) on a miss.
def info
  return @info if defined?(@info)
  # Try to fetch from cache first.
  key = HeapInfo::Cache.key_libc_info(name)
  cached = HeapInfo::Cache.read(key)
  @info = cached || execute_libc_info.tap { |i| HeapInfo::Cache.write(key, i) }
end
ruby
{ "resource": "" }
q18376
HeapInfo.Nil.method_missing
train
# Null-object behavior: delegate to nil when nil knows the method,
# otherwise return self so calls remain chainable.
def method_missing(method_sym, *args, &block) # rubocop:disable Style/MethodMissingSuper
  if nil.respond_to?(method_sym)
    nil.__send__(method_sym, *args, &block)
  else
    self
  end
end
ruby
{ "resource": "" }
q18377
HeapInfo.Glibc.invalid_pointer
train
# Mirror glibc free()'s invalid-pointer checks: ptr must not wrap when
# size is added (unsigned compare) and must be 2*size_t aligned.
def invalid_pointer(ptr, size)
  errmsg = "free(): invalid pointer\n"
  # unsigned compare
  malloc_assert(ptr <= ulong(-size)) do
    errmsg + format('ptr(0x%x) > -size(0x%x)', ptr, ulong(-size))
  end
  malloc_assert((ptr % (size_t * 2)).zero?) do
    errmsg + format('ptr(0x%x) %% %d != 0', ptr, size_t * 2)
  end
end
ruby
{ "resource": "" }
q18378
HeapInfo.Chunk.flags
train
# Decode the low bits of the raw size field (the difference between
# @size and the usable size) into chunk flag symbols.
def flags
  bits = @size - size
  result = []
  result << :non_main_arena if (bits & 4) != 0
  result << :mmapped if (bits & 2) != 0
  result << :prev_inuse if (bits & 1) != 0
  result
end
ruby
{ "resource": "" }
q18379
HeapInfo.Process.offset
train
# Print how far +addr+ sits above a segment base.
# With no symbol given, picks the known segment whose base is closest
# below addr. NOTE(review): `addr - seg` subtracts a Segment from an
# Integer — this relies on Segment providing Integer coercion; verify.
def offset(addr, sym = nil)
  return unless load?
  segment = @info.to_segment(sym)
  if segment.nil?
    sym, segment = @info.segments
                        .select { |_, seg| seg.base <= addr }
                        .min_by { |_, seg| addr - seg }
  end
  return $stdout.puts "Invalid address #{Helper.hex(addr)}" if segment.nil?
  $stdout.puts Helper.color(Helper.hex(addr - segment)) + ' after ' + Helper.color(sym, sev: :sym)
end
ruby
{ "resource": "" }
q18380
HeapInfo.Process.find
train
# Search memory for +pattern+ starting at +from+.
#
# Delegates to the dumper's find; yields the null object when the
# target process is not loaded so callers can chain safely.
def find(pattern, from, length = :unlimited, rel: false)
  load? ? dumper.find(pattern, from, length, rel) : Nil.instance
end
ruby
{ "resource": "" }
q18381
HeapInfo.Process.find_all
train
# Search one segment (or all of :elf/:heap/:libc/:ld/:stack) for
# +pattern+ and print every hit, grouped per segment with its base.
# Segments with no hits are dropped. Returns the null object when the
# process is not loaded.
def find_all(pattern, segment = :all)
  return Nil.instance unless load?
  segments = segment == :all ? %i[elf heap libc ld stack] : Array(segment)
  result = findall_raw(pattern, segments).reject { |(_, _, ary)| ary.empty? }
  # Integers are shown as hex addresses; other patterns via #inspect.
  target = pattern.is_a?(Integer) ? Helper.hex(pattern) : pattern.inspect
  str = ["Searching #{Helper.color(target)}:\n"]
  str.concat(result.map do |(sym, base, ary)|
    "In #{Helper.color(sym, sev: :bin)} (#{Helper.color(Helper.hex(base))}):\n" +
      ary.map { |v| " #{Helper.color(sym, sev: :bin)}+#{Helper.color(Helper.hex(v))}\n" }.join
  end)
  $stdout.puts str
end
ruby
{ "resource": "" }
q18382
HeapInfo.Process.to_s
train
# Human-readable summary of the process: program/heap/stack/libc/ld
# segment listings followed by the canary value, all colorized.
def to_s
  return 'Process not found' unless load?
  header = "Program: #{Helper.color(program.name)} PID: #{Helper.color(pid)}\n"
  segments = program.to_s + heap.to_s + stack.to_s + libc.to_s + ld.to_s
  canary_line = format("%-28s\tvalue: #{Helper.color(format('%#x', canary), sev: :sym)}",
                       Helper.color('canary', sev: :sym))
  header + segments + canary_line
end
ruby
{ "resource": "" }
q18383
HeapInfo.Process.canary
train
# Read the process's stack canary.
#
# The value is taken from the AT_RANDOM auxiliary-vector bytes; the
# final mask zeroes the lowest byte, matching the convention that the
# stack guard's least-significant byte is 0x00.
# Returns the null object when the process is not loaded.
def canary
  return Nil.instance unless load?
  addr = @info.auxv[:random]
  Helper.unpack(bits / 8, @dumper.dump(addr, bits / 8)) & 0xffffffffffffff00
end
ruby
{ "resource": "" }
q18384
Bashcov.Xtrace.read
train
# Consume the xtrace output pipe, parsing coverage hits until the
# stream is exhausted, then return the accumulated per-file data.
def read
  @field_stream.read = @read
  field_count = FIELDS.length
  fields = @field_stream.each(
    self.class.delimiter, field_count, PS4_START_REGEXP
  )
  # +take(field_count)+ would be more natural here, but doesn't seem to
  # play nicely with +Enumerator+s backed by +IO+ objects.
  # The loop terminates when fields.next raises StopIteration, which
  # Kernel#loop swallows.
  loop do
    break if (hit = (1..field_count).map { fields.next }).empty?
    parse_hit!(*hit)
  end
  @read.close unless @read.closed?
  @files
end
ruby
{ "resource": "" }
q18385
Bashcov.Xtrace.update_wd_stacks!
train
# Track the shell's working-directory history by mirroring cd/pushd and
# popd effects onto parallel +@pwd_stack+/+@oldpwd_stack+ stacks, so
# relative script paths seen in the trace can be resolved later.
def update_wd_stacks!(pwd, oldpwd)
  # Seed the stacks on first call (OLDPWD may legitimately be unset).
  @pwd_stack[0] ||= pwd
  @oldpwd_stack[0] ||= oldpwd unless oldpwd.to_s.empty?

  # We haven't changed working directories; short-circuit.
  return if pwd == @pwd_stack[-1]

  # If the current +pwd+ is identical to the top of the +@oldpwd_stack+ and
  # the current +oldpwd+ is identical to the second-to-top entry, then a
  # previous cd/pushd has been undone.
  if pwd == @oldpwd_stack[-1] && oldpwd == @oldpwd_stack[-2]
    @pwd_stack.pop
    @oldpwd_stack.pop
  else
    # New cd/pushd
    @pwd_stack << pwd
    @oldpwd_stack << oldpwd
  end
end
ruby
{ "resource": "" }
q18386
Bashcov.Detective.shellscript?
train
# True when +filename+ is a readable regular file that looks like a
# shell script: either its shebang says so, or its extension does and
# its contents pass a syntax check.
def shellscript?(filename)
  accessible = File.exist?(filename) && File.readable?(filename) &&
               File.file?(File.realpath(filename))
  return false unless accessible
  return true if shellscript_shebang?(filename)
  shellscript_extension?(filename) && shellscript_syntax?(filename)
end
ruby
{ "resource": "" }
q18387
Bashcov.FieldStream.each
train
# Yield fields from the stream in groups delimited by +start_match+,
# padding each group with empty strings up to +field_count+ so callers
# always receive complete records. Returns an Enumerator when no block
# is given.
def each(delimiter, field_count, start_match)
  return enum_for(__method__, delimiter, field_count, start_match) unless block_given?
  # Group consecutive fields by whether a start-of-record marker has
  # been seen (chunk_matches yields the grouping key).
  chunked = each_field(delimiter).chunk(&chunk_matches(start_match))
  yield_fields = lambda do |(_, chunk)|
    chunk.each { |e| yield e }
    # Pad short records so every record spans exactly field_count yields.
    (field_count - chunk.size).times { yield "" }
  end
  # Skip junk that might appear before the first start-of-fields match
  begin
    n, chunk = chunked.next
    yield_fields.call([n, chunk]) unless n.zero?
  rescue StopIteration
    return
  end
  chunked.each(&yield_fields)
end
ruby
{ "resource": "" }
q18388
Thermite.Package.build_package
train
# Package the compiled native library into a gzipped tarball named after
# the crate version. The tar entry path is made relative to the Ruby
# top-level directory so it unpacks into the right place.
def build_package
  tarball = config.tarball_filename(config.toml[:package][:version])
  library_rel_path = config.ruby_extension_path.sub("#{config.ruby_toplevel_dir}/", '')
  prepare_built_library
  Zlib::GzipWriter.open(tarball) do |gz|
    Dir.chdir(config.ruby_toplevel_dir) do
      Archive::Tar::Minitar.pack(library_rel_path, gz)
    end
  end
end
ruby
{ "resource": "" }
q18389
Thermite.Util.debug
train
# Append +msg+ to the configured debug file; a no-op when no debug
# filename is configured.
def debug(msg) # Should probably replace with a Logger
  return unless config.debug_filename
  # The handle is opened once and kept for the life of the process
  # (never explicitly closed); 'w' truncates any previous run's log.
  @debug ||= File.open(config.debug_filename, 'w')
  @debug.write("#{msg}\n")
  # Flush eagerly so messages survive a crash.
  @debug.flush
end
ruby
{ "resource": "" }
q18390
Thermite.CustomBinary.download_binary_from_custom_uri
train
# Fetch a prebuilt native extension tarball from the configured custom
# URI template, unpack it, and stage the downloaded library.
#
# Returns true when a binary was downloaded and unpacked, false when no
# custom URI format is configured or the download failed.
def download_binary_from_custom_uri
  return false unless config.binary_uri_format

  version = config.crate_version
  # Fix: this was `uri ||= format(...)` on a fresh local — it always
  # assigned, but the conditional form misleadingly implied a prior
  # value could exist. Plain assignment states the intent.
  uri = format(
    config.binary_uri_format,
    filename: config.tarball_filename(version),
    version: version
  )
  return false unless (tgz = download_versioned_binary(uri, version))

  debug "Unpacking binary from Cargo version: #{File.basename(uri)}"
  unpack_tarball(tgz)
  prepare_downloaded_library
  true
end
ruby
{ "resource": "" }
q18391
GithubCLI.DSL.on_error
train
# Top-level CLI error boundary: run the given block and translate every
# failure into a user-facing message plus a process exit code.
#
# NOTE(review): `rescue Exception` is deliberate here — this is the
# outermost handler of a command-line tool — but it must stay last so
# the more specific rescues (including SystemExit pass-through) win.
def on_error
  yield
rescue Github::Error::NotFound => e
  # NOTE(review): `e` is captured but unused in this branch.
  terminal.newline
  ui.error 'Resource Not Found'
  terminal.newline
  exit 15
rescue GithubCLI::GithubCLIError => e
  GithubCLI.ui.error e.message
  GithubCLI.ui.debug e
  exit e.status_code
rescue Interrupt => e
  # Ctrl-C: exit quietly with a generic failure code.
  GithubCLI.ui.error "\nQuitting..."
  GithubCLI.ui.debug e
  exit 1
rescue SystemExit => e
  # Preserve exit codes raised by inner `exit` calls.
  exit e.status
rescue Exception => e
  GithubCLI.ui.error "\nFatal error has occurred. " + e.message.to_s
  GithubCLI.ui.debug e
  exit 1
end
ruby
{ "resource": "" }
q18392
GithubCLI.Util.convert_value
train
# Normalize +value+ into string form for the API layer:
# booleans become "true"/"false", hashes collapse to their stringified
# values, arrays map element-wise to strings, anything else uses #to_s.
def convert_value(value)
  if value == true
    "true"
  elsif value == false
    "false"
  elsif value.is_a?(Hash)
    convert_value(value.values)
  elsif value.is_a?(Array)
    value.map(&:to_s)
  else
    value.to_s
  end
end
ruby
{ "resource": "" }
q18393
GithubCLI.Manpage.man_dir
train
# Return the manuals directory, memoized in +@man_dir+.
#
# A non-nil +path+ (or a cold cache) re-resolves and validates the
# directory; an invalid directory raises.
def man_dir(path = nil)
  if @man_dir.nil? || path
    candidate = path || File.expand_path('../man', __FILE__)
    unless File.directory?(candidate)
      raise "Manuals directory `#{candidate}` does not exist"
    end
    @man_dir = candidate
  end
  @man_dir
end
ruby
{ "resource": "" }
q18394
GithubCLI.Manpage.manpage?
train
# True when at least one manpage exists for +name+ (optionally scoped
# to +section+); a nil name never matches.
def manpage?(name, section = nil)
  return false if name.nil?
  !manpages(name, section).empty?
end
ruby
{ "resource": "" }
q18395
GithubCLI.Manpage.read
train
# Display the manual page for +name+, prompting the user to choose when
# several candidates exist. Silently returns for a nil name or when no
# manpages are found up front; aborts if selection yields nothing.
# NOTE(review): +section+ is accepted but not passed to +manpages+ here
# — presumably intentional (all sections searched); confirm.
def read(name, section = nil)
  return if name.nil?
  paths = manpages(name)
  return if paths.empty?
  if paths.size == 1
    manpath = paths[0]
  elsif paths.size > 1
    # Interactive disambiguation between multiple matching manuals.
    prompt = TTY::Prompt.new
    manpath = prompt.select("Choose manual to view?", paths)
  end
  if manpath
    run(manpath)
  else
    abort("No manuals found for #{name}")
  end
end
ruby
{ "resource": "" }
q18396
GithubCLI.Pager.page
train
# Pipe this process's output through a pager when stdout is a TTY.
#
# NOTE: the fork roles are intentionally inverted from the usual
# pattern — the PARENT exec's the pager (reading from the pipe), while
# the CHILD continues as the program, with stdout/stderr redirected
# into the pipe's write end.
def page
  return if not $stdout.tty?
  read_io, write_io = IO.pipe
  if Kernel.fork
    # Parent: become the pager, reading the child's output via stdin.
    $stdin.reopen(read_io)
    read_io.close
    write_io.close
    # Don't page if the input is short enough
    ENV['LESS'] = 'FSRX'
    # Wait until we have input before we start the pager
    Kernel.select [$stdin]
    pager = Pager.pager_command
    # Fall back to running the pager through /bin/sh if direct exec fails.
    Kernel.exec pager rescue Kernel.exec "/bin/sh", "-c", pager
  else
    # Child process
    $stdout.reopen(write_io)
    $stderr.reopen(write_io) if $stderr.tty?
    write_io.close
    read_io.close
  end
end
ruby
{ "resource": "" }
q18397
RocketPants.Rescuable.process_action
train
# Wrap the controller action pipeline so any exception is either
# re-raised (pass-through mode, e.g. for tests) or logged, reported to
# the notifier, and rendered as an API error response.
# NOTE(review): `rescue Exception` is deliberate here so the API never
# leaks a raw failure; pass-through mode re-raises everything.
def process_action(*args)
  super
rescue Exception => exception
  raise if RocketPants.pass_through_errors?
  # Otherwise, use the default built in handler.
  # NOTE(review): "occured" is a typo ("occurred") — left as-is since
  # log output may be matched elsewhere.
  logger.error "Exception occured: #{exception.class.name} - #{exception.message}"
  logger.error "Exception backtrace:"
  # Only the first 10 backtrace frames are logged.
  exception.backtrace[0, 10].each do |backtrace_line|
    logger.error "=> #{backtrace_line}"
  end
  exception_notifier_callback.call(self, exception, request)
  render_error exception
end
ruby
{ "resource": "" }
q18398
RocketPants.ErrorHandling.error!
train
def error!(name, *args) context = args.extract_options! klass = Errors[name] || Error exception = klass.new(*args).tap { |e| e.context = context } raise exception end
ruby
{ "resource": "" }
q18399
RocketPants.ErrorHandling.lookup_error_metadata
train
# Build the metadata hash for an error response: the context's
# +:metadata+ entry (empty hash when absent) merged with any extras,
# extras winning on key collisions.
def lookup_error_metadata(exception)
  context = lookup_error_context(exception)
  extras = lookup_error_extras(exception)
  base = context.fetch(:metadata, {})
  base.merge(extras)
end
ruby
{ "resource": "" }