_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q18700
TLAW.DataTable.to_h
train
# Pivot the table: build a Hash mapping each column key to the Array of
# that key's values across all rows.
def to_h
  result = {}
  keys.each do |key|
    result[key] = map { |row| row[key] }
  end
  result
end
ruby
{ "resource": "" }
q18701
ElmInstall.DirectorySource.copy_to
train
# Symlinks the source directory (@dir) into the given target location.
#
# @param _ [Object] ignored (version placeholder to match the Source interface)
# @param directory [Pathname] destination path for the symlink
# @return [nil]
def copy_to(_, directory)
  # Delete the directory to make sure no previous version remains
  FileUtils.rm_rf(directory) if directory.exist?
  # Create parent directory
  FileUtils.mkdir_p(directory.parent)
  # Create symlink
  FileUtils.ln_s(@dir.expand_path, directory)
  nil
end
ruby
{ "resource": "" }
q18702
ElmInstall.Identifier.identify
train
# Builds the Dependency list for an Elm package directory by reading its
# elm-package.json and resolving where each dependency should come from.
def identify(directory)
  raw = json(directory)
  dependencies = raw['dependencies'].to_h
  # Local 'dependency-sources' entries are overridden by @dependency_sources
  dependency_sources = raw['dependency-sources'].to_h.merge(@dependency_sources)
  dependencies.map do |package, constraint|
    constraints = Utils.transform_constraint constraint
    type =
      if dependency_sources.key?(package)
        source = dependency_sources[package]
        case source
        when Hash
          # { "url" => ..., "ref" => ... } pins an explicit git ref
          uri_type source['url'], Branch::Just(source['ref'])
        when String
          if File.exist?(source)
            # A path that exists on disk becomes a directory source
            Type::Directory(Pathname.new(source))
          else
            uri_type source, Branch::Just('master')
          end
        end
      else
        # Default: fetch from GitHub with no pinned branch
        Type::Git(Uri::Github(package), Branch::Nothing())
      end
    type.source.identifier = self
    type.source.options = @options
    Dependency.new(package, type.source, constraints)
  end
end
ruby
{ "resource": "" }
q18703
ElmInstall.Identifier.uri_type
train
# Parses a git clone URL and wraps it in the matching Git source type
# (SSH or HTTP). Returns nil for unrecognized URI classes.
def uri_type(url, branch)
  parsed = GitCloneUrl.parse(url)
  if parsed.is_a?(URI::SshGit::Generic)
    Type::Git(Uri::Ssh(parsed), branch)
  elsif parsed.is_a?(URI::HTTP)
    Type::Git(Uri::Http(parsed), branch)
  end
end
ruby
{ "resource": "" }
q18704
ElmInstall.Identifier.json
train
# Reads and parses elm-package.json from the given directory.
# On a parse failure or missing file it only prints a warning, so the
# return value is nil in those cases (callers must cope with nil).
def json(directory)
  path = File.join(directory, 'elm-package.json')
  JSON.parse(File.read(path))
rescue JSON::ParserError
  warn "Invalid JSON in file: #{path.bold}"
rescue Errno::ENOENT
  warn "Could not find file: #{path.bold}"
end
ruby
{ "resource": "" }
q18705
ElmInstall.Utils.transform_constraint
train
# Converts an Elm version constraint string (e.g. "1.0.0 <= v < 2.0.0")
# into an Array of Solve::Constraint objects via the CONVERTERS
# regexp/prefix table.
#
# Fix: the original used gsub!, mutating the caller's string in place as
# a side effect; a non-destructive gsub keeps the argument untouched.
#
# @param elm_constraint [String] the raw Elm constraint expression
# @return [Array<Solve::Constraint>]
def transform_constraint(elm_constraint)
  normalized = elm_constraint.gsub(/\s/, '')
  CONVERTERS
    .map { |regexp, prefix| [normalized.match(regexp), prefix] }
    .select { |(match)| match }
    .map { |(match, prefix)| "#{prefix} #{match[1]}" }
    .map { |constraint| Solve::Constraint.new constraint }
end
ruby
{ "resource": "" }
q18706
ElmInstall.Installer.results
train
# Runs the dependency solver over the graph and returns the resolved
# dependencies with their solved versions filled in.
def results
  Solve
    .it!(@graph, initial_solve_constraints)
    .map do |name, version|
      dep = @resolver.dependencies[name]
      dep.version = Semverse::Version.new(version)
      dep
    end
end
ruby
{ "resource": "" }
q18707
ElmInstall.Installer.initial_solve_constraints
train
# Collects [name, constraint] pairs for every constraint of every
# initial dependency, as input for the solver.
def initial_solve_constraints
  pairs = []
  @identifier.initial_dependencies.each do |dependency|
    dependency.constraints.each do |constraint|
      pairs << [dependency.name, constraint]
    end
  end
  pairs
end
ruby
{ "resource": "" }
q18708
ElmInstall.Resolver.resolve_dependency
train
# Resolves a single dependency: registers it, then walks every version
# of its source that satisfies the constraints.
def resolve_dependency(dependency)
  # Remember the first dependency object seen for a given name
  @dependencies[dependency.name] ||= dependency
  dependency
    .source
    .versions(
      dependency.constraints,
      @identifier.initial_elm_version,
      !@options[:skip_update],
      @options[:only_update]
    )
    .each do |version|
      # Skip versions already present in the graph to avoid re-resolving
      next if @graph.artifact?(dependency.name, version)
      resolve_dependencies(dependency, version)
    end
  nil
end
ruby
{ "resource": "" }
q18709
ElmInstall.Resolver.resolve_dependencies
train
# Resolves the dependencies of +main+ at +version+: records constraint
# edges on the graph artifact and recurses into each child dependency.
def resolve_dependencies(main, version)
  dependencies = @identifier.identify(main.source.fetch(version))
  artifact = @graph.artifact main.name, version
  dependencies.each do |dependency|
    dependency.constraints.each do |constraint|
      artifact.depends dependency.name, constraint
    end
    resolve_dependency dependency
  end
  nil
end
ruby
{ "resource": "" }
q18710
ElmInstall.GitSource.fetch
train
# Checks out the requested version (or the pinned branch ref) in the
# local repository and returns the checkout.
def fetch(version)
  # Get the reference from the branch
  ref =
    case @branch
    when Branch::Just
      # A pinned branch ref wins over any version argument
      @branch.ref
    when Branch::Nothing
      case version
      when String
        version
      else
        # presumably a Semverse::Version — serialized via to_simple
        version.to_simple
      end
    end
  repository.checkout ref
end
ruby
{ "resource": "" }
q18711
ElmInstall.GitSource.copy_to
train
# Copies the repository contents at the given version into directory,
# stripping the .git metadata.
def copy_to(version, directory)
  # Delete the directory to make sure no previous version remains if
  # we are using a branch or symlink if using Dir.
  FileUtils.rm_rf(directory) if directory.exist?
  # Create directory if not exists
  FileUtils.mkdir_p directory
  # Copy whole repository
  FileUtils.cp_r("#{fetch(version).path}/.", directory)
  # Remove .git directory
  FileUtils.rm_rf(File.join(directory, '.git'))
  nil
end
ruby
{ "resource": "" }
q18712
ElmInstall.GitSource.versions
train
# Lists usable versions for this source, optionally fetching upstream
# updates first.
def versions(constraints, elm_version, should_update, only_update)
  # Fetch only when the repo is cloned, has not been fetched already in
  # this run, updates are requested, and (when only_update is set) this
  # is the targeted package.
  if repository.cloned? && !repository.fetched && should_update && (!only_update || only_update == package_name)
    # Get updates from upstream
    Logger.arrow "Getting updates for: #{package_name.bold}"
    repository.fetch
  end
  case @branch
  when Branch::Just
    # Pinned ref: exactly one version, read from the checkout
    [identifier.version(fetch(@branch.ref))]
  when Branch::Nothing
    matching_versions constraints, elm_version
  end
end
ruby
{ "resource": "" }
q18713
ElmInstall.GitSource.matching_versions
train
# Returns the repository's tagged versions that target the given Elm
# version and satisfy every constraint, sorted newest first.
def matching_versions(constraints, elm_version)
  candidates = repository.versions.select do |version|
    next false unless elm_version_of(version.to_s) == elm_version
    constraints.all? { |constraint| constraint.satisfies?(version) }
  end
  candidates.sort.reverse
end
ruby
{ "resource": "" }
q18714
ElmInstall.Populator.log_dependency
train
# Prints a one-line summary of an installed dependency:
# "<name> - <source> (<version>)".
def log_dependency(dependency)
  message = format(
    '%s - %s (%s)',
    dependency.name,
    dependency.source.to_log.to_s,
    dependency.version.to_simple
  )
  Logger.dot message
  nil
end
ruby
{ "resource": "" }
q18715
ElmInstall.Repository.versions
train
# Memoized list of repository tags that look like semantic versions
# (contain at least two dots), parsed into Semverse::Version objects;
# unparseable tags are dropped.
def versions
  @versions ||= begin
    tag_names = repo.tags.map(&:name)
    version_tags = tag_names.grep(/(.*\..*\..*)/)
    version_tags.map { |tag| Semverse::Version.try_new tag }.compact
  end
end
ruby
{ "resource": "" }
q18716
SimpleEnum.ViewHelpers.enum_option_pairs
train
# Builds [label, key-or-value] pairs for an enum, suitable for select
# options. Falls back from the record to its class when the record
# itself does not expose the pluralized enum reader.
def enum_option_pairs(record, enum, encode_as_value = false)
  reader = enum.to_s.pluralize
  record = record.class unless record.respond_to?(reader)
  record.send(reader).map { |key, value|
    # Prefer a model-provided human name, then the i18n translation
    name = record.human_enum_name(enum, key) if record.respond_to?(:human_enum_name)
    name ||= translate_enum_key(enum, key)
    [name, encode_as_value ? value : key]
  }
end
ruby
{ "resource": "" }
q18717
GeneValidator.AlignmentValidation.array_to_ranges
train
# Collapses a sorted integer array into an array of ranges covering its
# consecutive runs, e.g. [1, 2, 3, 7] => [1..3, 7..7].
def array_to_ranges(ar)
  runs = ar.chunk_while { |left, right| left + 1 == right }
  runs.map { |run| run.first..run.last }
end
ruby
{ "resource": "" }
q18718
GeneValidator.DuplicationValidation.find_local_alignment
train
# Aligns the hit and query subsequences of one HSP with MAFFT and
# returns the two aligned sequences as strings.
def find_local_alignment(hit, prediction, hsp)
  # indexing in blast starts from 1
  hit_local = hit.raw_sequence[hsp.hit_from - 1..hsp.hit_to - 1]
  query_local = prediction.raw_sequence[hsp.match_query_from - 1..hsp.match_query_to - 1]
  # in case of nucleotide prediction sequence translate into protein
  # use translate with reading frame 1 because
  # to/from coordinates of the hsp already correspond to the
  # reading frame in which the prediction was read to match this hsp
  if @type == :nucleotide
    s = Bio::Sequence::NA.new(query_local)
    query_local = s.translate
  end
  opt = ['--maxiterate', '1000', '--localpair', '--anysymbol', '--quiet', '--thread', @num_threads.to_s]
  mafft = Bio::MAFFT.new('mafft', opt)
  # local alignment for hit and query
  seqs = [hit_local, query_local]
  report = mafft.query_align(seqs)
  report.alignment.map(&:to_s)
rescue StandardError
  # NOTE(review): any failure (not only a missing binary) is reported as
  # a missing MAFFT installation — consider narrowing this rescue.
  raise NoMafftInstallationError
end
ruby
{ "resource": "" }
q18719
GeneValidator.Cluster.print
train
# Writes a human-readable dump of the cluster (mean, density, and each
# sorted length pair) to stderr.
def print
  warn "Cluster: mean = #{mean}, density = #{density}"
  lengths.sort.each do |elem|
    warn "#{elem[0]}, #{elem[1]}"
  end
  warn '--------------------------'
end
ruby
{ "resource": "" }
q18720
GeneValidator.Validations.get_info_on_query_sequence
train
# Builds a Query object for the input sequence at the given index,
# parsing the FASTA definition line and raw sequence.
def get_info_on_query_sequence(seq_type = @config[:type], index = @config[:idx])
  query = GeneValidator.extract_input_fasta_sequence(index)
  # Capture the definition line and the sequence body from the FASTA text
  parse_query = query.scan(/^>([^\n]*)\n([A-Za-z\n]*)/)[0]
  prediction = Query.new
  prediction.definition = parse_query[0].delete("\n")
  # Identifier = definition up to the first space
  prediction.identifier = prediction.definition.gsub(/ .*/, '')
  prediction.type = seq_type
  prediction.raw_sequence = parse_query[1].delete("\n")
  prediction.length_protein = prediction.raw_sequence.length
  # Nucleotide length is converted to protein length (3 bases per codon)
  prediction.length_protein /= 3 if seq_type == :nucleotide
  prediction
end
ruby
{ "resource": "" }
q18721
GeneValidator.Validate.length_validation_scores
train
# Merges the two length validations (cluster + rank) into one combined
# score by decrementing the already-accumulated tallies in +scores+:
# both passing counts as a single success, both failing as a single
# fail, and a split as half of each.
def length_validation_scores(validations, scores)
  cluster = validations.select { |v| v.class == LengthClusterValidationOutput }
  rank = validations.select { |v| v.class == LengthRankValidationOutput }
  return scores unless cluster.length == 1 && rank.length == 1
  cluster_passed = cluster[0].result == cluster[0].expected
  rank_passed = rank[0].result == rank[0].expected
  if cluster_passed && rank_passed
    scores[:successes] -= 1 # both true: counted as 1 success
  elsif !cluster_passed && !rank_passed
    scores[:fails] -= 1 # both false: counted as 1 fail
  else
    scores[:successes] -= 0.5
    scores[:fails] -= 0.5
  end
  scores
end
ruby
{ "resource": "" }
q18722
GeneValidator.OutputFiles.turn_off_automated_sorting
train
# Disables the results table's automatic sort-on-load by stripping the
# sortList option from the compiled JS asset. The file is rewritten via
# a temp file so a failure mid-write cannot truncate the original.
#
# Fix: the original wrote to an undefined local (+script_file+) instead
# of +js_file+, raising NameError at runtime.
def turn_off_automated_sorting
  js_file = File.join(@dirs[:output_dir], 'html_files/js/gv.compiled.min.js')
  original_content = File.read(js_file)
  # removes the automatic sort on page load
  updated_content = original_content.gsub(',sortList:[[0,0]]', '')
  File.open("#{js_file}.tmp", 'w') { |f| f.puts updated_content }
  FileUtils.mv("#{js_file}.tmp", js_file)
end
ruby
{ "resource": "" }
q18723
GeneValidator.OutputFiles.overview_html_hash
train
# Assembles the data hash used to render the overall score histogram.
def overview_html_hash(evaluation, less)
  # Tally identical scores into { key:, value:, main: } bars
  data = [@overview[:scores].group_by { |a| a }.map do |k, vs|
    { 'key': k, 'value': vs.length, 'main': false }
  end]
  { data: data, type: :simplebars, aux1: 10, aux2: '',
    title: 'Overall GeneValidator Score Evaluation', footer: '',
    xtitle: 'Validation Score', ytitle: 'Number of Queries',
    less: less, evaluation: evaluation }
end
ruby
{ "resource": "" }
q18724
RailsPushNotifications.BaseApp.push_notifications
train
# Sends all pending notifications through a freshly built pusher and
# stores the per-notification results back on the records.
def push_notifications
  pending = find_pending
  to_send = pending.map do |notification|
    notification_type.new notification.destinations, notification.data
  end
  pusher = build_pusher
  pusher.push to_send
  # Results come back positionally, so pair them up by index
  pending.each_with_index do |p, i|
    # NOTE(review): update_attributes! was removed in Rails 6.1+; this
    # presumably targets an older ActiveRecord — confirm.
    p.update_attributes! results: to_send[i].results
  end
end
ruby
{ "resource": "" }
q18725
ActiveRecord::Associations.HasManyForActiveModelAssociation.replace
train
# Replaces the association's contents with +other_array+, updating the
# owner's id attribute and reusing already-loaded records in place.
def replace(other_array)
  original_target = load_target.dup
  other_array.each { |val| raise_on_type_mismatch!(val) }
  # Keep the owner's foreign-id attribute in sync with the new records
  target_ids = reflection.options[:target_ids]
  owner[target_ids] = other_array.map(&:id)
  # Drop records that are no longer present
  old_records = original_target - other_array
  old_records.each do |record|
    @target.delete(record)
  end
  # Insert new records; overwrite loaded ones at their current position
  other_array.each do |record|
    if index = @target.index(record)
      @target[index] = record
    else
      @target << record
    end
  end
end
ruby
{ "resource": "" }
q18726
ActiveRecord::Associations.HasManyForActiveModelAssociation.concat
train
# Appends records to the association, extending the owner's id list and
# replacing duplicates that are already loaded.
def concat(*records)
  load_target
  flatten_records = records.flatten
  flatten_records.each { |val| raise_on_type_mismatch!(val) }
  target_ids = reflection.options[:target_ids]
  owner[target_ids] ||= []
  owner[target_ids].concat(flatten_records.map(&:id))
  flatten_records.each do |record|
    if index = @target.index(record)
      # Record already loaded: replace it in place
      @target[index] = record
    else
      @target << record
    end
  end
  target
end
ruby
{ "resource": "" }
q18727
ActiveModel::Associations.OverrideMethods.association
train
# Returns (building on first access) the association proxy for +name+.
# Associations flagged with :active_model get the ActiveModel-aware
# has-many implementation instead of the reflection's default class.
def association(name) #:nodoc:
  association = association_instance_get(name)
  if association.nil?
    reflection = self.class.reflect_on_association(name)
    if reflection.options[:active_model]
      association = ActiveRecord::Associations::HasManyForActiveModelAssociation.new(self, reflection)
    else
      association = reflection.association_class.new(self, reflection)
    end
    association_instance_set(name, association)
  end
  association
end
ruby
{ "resource": "" }
q18728
SchemaToScaffold.Path.choose
train
# Interactively asks the user to pick one of the schema files found
# under @path; exits the process when none exist.
def choose
  validate_path
  search_paths_list = search_paths
  if search_paths_list.empty?
    puts "\nThere is no /schema[^\/]*.rb$/ in the directory #{@path}"
    exit
  end
  search_paths_list.each_with_index {|path,indx| puts "#{indx}. #{path}" }
  # Re-prompt until the entered index maps to an existing entry.
  # NOTE(review): gets.to_i turns non-numeric input into 0, silently
  # selecting the first path — confirm this is intended.
  begin
    print "\nSelect a path to the target schema: "
  end while search_paths_list[(id = STDIN.gets.to_i)].nil?
  search_paths_list[id]
end
ruby
{ "resource": "" }
q18729
SimpleScheduler.SchedulerJob.load_config
train
# Loads the scheduler YAML config (with ERB preprocessing), extracting
# the global options and leaving only task definitions in @config.
def load_config
  @config = YAML.safe_load(ERB.new(File.read(config_path)).result)
  @queue_ahead = @config["queue_ahead"] || Task::DEFAULT_QUEUE_AHEAD_MINUTES
  @queue_name = @config["queue_name"] || "default"
  @time_zone = @config["tz"] || Time.zone.tzinfo.name
  ["queue_ahead", "queue_name"].each { |global_key| @config.delete(global_key) }
  @config.delete("tz")
end
ruby
{ "resource": "" }
q18730
SimpleScheduler.SchedulerJob.queue_future_jobs
train
# Queues FutureJob wrappers for every task run time that is not already
# scheduled.
def queue_future_jobs
  tasks.each do |task|
    # Schedule the new run times using the future job wrapper.
    new_run_times = task.future_run_times - task.existing_run_times
    new_run_times.each do |time|
      SimpleScheduler::FutureJob.set(queue: @queue_name, wait_until: time)
                                .perform_later(task.params, time.to_i)
    end
  end
end
ruby
{ "resource": "" }
q18731
SimpleScheduler.SchedulerJob.tasks
train
# Builds one Task per configured entry, filling in global defaults for
# queue_ahead and time zone, and tagging each with its config key name.
def tasks
  @config.map do |task_name, options|
    params = options.symbolize_keys
    params[:queue_ahead] ||= @queue_ahead
    params[:name] = task_name
    params[:tz] ||= @time_zone
    Task.new(params)
  end
end
ruby
{ "resource": "" }
q18732
SimpleScheduler.Task.existing_jobs
train
# Memoized list of already-scheduled Sidekiq jobs belonging to this
# task (matched by wrapper class, job class name and task name).
def existing_jobs
  @existing_jobs ||= SimpleScheduler::Task.scheduled_set.select do |job|
    # Only FutureJob wrappers are candidates
    next unless job.display_class == "SimpleScheduler::FutureJob"
    task_params = job.display_args[0].symbolize_keys
    task_params[:class] == job_class_name && task_params[:name] == name
  end.to_a
end
ruby
{ "resource": "" }
q18733
SimpleScheduler.Task.future_run_times
train
# Returns the task's future run times, extending the existing ones
# until at least two jobs are queued and the queue-ahead window is
# covered.
def future_run_times
  future_run_times = existing_run_times.dup
  # With no existing runs, seed one frequency step before "at"
  last_run_time = future_run_times.last || at - frequency
  last_run_time = last_run_time.in_time_zone(time_zone)
  # Ensure there are at least two future jobs scheduled and that the queue ahead time is filled
  while future_run_times.length < 2 || minutes_queued_ahead(last_run_time) < queue_ahead
    last_run_time = frequency.from_now(last_run_time)
    # The hour may not match because of a shift caused by DST in previous run times,
    # so we need to ensure that the hour matches the specified hour if given.
    last_run_time = last_run_time.change(hour: at.hour, min: at.min) if at.hour?
    future_run_times << last_run_time
  end
  future_run_times
end
ruby
{ "resource": "" }
q18734
SimpleScheduler.FutureJob.perform
train
# Re-instantiates the task from its params and queues it, unless the
# scheduled time has passed the task's expiration window.
#
# @param task_params [Hash] serialized task definition
# @param scheduled_time [Integer] epoch seconds the job was meant to run
# @raise [Expired] when the scheduled time is past expires_after
def perform(task_params, scheduled_time)
  @task = Task.new(task_params)
  @scheduled_time = Time.at(scheduled_time).in_time_zone(@task.time_zone)
  raise Expired if expired?
  queue_task
end
ruby
{ "resource": "" }
q18735
SimpleScheduler.FutureJob.expire_duration
train
# Parses the task's expires_after string (e.g. "30.minutes") into a
# duration by sending the unit name to the numeric amount.
def expire_duration
  amount, unit = @task.expires_after.split(".")
  amount.to_i.send(unit)
end
ruby
{ "resource": "" }
q18736
SimpleScheduler.FutureJob.expired?
train
# True when the current time is past the scheduled time plus the task's
# expires_after duration; tasks without expires_after never expire.
def expired?
  return false if @task.expires_after.blank?
  expire_duration.from_now(@scheduled_time) < Time.now.in_time_zone(@task.time_zone)
end
ruby
{ "resource": "" }
q18737
SimpleScheduler.FutureJob.handle_expired_task
train
# Decorates the Expired exception with run-time context and hands it to
# every registered expired-task callback.
def handle_expired_task(exception)
  exception.run_time = Time.now.in_time_zone(@task.time_zone)
  exception.scheduled_time = @scheduled_time
  exception.task = @task
  SimpleScheduler.expired_task_blocks.each { |block| block.call(exception) }
end
ruby
{ "resource": "" }
q18738
SimpleScheduler.FutureJob.queue_task
train
# Enqueues the task's job, passing the scheduled epoch time only when
# the job's perform method accepts an argument.
def queue_task
  if @task.job_class.instance_method(:perform).arity.zero?
    @task.job_class.send(perform_method)
  else
    @task.job_class.send(perform_method, @scheduled_time.to_i)
  end
end
ruby
{ "resource": "" }
q18739
SimpleScheduler.At.parsed_time
train
# Lazily computes the next concrete run time from the parsed day/hour/
# minute components, normalizing hour 24 and rolling past times forward.
def parsed_time
  return @parsed_time if @parsed_time
  @parsed_time = parsed_day
  change_hour = next_hour
  # There is no hour 24, so we need to move to the next day
  if change_hour == 24
    @parsed_time = 1.day.from_now(@parsed_time)
    change_hour = 0
  end
  @parsed_time = @parsed_time.change(hour: change_hour, min: at_min)
  # If the parsed time is still before the current time, add an additional day if
  # the week day wasn't specified or add an additional week to get the correct time.
  @parsed_time += at_wday? ? 1.week : 1.day if now > @parsed_time
  @parsed_time
end
ruby
{ "resource": "" }
q18740
ActionSubscriber.DSL.exchange_names
train
# DSL accessor: accumulates declared exchange names across calls and
# returns them deduplicated, falling back to the configured default
# exchange when none have been declared.
def exchange_names(*names)
  @_exchange_names ||= []
  @_exchange_names += names.flatten.map(&:to_s)
  return [ ::ActionSubscriber.config.default_exchange ] if @_exchange_names.empty?
  @_exchange_names.compact.uniq
end
ruby
{ "resource": "" }
q18741
ActionSubscriber.Subscribable.queue_name_for_method
train
# Returns the queue name for a subscriber method, generating and
# registering one (via queue_for) on first use.
def queue_name_for_method(method_name)
  cached = queue_names[method_name]
  return cached if cached
  generated = generate_queue_name(method_name)
  queue_for(method_name, generated)
  generated
end
ruby
{ "resource": "" }
q18742
ActionSubscriber.Subscribable.routing_key_name_for_method
train
# Returns the routing key for a subscriber method, generating and
# registering one (via routing_key_for) on first use.
def routing_key_name_for_method(method_name)
  cached = routing_key_names[method_name]
  return cached if cached
  generated = generate_routing_key_name(method_name)
  routing_key_for(method_name, generated)
  generated
end
ruby
{ "resource": "" }
q18743
ECDSA.Point.double
train
# Returns this point added to itself (elliptic-curve point doubling)
# using the tangent-line formula over the prime field.
def double
  return self if infinity?
  # gamma = (3*x^2 + a) / (2*y): slope of the tangent at this point.
  # NOTE(review): when y == 0 this inverts 0 — confirm inputs exclude
  # that case (points of order 2).
  gamma = field.mod((3 * x * x + @group.param_a) * field.inverse(2 * y))
  new_x = field.mod(gamma * gamma - 2 * x)
  new_y = field.mod(gamma * (x - new_x) - y)
  self.class.new(group, new_x, new_y)
end
ruby
{ "resource": "" }
q18744
ECDSA.Point.multiply_by_scalar
train
# Multiplies the point by a non-negative integer scalar using the
# binary double-and-add algorithm (one double per bit, one add per set
# bit).
#
# @param i [Integer] non-negative scalar
# @raise [ArgumentError] when i is not an Integer or is negative
def multiply_by_scalar(i)
  raise ArgumentError, 'Scalar is not an integer.' if !i.is_a?(Integer)
  raise ArgumentError, 'Scalar is negative.' if i < 0
  result = group.infinity
  v = self
  while i > 0
    # Add the current power-of-two multiple when its bit is set
    result = result.add_to_point(v) if i.odd?
    v = v.double
    i >>= 1
  end
  result
end
ruby
{ "resource": "" }
q18745
ECDSA.PrimeField.square_roots
train
# Returns the square roots of n in the field, dispatching to the
# specialized algorithm available for the prime's residue class.
def square_roots(n)
  raise ArgumentError, "Not a member of the field: #{n}." if !include?(n)
  case
  when prime == 2 then [n]
  when (prime % 4) == 3 then square_roots_for_p_3_mod_4(n)
  when (prime % 8) == 5 then square_roots_for_p_5_mod_8(n)
  else square_roots_default(n)
  end
end
ruby
{ "resource": "" }
q18746
WaveFile.Writer.write_header
train
# Writes the complete WAVE header for a file that will contain
# sample_frame_count sample frames: RIFF chunk header, format chunk,
# an optional fact chunk, and the data chunk header.
def write_header(sample_frame_count)
  # The extensible format is required for >2 channels, non-8/16-bit
  # PCM, or non-default speaker mappings.
  extensible = @format.channels > 2 ||
               (@format.sample_format == :pcm && @format.bits_per_sample != 8 && @format.bits_per_sample != 16) ||
               (@format.channels == 1 && @format.speaker_mapping != [:front_center]) ||
               (@format.channels == 2 && @format.speaker_mapping != [:front_left, :front_right])
  format_code = extensible ? :extensible : @format.sample_format
  # Non-PCM formats must carry a fact chunk with the frame count
  requires_fact_chunk = (format_code != :pcm)
  sample_data_byte_count = sample_frame_count * @format.block_align
  riff_chunk_size = CANONICAL_HEADER_BYTE_LENGTH[format_code] + sample_data_byte_count
  # Chunks are word aligned: an odd-sized data chunk gets a pad byte
  if sample_data_byte_count.odd?
    riff_chunk_size += 1
  end
  # Write the header for the RIFF chunk
  header = CHUNK_IDS[:riff]
  header += [riff_chunk_size].pack(UNSIGNED_INT_32)
  header += WAVEFILE_FORMAT_CODE
  # Write the format chunk
  header += CHUNK_IDS[:format]
  header += [FORMAT_CHUNK_BYTE_LENGTH[format_code]].pack(UNSIGNED_INT_32)
  header += [FORMAT_CODES[format_code]].pack(UNSIGNED_INT_16)
  header += [@format.channels].pack(UNSIGNED_INT_16)
  header += [@format.sample_rate].pack(UNSIGNED_INT_32)
  header += [@format.byte_rate].pack(UNSIGNED_INT_32)
  header += [@format.block_align].pack(UNSIGNED_INT_16)
  header += [@format.bits_per_sample].pack(UNSIGNED_INT_16)
  if format_code == :float
    # Float formats carry a zero-length extension field
    header += [0].pack(UNSIGNED_INT_16)
  end
  if extensible
    # 22-byte extension: valid bits, channel mask, and sub-format GUID
    header += [22].pack(UNSIGNED_INT_16)
    header += [@format.bits_per_sample].pack(UNSIGNED_INT_16)
    header += [pack_speaker_mapping(@format.speaker_mapping)].pack(UNSIGNED_INT_32)
    if @format.sample_format == :pcm
      format_guid = WaveFile::SUB_FORMAT_GUID_PCM
    elsif @format.sample_format == :float
      format_guid = WaveFile::SUB_FORMAT_GUID_FLOAT
    end
    header += format_guid
  end
  # Write the FACT chunk, if necessary
  if requires_fact_chunk
    header += CHUNK_IDS[:fact]
    header += [4].pack(UNSIGNED_INT_32)
    header += [sample_frame_count].pack(UNSIGNED_INT_32)
  end
  # Write the header for the data chunk
  header += CHUNK_IDS[:data]
  header += [sample_data_byte_count].pack(UNSIGNED_INT_32)
  @io.write(header)
end
ruby
{ "resource": "" }
q18747
ObfuscateId.ClassMethods.obfuscate_id_default_spin
train
# Derives a deterministic per-model spin value from the class name by
# mapping each character to its index in the lowercase alphabet and
# concatenating the first twelve indices into an integer (characters
# outside a-z contribute nothing).
def obfuscate_id_default_spin
  letters = ("a".."z").to_a
  indexes = name.chars.map { |char| letters.index(char) }
  indexes.first(12).join.to_i
end
ruby
{ "resource": "" }
q18748
Praxis.ValidationHandler.handle!
train
# Builds a ValidationError response for the failed request, attaching
# request-specific documentation links.
def handle!(summary:, request:, stage:, errors: nil, exception: nil, **opts)
  documentation = Docs::LinkBuilder.instance.for_request request
  Responses::ValidationError.new(
    summary: summary,
    errors: errors,
    exception: exception,
    documentation: documentation,
    **opts
  )
end
ruby
{ "resource": "" }
q18749
Praxis.Response.validate
train
# Validates this response against the action's registered response
# definition of the same name; validation_error responses are exempt.
#
# @raise [Exceptions::Validation] when no definition matches the name
def validate(action, validate_body: false)
  return if response_name == :validation_error
  unless (response_definition = action.responses[response_name])
    raise Exceptions::Validation, "Attempting to return a response with name #{response_name} " \
      "but no response definition with that name can be found"
  end
  response_definition.validate(self, validate_body: validate_body)
end
ruby
{ "resource": "" }
q18750
Praxis.Config.define
train
# Defines (or extends) a configuration key as an Attributor attribute.
# A nil key targets the top-level config struct, which must therefore
# remain a Struct type.
def define(key=nil, type=Attributor::Struct, **opts, &block)
  if key.nil? && type != Attributor::Struct
    raise Exceptions::InvalidConfiguration.new(
      "You cannot define the top level configuration with a non-Struct type. Got: #{type.inspect}"
    )
  end
  case key
  when String, Symbol, NilClass
    top = key.nil? ? @attribute : @attribute.attributes[key]
    if top #key defined...redefine
      # Existing keys may only be extended with Struct-compatible types
      unless type == Attributor::Struct && top.type < Attributor::Struct
        raise Exceptions::InvalidConfiguration.new(
          "Incompatible type received for extending configuration key [#{key}]. Got type #{type.name}"
        )
      end
      top.options.merge!(opts)
      top.type.attributes(opts, &block)
    else
      @attribute.attributes[key] = Attributor::Attribute.new(type, opts, &block)
    end
  else
    raise Exceptions::InvalidConfiguration.new(
      "Defining a configuration key requires a String or a Symbol key. Got: #{key.inspect}"
    )
  end
end
ruby
{ "resource": "" }
q18751
Praxis.MultipartPart.derive_content_type
train
# Picks the best content type for this part given a preferred handler.
# text/plain parts may be refined by declared header values; otherwise
# the part's own content type is used.
def derive_content_type(handler_name)
  possible_values =
    if self.content_type.match 'text/plain'
      # Look for a declared content-type header attribute with an
      # enumerated :values option
      _, content_type_attribute = self.headers_attribute &&
        self.headers_attribute.attributes.find { |k,v| k.to_s =~ /^content[-_]{1}type$/i }
      if content_type_attribute && content_type_attribute.options.key?(:values)
        content_type_attribute.options[:values]
      else
        []
      end
    else
      [self.content_type]
    end
  # generic default encoding is the best we can do
  if possible_values.empty?
    return MediaTypeIdentifier.load("application/#{handler_name}")
  end
  # if any defined value match the preferred handler_name, return it
  possible_values.each do |ct|
    mti = MediaTypeIdentifier.load(ct)
    return mti if mti.handler_name == handler_name
  end
  # otherwise, pick the first
  pick = MediaTypeIdentifier.load(possible_values.first)
  # and return that one if it already corresponds to a registered handler
  # otherwise, add the encoding
  if Praxis::Application.instance.handlers.include?(pick.handler_name)
    return pick
  else
    return pick + handler_name
  end
end
ruby
{ "resource": "" }
q18752
Praxis.ActionDefinition.precomputed_header_keys_for_rack
train
# Memoized mapping from Rack env header names (e.g. HTTP_X_FOO,
# CONTENT_TYPE, CONTENT_LENGTH) back to the attribute keys declared on
# @headers.
def precomputed_header_keys_for_rack
  @precomputed_header_keys_for_rack ||= @headers.attributes.keys.each_with_object({}) do |key, mapping|
    rack_name = key.to_s
    unless rack_name == "CONTENT_TYPE" || rack_name == "CONTENT_LENGTH"
      rack_name = "HTTP_#{rack_name.gsub('-','_').upcase}"
    end
    mapping[rack_name] = key
  end
end
ruby
{ "resource": "" }
q18753
Praxis.ActionDefinition.derive_content_type
train
# Chooses the content type for a generated example, honoring declared
# content-type header values and falling back to the handler's generic
# application/<handler> type.
def derive_content_type(example, handler_name)
  # MultipartArrays *must* use the provided content_type
  if example.kind_of? Praxis::Types::MultipartArray
    return MediaTypeIdentifier.load(example.content_type)
  end
  _, content_type_attribute = self.headers &&
    self.headers.attributes.find { |k,v| k.to_s =~ /^content[-_]{1}type$/i }
  if content_type_attribute && content_type_attribute.options.key?(:values)
    # if any defined value match the preferred handler_name, return it
    content_type_attribute.options[:values].each do |ct|
      mti = MediaTypeIdentifier.load(ct)
      return mti if mti.handler_name == handler_name
    end
    # otherwise, pick the first
    pick = MediaTypeIdentifier.load(content_type_attribute.options[:values].first)
    # and return that one if it already corresponds to a registered handler
    # otherwise, add the encoding
    if Praxis::Application.instance.handlers.include?(pick.handler_name)
      return pick
    else
      return pick + handler_name
    end
  end
  # generic default encoding
  MediaTypeIdentifier.load("application/#{handler_name}")
end
ruby
{ "resource": "" }
q18754
Praxis.MediaTypeIdentifier.without_parameters
train
# Returns an equivalent identifier stripped of any media-type
# parameters; returns self when there are none to strip.
def without_parameters
  return self if self.parameters.empty?
  MediaTypeIdentifier.load(type: self.type, subtype: self.subtype, suffix: self.suffix)
end
ruby
{ "resource": "" }
q18755
Praxis.ResponseDefinition.validate_status!
train
# Validate status code if defined in the spec; raises when the actual
# response status differs from the declared one.
def validate_status!(response)
  return unless status
  return if response.status == status
  message = format(
    'Invalid response code detected. Response %s dictates status of %s but this response is returning %s.',
    name, status.inspect, response.status.inspect
  )
  raise Exceptions::Validation.new(message)
end
ruby
{ "resource": "" }
q18756
Praxis.ResponseDefinition.validate_location!
train
# Raises unless the response's Location header matches the declared
# location (String equality or Regexp match, via ===).
def validate_location!(response)
  return if location.nil?
  return if location === response.headers['Location']
  raise Exceptions::Validation.new("LOCATION does not match #{location.inspect}")
end
ruby
{ "resource": "" }
q18757
Praxis.ResponseDefinition.validate_content_type!
train
# Raises when the response's Content-Type is incompatible with the
# media type declared on this response definition.
def validate_content_type!(response)
  return unless media_type
  response_content_type = response.content_type
  expected_content_type = Praxis::MediaTypeIdentifier.load(media_type.identifier)
  unless expected_content_type.match(response_content_type)
    raise Exceptions::Validation.new(
      "Bad Content-Type header. #{response_content_type}" +
      " is incompatible with #{expected_content_type} as described in response: #{self.name}"
    )
  end
end
ruby
{ "resource": "" }
q18758
Praxis.ResponseDefinition.validate_body!
train
# Validates the response body against the declared media type, raising
# a Validation error listing any problems found. SimpleMediaType
# responses are skipped (they carry no validation schema).
#
# Fix: the error message previously ran its sentences together
# ("...identifier.Errors:") because of a missing space between the two
# concatenated string literals.
def validate_body!(response)
  return unless media_type
  return if media_type.kind_of? SimpleMediaType
  errors = self.media_type.validate(self.media_type.load(response.body))
  if errors.any?
    message = "Invalid response body for #{media_type.identifier}." \
              " Errors: #{errors.inspect}"
    raise Exceptions::Validation.new(message, errors: errors)
  end
end
ruby
{ "resource": "" }
q18759
Harmony.Page.load
train
# Loads each given path (flattened, stringified) into the page's
# window; returns self so calls can be chained.
def load(*paths)
  paths.flatten.each { |path| window.load(path.to_s) }
  self
end
ruby
{ "resource": "" }
q18760
Middleman.S3SyncExtension.read_config
train
# Reads s3_sync settings from the given IO (or from .s3_sync in the
# application root when no IO is supplied) and merges them into the
# extension's options.
#
# Fix: File.exists? was deprecated for years and removed in Ruby 3.2;
# File.exist? is the supported spelling.
def read_config(io = nil)
  unless io
    root_path = ::Middleman::Application.root
    config_file_path = File.join(root_path, ".s3_sync")
    # skip if config file does not exist
    return unless File.exist?(config_file_path)
    io = File.open(config_file_path, "r")
  end
  config = (YAML.load(io) || {}).symbolize_keys
  config.each do |key, value|
    options[key.to_sym] = value
  end
end
ruby
{ "resource": "" }
q18761
Geo.Coord.strfcoord
train
# Formats the coordinate according to formatstr, expanding each
# DIRECTIVES pattern (strftime-style) from the coordinate's full hash.
def strfcoord(formatstr)
  h = full_hash
  DIRECTIVES.reduce(formatstr) do |memo, (from, to)|
    memo.gsub(from) do
      # Proc directives build their replacement from the match data
      to = to.call(Regexp.last_match) if to.is_a?(Proc)
      res = to % h
      # guard_seconds may signal a carry into the minutes field via
      # carrymin (the hash key to increment)
      res, carrymin = guard_seconds(to, res)
      h[carrymin] += 1 if carrymin
      res
    end
  end
end
ruby
{ "resource": "" }
q18762
RubySpeech.GenericElement.read_attr
train
# Reads the attribute, optionally coercing the value by sending
# +to_call+ (e.g. :to_i); nil/false values are returned untouched.
def read_attr(attr_name, to_call = nil)
  value = self[attr_name]
  return value unless value && to_call
  value.__send__(to_call)
end
ruby
{ "resource": "" }
q18763
RubySpeech.GenericElement.write_attr
train
# Writes the attribute, optionally coercing the value by sending
# +to_call+ first; nil/false values are stored untouched.
def write_attr(attr_name, value, to_call = nil)
  coerced = value && to_call ? value.__send__(to_call) : value
  self[attr_name] = coerced
end
ruby
{ "resource": "" }
q18764
RubySpeech.GenericElement.namespace=
train
# Assigns the element's namespace. Accepts either an existing Nokogiri
# namespace object or a namespace href String (which is first added to
# the element as its default namespace).
def namespace=(namespaces)
  case namespaces
  when Nokogiri::XML::Namespace
    super namespaces
  when String
    # Register the href as the default (nil-prefixed) namespace first
    ns = self.add_namespace nil, namespaces
    super ns
  end
end
ruby
{ "resource": "" }
q18765
ActiveModel::Datastore.PropertyValues.default_property_value
train
# Applies a default to the attribute: booleans are only assigned when
# the current value is nil (so an explicit false is preserved); other
# values replace a blank current value via presence.
def default_property_value(attr, value)
  setter = "#{attr.to_sym}="
  current = send(attr.to_sym)
  if value.is_a?(TrueClass) || value.is_a?(FalseClass)
    send(setter, value) if current.nil?
  else
    send(setter, current.presence || value)
  end
end
ruby
{ "resource": "" }
q18766
ActiveModel::Datastore.PropertyValues.format_property_value
train
# Coerces the attribute's current value to the given type (:integer,
# :float or :boolean); blank values are left untouched.
#
# @raise [ArgumentError] for unsupported types
def format_property_value(attr, type)
  return unless send(attr.to_sym).present?
  case type.to_sym
  when :integer
    send("#{attr.to_sym}=", send(attr.to_sym).to_i)
  when :float
    send("#{attr.to_sym}=", send(attr.to_sym).to_f)
  when :boolean
    send("#{attr.to_sym}=", ActiveModel::Type::Boolean.new.cast(send(attr.to_sym)))
  else
    raise ArgumentError, 'Supported types are :boolean, :integer, :float'
  end
end
ruby
{ "resource": "" }
q18767
ActiveModel::Datastore.NestedAttr.nested_models
train
# Returns a flat array of all model objects held by the declared
# nested attributes (empty when none are declared).
def nested_models
  return [] unless nested_attributes?
  nested_attributes.map { |attr| send(attr.to_sym) }.flatten
end
ruby
{ "resource": "" }
q18768
ActiveModel::Datastore.NestedAttr.assign_nested_attributes
train
# Assigns a Rails-style nested-attributes hash (keyed by index) to the
# association: builds new records for entries without an id and updates
# (or marks for destruction) existing ones.
def assign_nested_attributes(association_name, attributes, options = {})
  attributes = validate_attributes(attributes)
  association_name = association_name.to_sym
  send("#{association_name}=", []) if send(association_name).nil?
  attributes.each_value do |params|
    if params['id'].blank?
      # New record, unless the reject_if option filters it out
      unless reject_new_record?(params, options)
        # NOTE(review): association_name.to_c looks wrong — Symbol has
        # no #to_c; presumably this should resolve the association's
        # model class. Confirm against the gem's helpers.
        new = association_name.to_c.new(params.except(*UNASSIGNABLE_KEYS))
        send(association_name).push(new)
      end
    else
      existing = send(association_name).detect { |record| record.id.to_s == params['id'].to_s }
      assign_to_or_mark_for_destruction(existing, params)
    end
  end
  # Track which associations carry nested attributes
  (self.nested_attributes ||= []).push(association_name)
end
ruby
{ "resource": "" }
q18769
ActiveModel::Datastore.NestedAttr.assign_to_or_mark_for_destruction
train
# Applies the assignable attributes to the record and flags it for
# destruction when the params carry a destroy marker.
def assign_to_or_mark_for_destruction(record, attributes)
  assignable = attributes.except(*UNASSIGNABLE_KEYS)
  record.assign_attributes(assignable)
  record.mark_for_destruction if destroy_flag?(attributes)
end
ruby
{ "resource": "" }
q18770
ActiveModel::Datastore.ClassMethods.find_entity
train
# Looks up a single Datastore entity by id or name, optionally scoped
# under a parent key; retries on transient errors.
def find_entity(id_or_name, parent = nil)
  key = CloudDatastore.dataset.key name, id_or_name
  key.parent = parent if parent.present?
  retry_on_exception { CloudDatastore.dataset.find key }
end
ruby
{ "resource": "" }
q18771
ActiveModel::Datastore.ClassMethods.all
train
# Runs a query for all entities of this kind and returns model objects.
# When a :limit option is given, also returns the cursor for the next
# page (nil when the page was not full, i.e. no more results).
def all(options = {})
  next_cursor = nil
  query = build_query(options)
  query_results = retry_on_exception { CloudDatastore.dataset.run query }
  if options[:limit]
    # A full page implies there may be more results
    next_cursor = query_results.cursor if query_results.size == options[:limit]
    return from_entities(query_results.all), next_cursor
  end
  from_entities(query_results.all)
end
ruby
{ "resource": "" }
q18772
ActiveModel::Datastore.ClassMethods.find_by
train
# Returns the first entity whose attribute (the first key in args)
# equals the given value, optionally scoped to an ancestor.
def find_by(args)
  query = CloudDatastore.dataset.query name
  query.ancestor(args[:ancestor]) if args[:ancestor]
  query.limit(1)
  # The first key/value pair is used as the equality filter.
  # NOTE(review): if :ancestor is the first hash entry it would also be
  # used as the filter — callers presumably pass the filter key first.
  query.where(args.keys[0].to_s, '=', args.values[0])
  query_results = retry_on_exception { CloudDatastore.dataset.run query }
  from_entity(query_results.first)
end
ruby
{ "resource": "" }
q18773
ActiveModel::Datastore.ClassMethods.query_sort
train
# Applies ascending (:order) and/or descending (:desc_order) sort
# options to the query and returns it.
def query_sort(query, options)
  ascending = options[:order]
  descending = options[:desc_order]
  query.order(ascending) if ascending
  query.order(descending, :desc) if descending
  query
end
ruby
{ "resource": "" }
q18774
Pxlsrt.Image.diagonalColumnRow
train
# Maps a diagonal index +d+ and an offset +i+ along that diagonal to a
# column/row position hash; negative diagonals start below the origin.
def diagonalColumnRow(d, i)
  diagonal = d.to_i
  below = diagonal < 0
  {
    'column' => (below ? i : diagonal + i).to_i,
    'row' => (below ? diagonal.abs + i : i).to_i
  }
end
ruby
{ "resource": "" }
q18775
Pxlsrt.Image.getSobel
train
# Returns the Sobel edge magnitude for the pixel at (x, y).
#
# When the full @sobels cache exists (built by getSobels), the value is read
# from it; otherwise it is computed from the 3x3 neighborhood in @grey.
# Border pixels (where the 3x3 window would fall outside the image) yield 0.
#
# @param x [Integer] column, 0...@width
# @param y [Integer] row, 0...@height
# @return [Integer] edge magnitude (sqrt of squared gradients, rounded up)
def getSobel(x, y)
  if !defined?(@sobels)
    # Standard 3x3 Sobel convolution kernels (horizontal and vertical).
    @sobel_x ||= [[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]
    @sobel_y ||= [[-1, -2, -1], [0, 0, 0], [1, 2, 1]]
    # Borders have no complete neighborhood; treat them as non-edges.
    return 0 if x.zero? || (x == (@width - 1)) || y.zero? || (y == (@height - 1))
    # 3x3 grayscale neighborhood, t1..t9 in row-major order around (x, y).
    t1 = @grey[y - 1][x - 1]
    t2 = @grey[y - 1][x]
    t3 = @grey[y - 1][x + 1]
    t4 = @grey[y][x - 1]
    t5 = @grey[y][x]
    t6 = @grey[y][x + 1]
    t7 = @grey[y + 1][x - 1]
    t8 = @grey[y + 1][x]
    t9 = @grey[y + 1][x + 1]
    # Horizontal gradient (convolution with @sobel_x).
    pixel_x = (@sobel_x[0][0] * t1) + (@sobel_x[0][1] * t2) + (@sobel_x[0][2] * t3) + (@sobel_x[1][0] * t4) + (@sobel_x[1][1] * t5) + (@sobel_x[1][2] * t6) + (@sobel_x[2][0] * t7) + (@sobel_x[2][1] * t8) + (@sobel_x[2][2] * t9)
    # Vertical gradient (convolution with @sobel_y).
    pixel_y = (@sobel_y[0][0] * t1) + (@sobel_y[0][1] * t2) + (@sobel_y[0][2] * t3) + (@sobel_y[1][0] * t4) + (@sobel_y[1][1] * t5) + (@sobel_y[1][2] * t6) + (@sobel_y[2][0] * t7) + (@sobel_y[2][1] * t8) + (@sobel_y[2][2] * t9)
    Math.sqrt(pixel_x * pixel_x + pixel_y * pixel_y).ceil
  else
    # Cached path: @sobels is a flat row-major array of precomputed values.
    @sobels[y * @width + x]
  end
end
ruby
{ "resource": "" }
q18776
Pxlsrt.Image.getSobels
train
# Computes (and memoizes) the Sobel edge value for every pixel, stored as a
# flat row-major array. Subsequent calls return the cached array.
#
# @return [Array<Integer>] one Sobel magnitude per pixel
def getSobels
  unless defined?(@sobels)
    @sobels = (0...(@width * @height)).map do |idx|
      getSobel(idx % @width, (idx / @width).floor)
    end
  end
  @sobels
end
ruby
{ "resource": "" }
q18777
Middleware.Runner.build_call_chain
train
# Converts a middleware stack (an array of [callable, args, block] triples)
# into a single callable chain, each link invoking the next.
#
# @param stack [Array<Array>] entries of [Class-or-callable, args, block]
# @return [#call] the head of the composed middleware chain
# @raise [RuntimeError] if an entry is neither a Class nor responds to #call
def build_call_chain(stack)
  # We need to instantiate the middleware stack in reverse
  # order so that each middleware can have a reference to
  # the next middleware it has to call. The final middleware
  # is always the empty middleware, which does nothing but return.
  stack.reverse.inject(EMPTY_MIDDLEWARE) do |next_middleware, current_middleware|
    # Unpack the actual item
    klass, args, block = current_middleware
    # Default the arguments to an empty array. Otherwise in Ruby 1.8
    # a `nil` args will actually pass `nil` into the class. Not what
    # we want!
    args ||= []
    if klass.is_a?(Class)
      # If the klass actually is a class, then instantiate it with
      # the app and any other arguments given.
      klass.new(next_middleware, *args, &block)
    elsif klass.respond_to?(:call)
      # Make it a lambda which calls the item then forwards up
      # the chain.
      lambda do |env|
        next_middleware.call(klass.call(env, *args))
      end
    else
      fail "Invalid middleware, doesn't respond to `call`: #{klass.inspect}"
    end
  end
end
ruby
{ "resource": "" }
q18778
Middleware.Builder.insert_before_each
train
# Inserts the given middleware (with its args and block) immediately before
# every existing entry in the stack.
def insert_before_each(middleware, *args, &block)
  self.stack = stack.flat_map do |item|
    [[middleware, args, block], item]
  end
end
ruby
{ "resource": "" }
q18779
Middleware.Builder.delete
train
# Removes the middleware at the given position. A non-integer argument is
# treated as a middleware object and resolved to its index first.
#
# @return the removed stack entry, or nil if the position was out of range
def delete(index)
  position = index.is_a?(Integer) ? index : self.index(index)
  stack.delete_at(position)
end
ruby
{ "resource": "" }
q18780
Pxpay.Notification.to_hash
train
# Parses the gateway's XML response into a hash keyed by underscored,
# symbolized element names, plus the Response element's 'valid' attribute.
#
# @return [Hash{Symbol => String}]
def to_hash
  doc = ::Nokogiri::XML( self.response )
  response_node = doc.at_css("Response")
  result = response_node.element_children.each_with_object({}) do |child, acc|
    acc[child.name.underscore.to_sym] = child.inner_text
  end
  result[:valid] = doc.at_css("Response")['valid']
  result
end
ruby
{ "resource": "" }
q18781
CloudFoundry.Client.valid_target_url?
train
# True when the configured target returns cloud info containing all of the
# minimum expected fields. Any error while fetching info yields false.
def valid_target_url?
  info = cloud_info
  return false unless info
  [:name, :build, :support, :version].all? { |field| info[field] }
rescue
  false
end
ruby
{ "resource": "" }
q18782
Jikan.Search.result
train
# Wraps each raw search item in the result class matching the search type.
# An unrecognized type yields nil, as before.
def result
  wrapper =
    case @type
    when :anime     then Jikan::AnimeResult
    when :manga     then Jikan::MangaResult
    when :character then Jikan::CharacterResult
    when :person    then Jikan::PersonResult
    end
  wrapper && iter { |i| wrapper.new(i) }
end
ruby
{ "resource": "" }
q18783
OctocatalogDiff.PuppetDB.parse_url
train
# Parses a PuppetDB URL into connection settings.
#
# @param url [String] http:// or https:// URL
# @return [Hash] :ssl (Boolean), :host, :port, and :username/:password when
#   the URL carries userinfo
# @raise [ArgumentError] when the scheme is not http/https
# @raise [URI::InvalidURIError] re-raised with the offending URL included
def parse_url(url)
  uri = URI(url)
  # URI.split(url)[3] is the port component; when the URL omits an explicit
  # port, substitute the scheme-appropriate default instead of URI's own.
  if URI.split(url)[3].nil?
    uri.port = uri.scheme == 'https' ? DEFAULT_HTTPS_PORT : DEFAULT_HTTP_PORT
  end
  raise ArgumentError, "URL #{url} has invalid scheme" unless uri.scheme =~ /^https?$/
  parsed_url = { ssl: uri.scheme == 'https', host: uri.host, port: uri.port }
  if uri.user || uri.password
    parsed_url[:username] = uri.user
    parsed_url[:password] = uri.password
  end
  parsed_url
rescue URI::InvalidURIError => exc
  raise exc.class, "Invalid URL: #{url} (#{exc.message})"
end
ruby
{ "resource": "" }
q18784
OctocatalogDiff.Catalog.resources
train
# Returns the resource array of the built catalog, building it on demand.
# Supports both catalog layouts seen in the wild: resources nested under a
# top-level 'data' hash, or directly at the top level.
#
# @return [Array] the catalog's resources
# @raise [OctocatalogDiff::Errors::CatalogError] when the catalog failed to
#   build or is otherwise invalid
def resources
  build
  raise OctocatalogDiff::Errors::CatalogError, 'Catalog does not appear to have been built' if !valid? && error_message.nil?
  raise OctocatalogDiff::Errors::CatalogError, error_message unless valid?
  return @catalog['data']['resources'] if @catalog['data'].is_a?(Hash) && @catalog['data']['resources'].is_a?(Array)
  return @catalog['resources'] if @catalog['resources'].is_a?(Array)
  # This is a bug condition
  # :nocov:
  raise "BUG: catalog has no data::resources or ::resources array. Please report this. #{@catalog.inspect}"
  # :nocov:
end
ruby
{ "resource": "" }
q18785
OctocatalogDiff.Facts.facts
train
# Returns a copy of the stored facts suitable for handing to Puppet.
#
# @param node [String] node name to set as the 'name' key (defaults to @node;
#   blank/nil leaves 'name' unset)
# @param timestamp [Boolean] when true, stamp the result with the current
#   time and a 24-hour expiration
# @return [Hash] facts with '_timestamp' removed and 'expiration' stripped
#   (unless re-added by +timestamp+)
# @raise [RuntimeError] if @facts or @facts['values'] is not a Hash
def facts(node = @node, timestamp = false)
  raise "Expected @facts to be a hash but it is a #{@facts.class}" unless @facts.is_a?(Hash)
  raise "Expected @facts['values'] to be a hash but it is a #{@facts['values'].class}" unless @facts['values'].is_a?(Hash)
  f = @facts.dup
  # Bug fix: Hash#dup is shallow, so deleting from f['values'] previously
  # mutated the inner hash shared with @facts. Copy the inner hash first so
  # the receiver's state is left untouched.
  f['values'] = @facts['values'].dup
  f['name'] = node unless node.nil? || node.empty?
  f['values'].delete('_timestamp')
  f.delete('expiration')
  if timestamp
    f['timestamp'] = Time.now.to_s
    f['values']['timestamp'] = f['timestamp']
    f['expiration'] = (Time.now + (24 * 60 * 60)).to_s
  end
  f
end
ruby
{ "resource": "" }
q18786
OctocatalogDiff.Facts.without
train
# Returns a copy of this facts object with the given fact name(s) removed.
#
# @param remove [String, Array<String>] fact name or list of fact names
# @return [Object] a duplicate of self without those facts
# NOTE(review): `dup` is shallow — remove_fact_from_list must not mutate
# state shared with the receiver; confirm against its implementation.
def without(remove)
  r = remove.is_a?(Array) ? remove : [remove]
  obj = dup
  r.each { |fact| obj.remove_fact_from_list(fact) }
  obj
end
ruby
{ "resource": "" }
q18787
OctocatalogDiff.Facts.facts_to_yaml
train
# Renders the facts as YAML with the document header rewritten to the
# '!ruby/object:Puppet::Node::Facts' tag that Puppet expects in fact files.
#
# @param node [String] node name embedded in the facts (defaults to @node)
# @return [String] YAML text without a trailing newline
def facts_to_yaml(node = @node)
  # Add the header that Puppet needs to treat this as facts. Save the results
  # as a string in the option.
  lines = facts(node).to_yaml.split(/\n/)
  lines[0] = '--- !ruby/object:Puppet::Node::Facts' if lines[0] =~ /^---/
  lines.join("\n")
end
ruby
{ "resource": "" }
q18788
JasperRails.JasperReportsRenderer.parameter_value_of
train
# Coerces a report parameter into a Java object for JasperReports: values
# that are already Rjb Java proxies pass through untouched; anything else is
# stringified into a UTF-8 java.lang.String.
#
# NOTE(review): `param.class.parent` relies on ActiveSupport's Module#parent
# (renamed #module_parent in Rails 6) — confirm it is available here.
def parameter_value_of(param)
  _String = Rjb::import 'java.lang.String'
  # Using Rjb::import('java.util.HashMap').new, it returns an instance of
  # Rjb::Rjb_JavaProxy, so the Rjb_JavaProxy parent is the Rjb module itself.
  if param.class.parent == Rjb
    param
  else
    _String.new(param.to_s, "UTF-8")
  end
end
ruby
{ "resource": "" }
q18789
Delayed.RecurringJob.schedule!
train
# (Re)schedules this recurring job: merges the given options over any stored
# schedule options and the class-level defaults, destroys existing queued
# copies of the job, and enqueues a fresh one at the next run time.
#
# @param options [Hash] schedule overrides; :run_every is converted to a
#   serialized :run_interval; other keys: :run_at, :timezone, :priority, :queue
def schedule! options = {}
  # Don't mutate the caller's hash.
  options = options.dup
  if run_every = options.delete(:run_every)
    options[:run_interval] = serialize_duration(run_every)
  end
  # Explicit options win over previously-stored options, which win over the
  # class-level defaults.
  @schedule_options = options.reverse_merge(@schedule_options || {}).reverse_merge(
    run_at: self.class.run_at,
    timezone: self.class.timezone,
    run_interval: serialize_duration(self.class.run_every),
    priority: self.class.priority,
    queue: self.class.queue
  )
  enqueue_opts = { priority: @schedule_options[:priority], run_at: next_run_time }
  enqueue_opts[:queue] = @schedule_options[:queue] if @schedule_options[:queue]
  # Replace-and-enqueue atomically so at most one copy of the job exists.
  Delayed::Job.transaction do
    self.class.jobs(@schedule_options).destroy_all
    # delayed_job < 3 took positional priority/run_at args instead of a hash.
    if Gem.loaded_specs['delayed_job'].version.to_s.first.to_i < 3
      Delayed::Job.enqueue self, enqueue_opts[:priority], enqueue_opts[:run_at]
    else
      Delayed::Job.enqueue self, enqueue_opts
    end
  end
end
ruby
{ "resource": "" }
q18790
Yelp.Client.create_methods_from_instance
train
# Mirrors every public method defined directly on +instance+ onto this
# object by delegating each one through #add_method.
def create_methods_from_instance(instance)
  instance.public_methods(false).each { |name| add_method(instance, name) }
end
ruby
{ "resource": "" }
q18791
Yelp.Client.add_method
train
# Defines a singleton method on self that forwards the call, with all of its
# arguments, to the same-named public method on +instance+.
def add_method(instance, method_name)
  define_singleton_method(method_name) do |*forwarded|
    instance.public_send(method_name, *forwarded)
  end
end
ruby
{ "resource": "" }
q18792
Yelp.Configuration.auth_keys
train
# Collects the configured authentication credentials into a hash keyed by
# the accessor names listed in AUTH_KEYS.
def auth_keys
  AUTH_KEYS.each_with_object({}) do |key, collected|
    collected[key] = send(key)
  end
end
ruby
{ "resource": "" }
q18793
Pod.Lockfile.write_to_disk
train
# Serializes the lockfile (with development-pod links filtered out of the
# YAML) and writes it to the given path, creating parent directories first.
#
# @param path [Pathname] destination for the generated lockfile
# @return [Pathname] the path, which is also stored in #defined_in_file
def write_to_disk(path)
  # code here mimics the original method but with link filtering
  # (fix: dropped an unused local, `filename = File.basename(path)`)
  path.dirname.mkpath unless path.dirname.exist?
  yaml = to_link_yaml
  File.open(path, 'w') { |f| f.write(yaml) }
  self.defined_in_file = path
end
ruby
{ "resource": "" }
q18794
Pod.Lockfile.to_link_hash
train
# Builds the hash that will be dumped to the Podfile.lock, restoring the
# pre-link state for any pods installed via `pod links` so that linking
# development pods does not churn the lockfile.
#
# @return [Hash] lockfile contents with linked-pod entries reverted to the
#   values found in the existing Podfile.lock (when one exists)
def to_link_hash
  # retrieve the lock contents with links
  after_hash = to_hash
  # Nothing to reconcile against if no lockfile has been written yet.
  # (Fix: File.exist? replaces File.exists?, deprecated and removed in Ruby 3.2.)
  unless File.exist?(PODFILE_LOCK)
    return after_hash
  end
  # retrieve the lock content before the links
  before_hash = YAML.load(File.read(PODFILE_LOCK))
  # retrieve installed links
  links = Pod::Command::Links.installed_links
  #
  # Logic:
  # Replace anything that changed in the dumped Podfile.lock due to links
  # with the data that previously existed in the Podfile.lock, so the
  # dependency tree stays unchanged when linking development pods. Only the
  # following sections can be affected:
  #
  # - PODS
  # - DEPENDENCIES
  # - EXTERNAL SOURCES
  # - CHECKOUT OPTIONS
  # - SPEC CHECKSUMS
  #
  after_hash['PODS'] = merge_pods links, before_hash['PODS'], after_hash['PODS']
  after_hash['DEPENDENCIES'] = merge_dependencies links, before_hash['DEPENDENCIES'], after_hash['DEPENDENCIES']
  after_hash['EXTERNAL SOURCES'] = merge_hashes links, before_hash['EXTERNAL SOURCES'], after_hash['EXTERNAL SOURCES']
  after_hash['CHECKOUT OPTIONS'] = merge_hashes links, before_hash['CHECKOUT OPTIONS'], after_hash['CHECKOUT OPTIONS']
  after_hash['SPEC CHECKSUMS'] = merge_hashes links, before_hash['SPEC CHECKSUMS'], after_hash['SPEC CHECKSUMS']
  return after_hash
end
ruby
{ "resource": "" }
q18795
Pod.Lockfile.merge_dependencies
train
# Reverts the DEPENDENCIES entry of every linked pod in +after+ to its
# pre-link form from +before+. Pods present in only one of the two lists are
# left untouched. Returns +after+ (mutated in place).
def merge_dependencies(links, before, after)
  links.each do |name|
    src = find_dependency_index before, name
    dst = find_dependency_index after, name
    next if src.nil? || dst.nil?
    after[dst] = before[src]
  end
  after
end
ruby
{ "resource": "" }
q18796
Pod.Lockfile.merge_hashes
train
# Restores the pre-link value of every linked pod in +after+: entries the
# pre-link hash had are copied back; entries introduced by a link (absent
# from +before+) are dropped. Returns +after+ (mutated in place), or the
# unmodified +after+ when there was no pre-link hash.
def merge_hashes(links, before, after)
  return after if before.nil?
  links.each do |name|
    if before.has_key?(name)
      after[name] = before[name]
    elsif after.has_key?(name)
      after.delete(name)
    end
  end
  after
end
ruby
{ "resource": "" }
q18797
Chaskiq.SesSenderJob.perform
train
# Delivers the campaign email for one subscription (skipping subscriptions
# with no subscriber) and records a 'deliver' metric keyed by the SES
# message id with its domain suffix stripped.
def perform(campaign, subscription)
  return if subscription.subscriber.blank?
  delivery = campaign.prepare_mail_to(subscription).deliver
  ses_id = delivery.message_id.gsub("@email.amazonses.com", "")
  campaign.metrics.create(
    trackable: subscription,
    action: "deliver",
    data: ses_id
  )
end
ruby
{ "resource": "" }
q18798
Chaskiq.Campaign.clean_inline_css
train
# Runs the page at +url+ through Premailer to inline its CSS, then strips
# the editor placeholder text from the resulting HTML.
def clean_inline_css(url)
  inlined = Premailer.new(url, :adapter => :nokogiri, :escape_url_attributes => false).to_inline_css
  inlined.gsub("Drop Content Blocks Here", "")
end
ruby
{ "resource": "" }
q18799
Chaskiq.MailSenderJob.perform
train
# Inlines the campaign's CSS once, then fans out a push notification to
# every available subscription on the campaign's list.
def perform(campaign)
  campaign.apply_premailer
  campaign.list.subscriptions.availables.each do |subscription|
    campaign.push_notification(subscription)
  end
end
ruby
{ "resource": "" }