_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q19500
ComfyBootstrapForm.FormBuilder.draw_control_column
train
# Wraps the yielded control markup in a Bootstrap column <div> when the
# form uses the horizontal layout; for other layouts the block's output
# is returned untouched.
def draw_control_column(bootstrap, offset:)
  return yield unless bootstrap.horizontal?
  # Collect the column class plus an optional offset class, joined with a
  # single space (matches the original string concatenation).
  classes = [bootstrap.control_col_class.to_s]
  classes << bootstrap.offset_col_class if offset
  content_tag(:div, class: classes.join(" ")) { yield }
end
ruby
{ "resource": "" }
q19501
ComfyBootstrapForm.FormBuilder.draw_form_group_fieldset
train
# Renders a <fieldset class="form-group"> wrapper around the yielded controls,
# with an optional <legend> acting as the group label. The label text falls
# back to Rails' LabelBuilder translation when not given explicitly; in
# horizontal layout the legend gets column/alignment classes and the whole
# content is wrapped in a .row div. NOTE: `label` is only assigned inside the
# `unless bootstrap.label[:hide]` branch; Ruby's parser still defines it (nil)
# for the later `label.present?` check.
def draw_form_group_fieldset(bootstrap, method) options = {} unless bootstrap.label[:hide] label_text = bootstrap.label[:text] label_text ||= ActionView::Helpers::Tags::Label::LabelBuilder .new(@template, @object_name.to_s, method, @object, nil).translation add_css_class!(options, "col-form-label pt-0") add_css_class!(options, bootstrap.label[:class]) if bootstrap.horizontal? add_css_class!(options, bootstrap.label_col_class) add_css_class!(options, bootstrap.label_align_class) end label = content_tag(:legend, options) do label_text end end content_tag(:fieldset, class: "form-group") do content = "".html_safe content << label if label.present? content << draw_control_column(bootstrap, offset: bootstrap.label[:hide]) do yield end if bootstrap.horizontal? content_tag(:div, content, class: "row") else content end end end
ruby
{ "resource": "" }
q19502
ComfyBootstrapForm.ViewHelper.bootstrap_form_with
train
# form_with wrapper that installs ComfyBootstrapForm::FormBuilder as the
# builder, appends "form-inline" to the CSS classes for the inline layout,
# and suppresses Rails' default field-error <div> wrapping for the block.
def bootstrap_form_with(**options, &block)
  bs_opts = options[:bootstrap] || {}
  css = options.delete(:class)
  # Inline layout needs the Bootstrap "form-inline" class on the <form>.
  css = [css, "form-inline"].compact.join(" ") if bs_opts[:layout].to_s == "inline"
  form_options = options.reverse_merge(builder: ComfyBootstrapForm::FormBuilder)
  form_options.merge!(class: css) unless css.blank?
  supress_form_field_errors do
    form_with(**form_options, &block)
  end
end
ruby
{ "resource": "" }
q19503
ComfyBootstrapForm.ViewHelper.supress_form_field_errors
train
# Temporarily disables Rails' field-error wrapping (the .field_with_errors
# div) while the block runs, restoring the original proc afterwards even if
# the block raises. NOTE(review): mutates global ActionView::Base state, so
# this is not thread-safe — presumably acceptable for this gem's usage.
def supress_form_field_errors
  saved = ActionView::Base.field_error_proc
  # Identity proc: render the input exactly as-is, no error wrapper.
  ActionView::Base.field_error_proc = proc { |input, _instance| input }
  yield
ensure
  ActionView::Base.field_error_proc = saved
end
ruby
{ "resource": "" }
q19504
Parse.Client.request
train
# Issues an HTTP request against the Parse REST API. Builds the Parse auth /
# content-type headers (skipping any nil values), joins `uri` onto the API
# path, sends via the Faraday session and returns the response body.
# Faraday client errors are re-raised as Parse::ConnectionError so callers
# never see Faraday internals.
def request(uri, method = :get, body = nil, query = nil, content_type = nil) headers = {} { 'Content-Type' => content_type || 'application/json', 'User-Agent' => "Parse for Ruby, #{VERSION}", Protocol::HEADER_MASTER_KEY => @master_key, Protocol::HEADER_APP_ID => @application_id, Protocol::HEADER_API_KEY => @api_key, Protocol::HEADER_SESSION_TOKEN => @session_token }.each do |key, value| headers[key] = value if value end uri = ::File.join(path, uri) response = @session.send(method, uri, query || body || {}, headers) response.body # NOTE: Don't leak our internal libraries to our clients. # Extend this list of exceptions as needed. rescue Faraday::Error::ClientError => e raise Parse::ConnectionError, e.message end
ruby
{ "resource": "" }
q19505
Parse.Object.save
train
# Persists the object to Parse: PUT (update, merging pending ops) when it
# already has an object id, otherwise POST (create). The server response is
# parsed back into this object since array ops can return a mutated view.
# For User objects, password/username keys are scrubbed afterwards so
# credentials never linger in memory. Returns self.
def save if @parse_object_id method = :put merge!(@op_fields) # use ops instead of our own view of the columns else method = :post end body = safe_hash.to_json data = @client.request(uri, method, body) if data # array ops can return mutated view of array which needs to be parsed object = Parse.parse_json(class_name, data) object = Parse.copy_client(@client, object) parse object end if @class_name == Parse::Protocol::CLASS_USER delete('password') delete(:username) delete(:password) end self end
ruby
{ "resource": "" }
q19506
Parse.Object.safe_hash
train
# Builds the JSON-safe hash sent to the Parse API: reserved keys and
# relation-typed values are dropped, nil values become a delete op, and
# everything else is pointerized (objects replaced by Parse pointers).
def safe_hash
  pairs = map do |key, value|
    if Protocol::RESERVED_KEYS.include?(key)
      nil
    elsif value.is_a?(Hash) && value[Protocol::KEY_TYPE] == Protocol::TYPE_RELATION
      nil
    elsif value.nil?
      [key, Protocol::DELETE_OP]
    else
      [key, Parse.pointerize_value(value)]
    end
  end
  Hash[pairs.compact]
end
ruby
{ "resource": "" }
q19507
Parse.Object.increment
train
# Atomically increments `field` on the server by `amount` (default 1) using
# Parse's Increment op, then parses the server response into this object.
# Returns self. The commented-out code is an older client-side increment
# implementation kept for reference.
def increment(field, amount = 1) # value = (self[field] || 0) + amount # self[field] = value # if !@parse_object_id # # TODO - warn that the object must be stored first # return nil # end body = { field => Parse::Increment.new(amount) }.to_json data = @client.request(uri, :put, body) parse data self end
ruby
{ "resource": "" }
q19508
Parse.Object.parse
train
# Merges a server response hash into this object: captures the object id
# (first time only), converts created/updated timestamps to DateTime, then
# copies every remaining key (symbols stringified, the Parse __type key
# skipped). Returns self, or nil when data is nil/false.
def parse(data) return unless data @parse_object_id ||= data[Protocol::KEY_OBJECT_ID] if data.key? Protocol::KEY_CREATED_AT @created_at = DateTime.parse data[Protocol::KEY_CREATED_AT] end if data.key? Protocol::KEY_UPDATED_AT @updated_at = DateTime.parse data[Protocol::KEY_UPDATED_AT] end data.each do |k, v| k = k.to_s if k.is_a? Symbol self[k] = v if k != Parse::Protocol::KEY_TYPE end self end
ruby
{ "resource": "" }
q19509
Moonshot.UnicodeTable.draw_children
train
# Draws each child node at depth 1, flagging only the first child (which
# affects the connector glyph), then prints the closing corner.
def draw_children
  @children.each_with_index do |child, idx|
    child.draw(1, idx.zero?)
  end
  puts '└──'
end
ruby
{ "resource": "" }
q19510
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.upload_to_s3
train
# Uploads via the superclass, then (when a checksum file exists) verifies the
# S3 object's checksum. Verification failures raise RuntimeError, which is
# retried up to 3 times with a 10s pause; after that the error propagates.
def upload_to_s3(file, key) attempts = 0 begin super unless (checksum = checksum_file(file)).nil? verify_s3_checksum(key, checksum, attempt: attempts) end rescue RuntimeError => e unless (attempts += 1) > 3 # Wait 10 seconds before trying again. sleep 10 retry end raise e end end
ruby
{ "resource": "" }
q19511
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.download_from_github
train
# Downloads the GitHub release tarball matching `version` via `hub`,
# retrying on RuntimeError (including checksum-verification failures).
# The working directory is emptied before each attempt so stale files
# never match the glob. Returns the path of the downloaded build file.
def download_from_github(version) file_pattern = "*#{version}*.tar.gz" attempts = 0 Retriable.retriable on: RuntimeError do # Make sure the directory is empty before downloading the release. FileUtils.rm(Dir.glob('*')) # Download the release and find the actual build file. sh_out("hub release download #{version}") raise "File '#{file_pattern}' not found." if Dir.glob(file_pattern).empty? file = Dir.glob(file_pattern).fetch(0) unless (checksum = checksum_file(file)).nil? verify_download_checksum(file, checksum, attempt: attempts) end attempts += 1 file end end
ruby
{ "resource": "" }
q19512
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.verify_download_checksum
train
# Verifies the MD5 of a downloaded GitHub release file against the expected
# checksum stored in `checksum_file`. On mismatch the bad file is backed up
# (tagged with the attempt number) and a RuntimeError is raised so the
# caller's retry logic can re-download.
#
# @param build_file [String] path of the downloaded build artifact
# @param checksum_file [String] path of the file holding the expected MD5 hex
# @param attempt [Integer] retry attempt number, used to name the backup
def verify_download_checksum(build_file, checksum_file, attempt: 0)
  expected = File.read(checksum_file)
  actual = Digest::MD5.file(build_file).hexdigest
  if actual != expected
    # BUG FIX: message previously read "GitHub fie".
    log.error("GitHub file #{build_file} checksum should be #{expected} " \
              "but was #{actual}.")
    backup_failed_github_file(build_file, attempt)
    raise "Checksum for #{build_file} could not be verified."
  end
  log.info('Verified downloaded file checksum.')
end
ruby
{ "resource": "" }
q19513
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.backup_failed_github_file
train
# Copies a build file whose checksum verification failed into the system
# temp directory for post-mortem inspection, tagging it with the attempt
# number.
#
# @param build_file [String] path of the failed build artifact
# @param attempt [Integer] retry attempt number, embedded in the backup name
def backup_failed_github_file(build_file, attempt)
  basename = File.basename(build_file, '.tar.gz')
  # BUG FIX: the suffix was previously passed as a separate File.join
  # segment, yielding "<tmp>/<basename>/.gh.failure.N.tar.gz" — a file
  # inside a directory that doesn't exist, so FileUtils.cp would fail.
  # Build a single filename, mirroring backup_failed_s3_file.
  destination = File.join(Dir.tmpdir, "#{basename}.gh.failure.#{attempt}.tar.gz")
  FileUtils.cp(build_file, destination)
  log.info("Copied #{build_file} to #{destination}")
end
ruby
{ "resource": "" }
q19514
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.verify_s3_checksum
train
# Verifies an uploaded S3 object's checksum by comparing the expected MD5
# (read from `checksum_file`) with the object's ETag from a HEAD request.
# NOTE: ETag equals the MD5 only for non-multipart uploads. On mismatch the
# S3 copy is backed up locally and a RuntimeError triggers the retry path.
def verify_s3_checksum(s3_name, checksum_file, attempt: 0) headers = s3_client.head_object( key: s3_name, bucket: @bucket_name ) expected = File.read(checksum_file) actual = headers.etag.tr('"', '') if actual != expected log.error("S3 file #{s3_name} checksum should be #{expected} but " \ "was #{actual}.") backup_failed_s3_file(s3_name, attempt) raise "Checksum for #{s3_name} could not be verified." end log.info('Verified uploaded file checksum.') end
ruby
{ "resource": "" }
q19515
Moonshot::ArtifactRepository.S3BucketViaGithubReleases.backup_failed_s3_file
train
# Downloads an S3 object whose checksum verification failed into the system
# temp directory for post-mortem inspection, tagging it with the attempt
# number.
def backup_failed_s3_file(s3_name, attempt)
  basename = File.basename(s3_name, '.tar.gz')
  destination = File.join(Dir.tmpdir, "#{basename}.s3.failure.#{attempt}.tar.gz")
  s3_client.get_object(
    response_target: destination,
    key: s3_name,
    bucket: @bucket_name
  )
  log.info("Copied #{s3_name} to #{destination}")
end
ruby
{ "resource": "" }
q19516
Moonshot.StackASGPrinter.get_addl_info
train
# Fetches EC2 instance details for the given ids and returns a hash keyed
# by instance id.
def get_addl_info(instance_ids)
  resp = ec2_client.describe_instances(instance_ids: instance_ids)
  resp.reservations.flat_map(&:instances).each_with_object({}) do |inst, by_id|
    by_id[inst.instance_id] = inst
  end
end
ruby
{ "resource": "" }
q19517
Moonshot::BuildMechanism.TravisDeploy.wait_for_build
train
# Polls `travis show` until the build for `version` appears, extracting the
# job number of the BUILD=1 job. Retries (via sh_retry) up to
# MAX_BUILD_FIND_ATTEMPTS with a 10s base interval.
#
# @param version [String] the version/tag whose build to wait for
# @return [String] the matched travis job number (e.g. "123.4")
# @raise [CommandError] when the build output never matches
def wait_for_build(version)
  # Attempt to find the build. Re-attempt if the build can not
  # be found on travis yet.
  retry_opts = { tries: MAX_BUILD_FIND_ATTEMPTS, base_interval: 10 }
  job_number = nil
  sh_retry("bundle exec travis show #{@cli_args} #{version}",
           opts: retry_opts) do |build_out|
    # BUG FIX: guard the regexp match. Previously `.match(...)[1]` raised
    # NoMethodError on nil when the output didn't match, instead of the
    # intended CommandError that drives the retry/reporting path.
    match = build_out.match(/^#(\d+\.\d+) .+BUILD=1.+/)
    raise CommandError, "Build for #{version} not found.\n#{build_out}" unless match
    job_number = match[1]
  end
  job_number
end
ruby
{ "resource": "" }
q19518
Moonshot::BuildMechanism.TravisDeploy.wait_for_job
train
# Blocks until the given Travis job finishes or @timeout seconds elapse,
# polling every 10s and reporting progress through the interactive logger.
# Marks the step failed (without raising) when the timeout is hit.
def wait_for_job(job_number) authenticate # Wait for the job to complete or hit the timeout. start = Time.new job = repo.job(job_number) ilog.start_threaded("Waiting for job #{job_number} to complete.") do |s| while !job.finished? && Time.new - start < @timeout s.continue("Job status: #{job.state}") sleep 10 job.reload end if job.finished? s.success else s.failure("Job #{job_number} did not complete within time limit of " \ "#{@timeout} seconds") end end end
ruby
{ "resource": "" }
q19519
Moonshot.Stack.default_values
train
# Returns a hash of { parameter name => declared default value } for every
# parameter in the stack template.
#
# @return [Hash{String => Object}]
def default_values
  h = {}
  template.parameters.each do |p|
    # BUG FIX: previously read `h.default` — the Hash's own default value,
    # which is always nil — instead of the template parameter's default.
    h[p.name] = p.default
  end
  h
end
ruby
{ "resource": "" }
q19520
Moonshot::BuildMechanism.GithubRelease.git_tag_exists
train
# Returns true if `tag` already exists locally. When it does, also verifies
# that the tag points at `sha`, raising if it resolves to a different commit
# so we never silently build a release from the wrong code.
def git_tag_exists(tag, sha) exists = false sh_step("git tag -l #{tag}") do |_, output| exists = (output.strip == tag) end # If the tag does exist, make sure the existing SHA matches the SHA we're # trying to build from. if exists sh_step("git rev-list -n 1 #{tag}") do |_, output| raise "#{tag} already exists at a different SHA" \ if output.strip != sha end log.info("tag #{tag} already exists") end exists end
ruby
{ "resource": "" }
q19521
Moonshot::BuildMechanism.GithubRelease.hub_release_exists
train
# Returns true when a GitHub release for `semver` already exists, checked by
# whether `hub release show` prints the version on its first output line.
# The command is allowed to fail (fail: false) since a missing release is a
# normal outcome.
def hub_release_exists(semver)
  exists = false
  sh_step("hub release show #{semver}", fail: false) do |_, output|
    head = output.split("\n").first
    exists = !head.nil? && head.strip == semver.to_s
  end
  log.info("release #{semver} already exists") if exists
  exists
end
ruby
{ "resource": "" }
q19522
Moonshot::BuildMechanism.GithubRelease.check_ci_status
train
# Waits (up to @ci_status_timeout) for CI to report a passing status for
# `sha` via `hub ci-status`, retrying every ~10s. Returns the command
# output; sh_retry raises if the status never succeeds within the window.
def check_ci_status(sha) out = nil retry_opts = { max_elapsed_time: @ci_status_timeout, base_interval: 10 } ilog.start_threaded("Check CI status for #{sha}.") do |step| out = sh_retry("hub ci-status --verbose #{sha}", opts: retry_opts) step.success end out end
ruby
{ "resource": "" }
q19523
GamesDice::ProbabilityValidations.ClassMethods.prob_h_to_ao
train
# Converts a probability hash { result => probability } into the internal
# array/offset representation: a dense Float array `a` where a[i] is the
# probability of result (i + offset), and the offset (the minimum result).
# Raises ArgumentError when the result range exceeds one million entries.
def prob_h_to_ao h
  rmin, rmax = h.keys.minmax
  span = 1 + rmax - rmin
  raise ArgumentError, "Range of possible results too large" if span > 1000000
  dense = Array.new(span, 0.0)
  h.each_pair { |result, prob| dense[result - rmin] = Float(prob) }
  [dense, rmin]
end
ruby
{ "resource": "" }
q19524
GamesDice::ProbabilityValidations.ClassMethods.prob_ao_to_h
train
# Converts the internal array/offset probability representation back into a
# sparse hash { result => probability }, skipping zero (and negative)
# probability entries.
def prob_ao_to_h a, o
  sparse = {}
  a.each_with_index do |prob, idx|
    sparse[idx + o] = prob if prob > 0.0
  end
  sparse
end
ruby
{ "resource": "" }
q19525
Legato.Query.basic_options
train
# Collects the current values of all basic query options into a hash,
# omitting options that are unset (nil).
def basic_options
  BASIC_OPTION_KEYS.each_with_object({}) do |key, opts|
    value = send(key)
    opts[key] = value unless value.nil?
  end
end
ruby
{ "resource": "" }
q19526
Legato.Query.results
train
# Returns a Query ready for execution against `profile`. If this query has
# already been loaded, a fresh copy is used so results are re-fetched.
# Supports calling with only an options hash — `results(opts)` — in which
# case the first argument is treated as options and the profile falls back
# to self.profile (note the deliberate argument swap).
def results(profile=nil, options={}) query = loaded? ? Query.from_query(self) : self options, profile = profile, self.profile if profile.is_a?(Hash) query.profile = profile query.apply_options(self.basic_options.merge(options)) query end
ruby
{ "resource": "" }
q19527
Qless.Job.requeue
train
# Moves this job onto another queue (accepting a queue object or a
# String/Symbol name), optionally overriding data, delay, priority, tags,
# retries and dependencies — each defaulting to the job's current values.
# Wrapped in note_state_change so before/after requeue callbacks fire.
def requeue(queue, opts = {}) queue_name = case queue when String, Symbol then queue else queue.name end note_state_change :requeue do @client.call('requeue', @client.worker_name, queue_name, @jid, @klass_name, JSON.dump(opts.fetch(:data, @data)), opts.fetch(:delay, 0), 'priority', opts.fetch(:priority, @priority), 'tags', JSON.dump(opts.fetch(:tags, @tags)), 'retries', opts.fetch(:retries, @original_retries), 'depends', JSON.dump(opts.fetch(:depends, @dependencies)) ) end end
ruby
{ "resource": "" }
q19528
Qless.Job.fail
train
# Marks this job as failed with an error group and message, notifying state
# change listeners. Returns the server response, or false when the call
# returns nil. A Lua script error (e.g. job no longer owned) is translated
# into CantFailError for callers.
def fail(group, message) note_state_change :fail do @client.call( 'fail', @jid, @worker_name, group, message, JSON.dump(@data)) || false end rescue Qless::LuaScriptError => err raise CantFailError.new(err.message) end
ruby
{ "resource": "" }
q19529
Qless.Subscriber.start
train
# Starts the pub/sub listener on a background thread and blocks until the
# subscription to @channel is confirmed (signalled through a local Queue),
# guaranteeing no published messages are missed after start returns.
def start queue = ::Queue.new @thread = Thread.start do @listener_redis.subscribe(@channel, @my_channel) do |on| on.subscribe do |channel| queue.push(:subscribed) if channel == @channel end on.message do |channel, message| handle_message(channel, message) end end end queue.pop end
ruby
{ "resource": "" }
q19530
Qless.Queue.pop
train
# Pops job(s) off this queue for the current worker. With no count, returns
# a single Job (or nil); with a count, returns an Array of Jobs.
def pop(count = nil)
  raw = @client.call('pop', @name, worker_name, (count || 1))
  jobs = JSON.parse(raw).map { |jid| Job.new(@client, jid) }
  count.nil? ? jobs.first : jobs
end
ruby
{ "resource": "" }
q19531
Qless.Queue.peek
train
# Peeks at upcoming job(s) without popping them. With no count, returns a
# single Job (or nil); with a count, returns an Array of Jobs.
def peek(count = nil)
  raw = @client.call('peek', @name, (count || 1))
  jobs = JSON.parse(raw).map { |jid| Job.new(@client, jid) }
  count.nil? ? jobs.first : jobs
end
ruby
{ "resource": "" }
q19532
Qless.Queue.length
train
# Total number of jobs in this queue: the sum of the locks, work, scheduled
# and depends sorted-set cardinalities, fetched atomically via MULTI.
def length
  counts = @client.redis.multi do
    %w[locks work scheduled depends].each do |suffix|
      @client.redis.zcard("ql:q:#{@name}-#{suffix}")
    end
  end
  counts.inject(0, :+)
end
ruby
{ "resource": "" }
q19533
FriendlyId.History.scope_for_slug_generator
train
# Narrows the slug-generator scope for history: excludes slugs belonging to
# this record (so its own historical slugs don't collide), and — when the
# scoped module is in use — restricts to slugs with the same serialized
# scope. New records keep the unmodified relation.
def scope_for_slug_generator relation = super return relation if new_record? relation = relation.merge(Slug.where('sluggable_id <> ?', id)) if friendly_id_config.uses?(:scoped) relation = relation.where(Slug.arel_table[:scope].eq(serialized_scope)) end relation end
ruby
{ "resource": "" }
q19534
ChefApply.TargetHost.connect!
train
# Establishes (once) the Train backend connection to the target host and
# waits for it to become ready, then mixes in platform-specific behavior —
# unless a test has set @mocked_connection, in which case the mixin happens
# after platform data is mocked. Train errors are wrapped in
# ConnectionFailure (preferring e.cause for Train's wrapper errors).
def connect! # Keep existing connections return unless @backend.nil? @backend = train_connection.connection @backend.wait_until_ready # When the testing function `mock_instance` is used, it will set # this instance variable to false and handle this function call # after the platform data is mocked; this will allow binding # of mixin functions based on the mocked platform. mix_in_target_platform! unless @mocked_connection rescue Train::UserError => e raise ConnectionFailure.new(e, config) rescue Train::Error => e # These are typically wrapper errors for other problems, # so we'll prefer to use e.cause over e if available. raise ConnectionFailure.new(e.cause || e, config) end
ruby
{ "resource": "" }
q19535
ChefApply.TargetHost.fetch_file_contents
train
# Returns the contents of a regular file on the remote host, or nil when
# the path does not exist or is not a regular file.
def fetch_file_contents(remote_path)
  remote_file = backend.file(remote_path)
  remote_file.content if remote_file.exist? && remote_file.file?
end
ruby
{ "resource": "" }
q19536
ChefApply.TargetResolver.targets
train
# Memoized list of TargetHost objects: expands each raw target expression
# (set-union so duplicates collapse), then builds a TargetHost per URL with
# connection options merged from the shared options and per-target config.
def targets
  return @targets unless @targets.nil?
  expanded = @split_targets.reduce([]) do |acc, target|
    acc | expand_targets(target)
  end
  @targets = expanded.map do |url|
    config = @conn_options.merge(config_for_target(url))
    TargetHost.new(config.delete(:url), config)
  end
end
ruby
{ "resource": "" }
q19537
ChefApply.RecipeLookup.load_cookbook
train
# Loads a cookbook either from an explicit directory path or by name from
# the configured local cookbook repositories. A directory that isn't a
# valid cookbook raises InvalidCookbook; a name that can't be found raises
# CookbookNotFound — unless a same-named directory exists in a repo but is
# invalid, in which case InvalidCookbook is raised instead for a clearer
# error. Chef libraries are required lazily to keep startup fast.
def load_cookbook(path_or_name) require "chef/exceptions" if File.directory?(path_or_name) cookbook_path = path_or_name # First, is there a cookbook in the specified dir that matches? require "chef/cookbook/cookbook_version_loader" begin v = Chef::Cookbook::CookbookVersionLoader.new(cookbook_path) v.load! cookbook = v.cookbook_version rescue Chef::Exceptions::CookbookNotFoundInRepo raise InvalidCookbook.new(cookbook_path) end else cookbook_name = path_or_name # Second, is there a cookbook in their local repository that matches? require "chef/cookbook_loader" cb_loader = Chef::CookbookLoader.new(cookbook_repo_paths) cb_loader.load_cookbooks_without_shadow_warning begin cookbook = cb_loader[cookbook_name] rescue Chef::Exceptions::CookbookNotFoundInRepo cookbook_repo_paths.each do |repo_path| cookbook_path = File.join(repo_path, cookbook_name) if File.directory?(cookbook_path) raise InvalidCookbook.new(cookbook_path) end end raise CookbookNotFound.new(cookbook_name, cookbook_repo_paths) end end cookbook end
ruby
{ "resource": "" }
q19538
ChefApply.RecipeLookup.find_recipe
train
# Looks up a recipe file in the cookbook. With no name, returns the default
# recipe (raising NoDefaultRecipe when absent); with a name, returns that
# recipe (raising RecipeNotFound when absent, listing available recipes).
def find_recipe(cookbook, recipe_name = nil)
  recipes = cookbook.recipe_filenames_by_name
  if recipe_name.nil?
    recipe = recipes["default"]
    raise NoDefaultRecipe.new(cookbook.root_dir, cookbook.name) if recipe.nil?
  else
    recipe = recipes[recipe_name]
    raise RecipeNotFound.new(cookbook.root_dir, recipe_name, recipes.keys, cookbook.name) if recipe.nil?
  end
  recipe
end
ruby
{ "resource": "" }
q19539
ChefApply.CLI.connect_target
train
# Reports a "connecting as <user>" status message, then performs the actual
# connection to the target host.
def connect_target(target_host, reporter)
  reporter.update(T.status.connecting(target_host.user))
  do_connect(target_host, reporter)
end
ruby
{ "resource": "" }
q19540
ChefApply.CLI.generate_local_policy
train
# Runs the GenerateLocalPolicy action for the temporary cookbook, relaying
# its progress events (:generating, :exporting, :success) to the reporter
# and delegating unknown events to handle_message. Returns the path of the
# generated policy archive.
def generate_local_policy(reporter) action = Action::GenerateLocalPolicy.new(cookbook: temp_cookbook) action.run do |event, data| case event when :generating reporter.update(TS.generate_local_policy.generating) when :exporting reporter.update(TS.generate_local_policy.exporting) when :success reporter.success(TS.generate_local_policy.success) else handle_message(event, data, reporter) end end action.archive_file_location end
ruby
{ "resource": "" }
q19541
ChefApply.CLI.converge
train
# Runs the ConvergeTarget action against the target host with the generated
# local policy, translating each progress event into a reporter update and
# the terminal events into success/error messages. Unknown events fall
# through to handle_message.
def converge(reporter, local_policy_path, target_host) reporter.update(TS.converge.converging(temp_cookbook.descriptor)) converge_args = { local_policy_path: local_policy_path, target_host: target_host } converger = Action::ConvergeTarget.new(converge_args) converger.run do |event, data| case event when :success reporter.success(TS.converge.success(temp_cookbook.descriptor)) when :converge_error reporter.error(TS.converge.failure(temp_cookbook.descriptor)) when :creating_remote_policy reporter.update(TS.converge.creating_remote_policy) when :uploading_trusted_certs reporter.update(TS.converge.uploading_trusted_certs) when :running_chef reporter.update(TS.converge.converging(temp_cookbook.descriptor)) when :reboot reporter.success(TS.converge.reboot) else handle_message(event, data, reporter) end end end
ruby
{ "resource": "" }
q19542
ChefApply.CLI.handle_message
train
# Fallback handler for action events: only :error is acted upon, marking
# the current reporter task as failed with a summary of the exception
# carried in data[0]. All other events are ignored.
def handle_message(message, data, reporter)
  return unless message == :error
  # data[0] = exception; summarize it for the task's error line.
  reporter.error(ChefApply::UI::ErrorPrinter.error_summary(data[0]))
end
ruby
{ "resource": "" }
q19543
FayeRails.RackAdapter.map
train
# Registers channel-to-controller routes on the routing extension. String
# channel keys must be valid Faye channel globs or ArgumentError is raised;
# the special :default key configures how unknown channels are treated
# (:block, :drop or :allow). Non-hash arguments are silently ignored.
def map(opts) if opts.is_a? Hash opts.each do |channel, controller| if channel.is_a? String if FayeRails::Matcher.match? '/**', channel routing_extension.map(channel, controller) else raise ArgumentError, "Invalid channel: #{channel}" end elsif channel == :default if controller == :block routing_extension.block_unknown_channels! elsif controller == :drop routing_extension.drop_unknown_channels! elsif controller == :allow routing_extension.allow_unknown_channels! end end end end end
ruby
{ "resource": "" }
q19544
ContentfulRails.MarkdownRenderer.image
train
# Redcarpet image renderer: appends the configured image query parameters
# to the image URL (choosing ? or & depending on whether the link already
# has a query string) and emits an <img> tag.
def image(link, title, alt_text)
  if @image_parameters.present?
    separator = link.include?('?') ? '&' : '?'
    link += "#{separator}#{@image_parameters.to_query}"
  end
  content_tag(:img, nil, src: link.to_s, alt: alt_text, title: title)
end
ruby
{ "resource": "" }
q19545
ContentfulRails.NestedResource.get_child_entity_from_path_by
train
# Walks a path (array of values) down the entity's children, matching each
# segment against `field` on the child entities. Recurses until the path is
# exhausted or no child matches; returns the final matched child (or nil).
def get_child_entity_from_path_by(field, children) # the next child in the path child_value = children.shift # get the child entity child = send(:children).find { |c| c.send(field) == child_value } # we have some recursion to do - we're not at the end of the array # so call this method again with a smaller set of children return child.get_child_entity_from_path_by(field, children) if child && !children.empty? child # this is the final thing in the array - return it end
ruby
{ "resource": "" }
q19546
ContentfulRails.MarkdownHelper.parse_markdown
train
# Renders a markdown string to html_safe HTML via Redcarpet using the
# project's opinionated defaults (tables, fenced code, autolink, footnotes,
# etc.), which callers may override per-call through markdown_options /
# renderer_options; image_options are forwarded to the custom renderer.
# A nil input renders as an empty string.
def parse_markdown(markdown_string, renderer_options: {}, markdown_options: {}, image_options: {}) markdown_string ||= '' markdown_opts = { no_intr_emphasis: true, tables: true, fenced_code_blocks: true, autolink: true, disable_indented_code_blocks: true, strikethrough: true, lax_spacing: true, space_after_headers: false, superscript: true, underline: true, highlight: true, footnotes: true }.merge(markdown_options) renderer_opts = { filter_html: false, # we want to allow HTML in the markdown blocks no_images: false, no_links: false, no_styles: false, escape_html: false, safe_links_only: false, with_toc_data: true, hard_wrap: true, xhtml: false, prettify: false, link_attributes: {}, image_options: image_options }.merge(renderer_options) renderer = ContentfulRails::MarkdownRenderer.new(renderer_opts) markdown = Redcarpet::Markdown.new(renderer, markdown_opts) markdown.render(markdown_string).html_safe end
ruby
{ "resource": "" }
q19547
ContentfulRails.WebhooksController.create
train
# Contentful webhook endpoint: publishes the incoming update as an
# ActiveSupport::Notification named after the X-Contentful-Topic header,
# leaving cache invalidation to whatever subscribers are configured.
# Responds with an empty JSON body.
def create # The only things we need to handle in here (for now at least) are entries. # If there's been an update or a deletion, we just remove the cached timestamp. # The updated_at method which is included in ContentfulModel::Base in this gem # will check the cache first before making the call to the API. # We can then just use normal Rails russian doll caching without expensive API calls. request.format = :json update_type = request.headers['HTTP_X_CONTENTFUL_TOPIC'] # All we do here is publish an ActiveSupport::Notification, which is subscribed to # elsewhere. In this gem are subscription options for timestamp or object caching, # implement your own and subscribe in an initializer. ActiveSupport::Notifications.instrument("Contentful.#{update_type}", params) # must return an empty response render body: nil end
ruby
{ "resource": "" }
q19548
Govspeak.AttachmentPresenter.references_for_title
train
# Builds the parenthesized reference suffix for an attachment title, e.g.
# ", and its references (ISBN: …, HC: … …)". Returns "" when the attachment
# has no references; singular/plural wording follows the reference count.
def references_for_title
  refs = []
  refs << "ISBN: #{attachment[:isbn]}" if attachment[:isbn].present?
  refs << "Unique reference: #{attachment[:unique_reference]}" if attachment[:unique_reference].present?
  refs << "Command paper number: #{attachment[:command_paper_number]}" if attachment[:command_paper_number].present?
  refs << "HC: #{attachment[:hoc_paper_number]} #{attachment[:parliamentary_session]}" if attachment[:hoc_paper_number].present?
  return "" if refs.empty?
  label = refs.size == 1 ? "and its reference" : "and its references"
  ", #{label} (#{refs.join(", ")})"
end
ruby
{ "resource": "" }
q19549
ModBus.TCP.open_tcp_connection
train
# Opens a TCP socket to the Modbus device, remembering the address/port on
# the instance. The connect timeout defaults to 1 second (and is written
# back into opts via ||=, matching the original). Connection refusal or
# timeout raises ModBusTimeout.
def open_tcp_connection(ipaddr, port, opts = {})
  @ipaddr, @port = ipaddr, port
  timeout = opts[:connect_timeout] ||= 1
  begin
    Socket.tcp(@ipaddr, @port, nil, nil, connect_timeout: timeout)
  rescue Errno::ECONNREFUSED, Errno::ETIMEDOUT
    raise ModBusTimeout.new, 'Timed out attempting to create connection'
  end
end
ruby
{ "resource": "" }
q19550
ModBus.Debug.logging_bytes
train
# Formats a binary message for log output as bracketed two-digit lowercase
# hex bytes, e.g. "[01][ff]".
def logging_bytes(msg)
  msg.each_byte.map { |byte| format('[%02x]', byte) }.join
end
ruby
{ "resource": "" }
q19551
ModBus.Slave.write_single_coil
train
# Writes a single coil (Modbus function 0x05): 0x0000 for off, 0xFF00 for
# any non-zero value, per the Modbus spec. Returns self for chaining.
def write_single_coil(addr, val)
  payload = val == 0 ? 0.to_word : 0xff00.to_word
  query("\x5" + addr.to_word + payload)
  self
end
ruby
{ "resource": "" }
q19552
ModBus.Slave.write_multiple_coils
train
# Writes multiple coils (Modbus function 0x0F). Packs the bit values
# LSB-first into bytes (any value > 0 counts as on), then sends address,
# coil count, byte count and the packed payload. Returns self.
def write_multiple_coils(addr, vals) nbyte = ((vals.size-1) >> 3) + 1 sum = 0 (vals.size - 1).downto(0) do |i| sum = sum << 1 sum |= 1 if vals[i] > 0 end s_val = "" nbyte.times do s_val << (sum & 0xff).chr sum >>= 8 end query("\xf" + addr.to_word + vals.size.to_word + nbyte.chr + s_val) self end
ruby
{ "resource": "" }
q19553
ModBus.Slave.read_discrete_inputs
train
# Reads `ninputs` discrete inputs starting at `addr` (Modbus function 0x02)
# and returns them as an array of bits, trimmed to the requested count
# (the response is byte-padded).
def read_discrete_inputs(addr, ninputs)
  response = query("\x2" + addr.to_word + ninputs.to_word)
  response.unpack_bits[0..ninputs - 1]
end
ruby
{ "resource": "" }
q19554
ModBus.Slave.write_multiple_registers
train
# Writes multiple holding registers (Modbus function 0x10): address,
# register count, byte count (2 per register) and the word-packed values.
# Returns self for chaining.
def write_multiple_registers(addr, vals)
  packed = vals.map(&:to_word).join
  query("\x10" + addr.to_word + vals.size.to_word + (vals.size * 2).chr + packed)
  self
end
ruby
{ "resource": "" }
q19555
ModBus.Slave.mask_write_register
train
# Performs a masked register write (Modbus function 0x16), letting the
# device combine the current value with the AND and OR masks. Returns self.
def mask_write_register(addr, and_mask, or_mask)
  request = "\x16" + addr.to_word + and_mask.to_word + or_mask.to_word
  query(request)
  self
end
ruby
{ "resource": "" }
q19556
ModBus.Slave.query
train
# Sends a Modbus PDU and reads the response, retrying on timeout up to
# @read_retries times. Empty responses return nil. A function code with the
# high bit set (>= 0x80) signals a Modbus exception: the exception id is
# mapped through the Exceptions table (unknown ids raise ModBusException).
# Optionally verifies the response matches the request before returning the
# payload (bytes after slave-id/function-code).
def query(request) tried = 0 response = "" begin ::Timeout.timeout(@read_retry_timeout, ModBusTimeout) do send_pdu(request) response = read_pdu end rescue ModBusTimeout => err log "Timeout of read operation: (#{@read_retries - tried})" tried += 1 retry unless tried >= @read_retries raise ModBusTimeout.new, "Timed out during read attempt" end return nil if response.size == 0 read_func = response.getbyte(0) if read_func >= 0x80 exc_id = response.getbyte(1) raise Exceptions[exc_id] unless Exceptions[exc_id].nil? raise ModBusException.new, "Unknown error" end check_response_mismatch(request, response) if raise_exception_on_mismatch response[2..-1] end
ruby
{ "resource": "" }
q19557
ModBus.ReadOnlyProxy.[]
train
# Array-style read access: an Integer key reads one value at that address,
# a Range reads a contiguous block. Anything else raises ProxyException.
def [](key)
  return @slave.send("read_#{@type}", key, 1) if key.instance_of?(0.class)
  return @slave.send("read_#{@type}s", key.first, key.count) if key.instance_of?(Range)
  raise ModBus::Errors::ProxyException, "Invalid argument, must be integer or range. Was #{key.class}"
end
ruby
{ "resource": "" }
q19558
ModBus.ReadWriteProxy.[]=
train
# Array-style write access: an Integer key writes one value, a Range writes
# a block (the range size must equal the value count). Anything else raises
# ProxyException.
def []=(key, val)
  if key.instance_of?(0.class)
    @slave.send("write_#{@type}", key, val)
  elsif key.instance_of?(Range)
    unless key.count == val.size
      raise ModBus::Errors::ProxyException, "The size of the range must match the size of the values (#{key.count} != #{val.size})"
    end
    @slave.send("write_#{@type}s", key.first, val)
  else
    raise ModBus::Errors::ProxyException, "Invalid argument, must be integer or range. Was #{key.class}"
  end
end
ruby
{ "resource": "" }
q19559
ModBus.RTU.read_rtu_response
train
# Reads a complete Modbus RTU frame from `io`. After the slave id and
# function code, the remaining length depends on the function: read
# functions (1-4) carry a byte-count prefix, write functions (5,6,15,16)
# are fixed at 6 more bytes, mask-write (22) at 8, and exception responses
# (0x80+) at 3. Unknown function codes raise IllegalFunction.
def read_rtu_response(io) # Read the slave_id and function code msg = nil while msg.nil? msg = io.read(2) end function_code = msg.getbyte(1) case function_code when 1,2,3,4 then # read the third byte to find out how much more # we need to read + CRC msg += io.read(1) msg += io.read(msg.getbyte(2)+2) when 5,6,15,16 then # We just read in an additional 6 bytes msg += io.read(6) when 22 then msg += io.read(8) when 0x80..0xff then msg += io.read(3) else raise ModBus::Errors::IllegalFunction, "Illegal function: #{function_code}" end end
ruby
{ "resource": "" }
q19560
ModBus.RTU.crc16
train
# Computes the Modbus CRC-16 of a message using the standard table-driven
# algorithm (CrcHiTable/CrcLoTable are the precomputed lookup tables).
# Returns the 16-bit CRC as an Integer with the high byte shifted left.
def crc16(msg) crc_lo = 0xff crc_hi = 0xff msg.unpack('c*').each do |byte| i = crc_hi ^ byte crc_hi = crc_lo ^ CrcHiTable[i] crc_lo = CrcLoTable[i] end return ((crc_hi << 8) + crc_lo) end
ruby
{ "resource": "" }
q19561
Regexp::Expression.Subexpression.each_expression
train
# Yields every expression in the subexpression tree (with its index),
# visiting each node once by filtering out the traversal's :exit events.
def each_expression(include_self = false, &block)
  traverse(include_self) do |event, exp, index|
    block.call(exp, index) unless event == :exit
  end
end
ruby
{ "resource": "" }
q19562
Regexp::Expression.Subexpression.flat_map
train
# Maps over every expression in the tree, returning an array of the block's
# results; without a block, returns [expression, index] pairs.
def flat_map(include_self = false, &block)
  mapped = []
  each_expression(include_self) do |exp, index|
    mapped << (block ? block.call(exp, index) : [exp, index])
  end
  mapped
end
ruby
{ "resource": "" }
q19563
Clamby.Command.run
train
# Builds and executes the clamscan command line: unions the caller's args
# with the default args (deduplicating), sorts them, prepends the resolved
# executable path, and runs via Kernel#system with the configured options.
# NOTE: sorting the args assumes their relative order doesn't matter.
def run(executable, *args) executable_full = executable_path(executable) self.command = args | default_args self.command = command.sort.unshift(executable_full) system(*self.command, system_options) end
ruby
{ "resource": "" }
q19564
CouchRest.Design.view_on
train
# Executes a named view of this design document against `db`. Query options
# must use symbol keys (ArgumentError otherwise); per-view defaults are
# merged in, and `reduce: false` is forced for reduceable views unless the
# caller specifies otherwise (CouchDB requires reduce to be explicit).
def view_on db, view_name, query = {}, &block raise ArgumentError, "View query options must be set as symbols!" if query.keys.find{|k| k.is_a?(String)} view_name = view_name.to_s view_slug = "#{name}/#{view_name}" # Set the default query options query = view_defaults(view_name).merge(query) # Ensure reduce is set if dealing with a reduceable view # This is a requirement of CouchDB. query[:reduce] ||= false if can_reduce_view?(view_name) db.view(view_slug, query, &block) end
ruby
{ "resource": "" }
q19565
CouchRest.Attachments.put_attachment
train
# Uploads an attachment to this (already saved) document, updating the
# document's revision from the server response. Returns the response's
# 'ok' flag. Requires both a revision and an associated database.
def put_attachment(name, file, options={})
  raise ArgumentError, "doc must be saved" unless self.rev
  raise ArgumentError, "doc.database required to put_attachment" unless database
  response = database.put_attachment(self, name, file, options)
  self['_rev'] = response['rev']
  response['ok']
end
ruby
{ "resource": "" }
q19566
CouchRest.Attachments.fetch_attachment
train
# Retrieves the named attachment for this (already saved) document from its
# database.
#
# @param name [String] attachment name
# @raise [ArgumentError] when the document has no revision or no database
def fetch_attachment(name)
  raise ArgumentError, "doc must be saved" unless self.rev
  # BUG FIX: the message previously said "put_attachment" (copy-paste from
  # the sibling method), misleading callers about which call failed.
  raise ArgumentError, "doc.database required to fetch_attachment" unless database
  database.fetch_attachment(self, name)
end
ruby
{ "resource": "" }
q19567
CouchRest.Attachments.delete_attachment
train
# Deletes the named attachment from this document, updating the document's
# revision from the server response. Returns the response's 'ok' flag.
# Requires an associated database; `force` is passed through to it.
def delete_attachment(name, force=false)
  raise ArgumentError, "doc.database required to delete_attachment" unless database
  response = database.delete_attachment(self, name, force)
  self['_rev'] = response['rev']
  response['ok']
end
ruby
{ "resource": "" }
q19568
CouchRest.Document.uri
train
# Returns the document's URI within its database, or nil for unsaved
# documents. Pass true to append the current revision as ?rev=, or an
# Integer to append that specific revision instead.
def uri(append_rev = false) return nil if new? couch_uri = "#{database.root}/#{CGI.escape(id)}" if append_rev == true couch_uri << "?rev=#{rev}" elsif append_rev.kind_of?(Integer) couch_uri << "?rev=#{append_rev}" end couch_uri end
ruby
{ "resource": "" }
q19569
CouchRest.Connection.clean_uri
train
# Returns a copy of `uri` reduced to its scheme/host/port root: path,
# query string and fragment are stripped. The original URI object is left
# untouched (we work on a dup).
def clean_uri(uri)
  base = uri.dup
  base.path = ""
  base.query = nil
  base.fragment = nil
  base
end
ruby
{ "resource": "" }
q19570
CouchRest.Connection.prepare_http_connection
train
# Builds a new HTTPClient for this connection, honoring an instance-level
# proxy option (falling back to the class-level proxy) and applying the
# auth/SSL/timeout options before returning it.
def prepare_http_connection conn = HTTPClient.new(options[:proxy] || self.class.proxy) set_http_connection_options(conn, options) conn end
ruby
{ "resource": "" }
q19571
CouchRest.Connection.set_http_connection_options
train
# Applies connection options to an HTTPClient instance: basic auth from the
# URI's userinfo (forced, to avoid a challenge round-trip), SSL verification
# mode / client cert / client key / CA file when provided, and the three
# timeout settings (receive, connect, send).
def set_http_connection_options(conn, opts) # Authentication unless uri.user.to_s.empty? conn.force_basic_auth = true conn.set_auth(uri.to_s, uri.user, uri.password) end # SSL Certificate option mapping if opts.include?(:verify_ssl) conn.ssl_config.verify_mode = opts[:verify_ssl] ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE end conn.ssl_config.client_cert = opts[:ssl_client_cert] if opts.include?(:ssl_client_cert) conn.ssl_config.client_key = opts[:ssl_client_key] if opts.include?(:ssl_client_key) conn.ssl_config.set_trust_ca(opts[:ssl_ca_file]) if opts.include?(:ssl_ca_file) # Timeout options conn.receive_timeout = opts[:timeout] if opts.include?(:timeout) conn.connect_timeout = opts[:open_timeout] if opts.include?(:open_timeout) conn.send_timeout = opts[:read_timeout] if opts.include?(:read_timeout) end
ruby
{ "resource": "" }
q19572
CouchRest.Connection.send_request
train
# Dispatches a prepared request hash through the HTTP client: the :method
# and :uri entries are removed from `req` (mutating it) and the remainder
# is passed as request options. Stores and returns the response.
def send_request(req, &block)
  method = req.delete(:method)
  uri = req.delete(:uri)
  @last_response = @http.request(method, uri, req, &block)
end
ruby
{ "resource": "" }
q19573
CouchRest.Connection.payload_from_doc
train
# Prepares the request payload: IO-like docs (attachments) are passed
# through with a Content-Type inferred from the request path; raw mode or
# nil docs are passed untouched; everything else is JSON-encoded (using
# as_couch_json when the doc supports it).
def payload_from_doc(req, doc, opts = {}) if doc.is_a?(IO) || doc.is_a?(StringIO) || doc.is_a?(Tempfile) # attachments req[:header]['Content-Type'] = mime_for(req[:uri].path) doc elsif opts[:raw] || doc.nil? doc else MultiJson.encode(doc.respond_to?(:as_couch_json) ? doc.as_couch_json : doc) end end
ruby
{ "resource": "" }
q19574
CouchRest.Attributes.as_couch_json
train
# Serializes the attribute hash for CouchDB, recursively converting any
# value that itself responds to as_couch_json.
def as_couch_json
  _attributes.each_with_object({}) do |(key, value), out|
    out[key] = value.respond_to?(:as_couch_json) ? value.as_couch_json : value
  end
end
ruby
{ "resource": "" }
q19575
CouchRest.Server.database!
train
# Returns a Database object for `name`, creating the database on the server
# first if it doesn't exist (a HEAD request raising CouchRest::NotFound is
# the existence check).
def database!(name) connection.head name # Check if the URL is valid database(name) rescue CouchRest::NotFound # Thrown if the HTTP HEAD fails create_db(name) end
ruby
{ "resource": "" }
q19576
CouchRest.Server.next_uuid
train
# Returns the next server-generated UUID, refilling the local cache from
# the CouchDB _uuids endpoint (in batches of `count`) when it runs dry.
def next_uuid(count = @uuid_batch_count)
  if uuids.nil? || uuids.empty?
    @uuids = connection.get("_uuids?count=#{count}")["uuids"]
  end
  uuids.pop
end
ruby
{ "resource": "" }
q19577
CouchRest.RestAPI.get
train
# Perform an HTTP GET against +url+, delegating to the shared connection
# helper which yields the parsed URI and the connection object.
def get(url, options = {})
  connection(url, options) { |uri, conn| conn.get(uri.request_uri, options) }
end
ruby
{ "resource": "" }
q19578
CouchRest.RestAPI.put
train
# Perform an HTTP PUT of +doc+ (may be nil) against +url+ via the shared
# connection helper.
def put(url, doc = nil, options = {})
  connection(url, options) { |uri, conn| conn.put(uri.request_uri, doc, options) }
end
ruby
{ "resource": "" }
q19579
CouchRest.RestAPI.delete
train
# Perform an HTTP DELETE against +url+ via the shared connection helper.
def delete(url, options = {})
  connection(url, options) { |uri, conn| conn.delete(uri.request_uri, options) }
end
ruby
{ "resource": "" }
q19580
CouchRest.Database.replicate_from
train
# Pull replication: replicate +other_db+ into this database (this db is
# the replication target).
def replicate_from(other_db, continuous = false, create_target = false, doc_ids = nil)
  opts = { :target => name, :create_target => create_target, :doc_ids => doc_ids }
  replicate(other_db, continuous, opts)
end
ruby
{ "resource": "" }
q19581
CouchRest.Database.replicate_to
train
# Push replication: replicate this database into +other_db+ (this db is
# the replication source).
def replicate_to(other_db, continuous = false, create_target = false, doc_ids = nil)
  opts = { :source => name, :create_target => create_target, :doc_ids => doc_ids }
  replicate(other_db, continuous, opts)
end
ruby
{ "resource": "" }
q19582
CouchRest.Database.get!
train
# Fetch document +id+, raising on HTTP failure. Hash responses are
# wrapped as Design (for _design ids) or Document and tied back to this
# database; non-Hash responses are returned untouched.
def get!(id, params = {})
  url = CouchRest.paramify_url("#{path}/#{escape_docid(id)}", params)
  result = connection.get(url)
  return result unless result.is_a?(Hash)
  klass = /^_design/ =~ result["_id"] ? Design : Document
  doc = klass.new(result)
  doc.database = self
  doc
end
ruby
{ "resource": "" }
q19583
CouchRest.Database.bulk_save
train
# POST a batch of documents to CouchDB's _bulk_docs endpoint.
#
# docs - array of document hashes; when nil, the internal bulk-save
#        cache is flushed instead.
# opts - :use_uuids (default true) assigns server UUIDs to docs lacking
#        an '_id'; :all_or_nothing (default false) enables CouchDB's
#        transactional bulk mode.
#
# Returns the per-document result array from the server; each saved
# doc has its '_rev' updated in place from the response.
def bulk_save(docs = nil, opts = {})
  opts = { :use_uuids => true, :all_or_nothing => false }.update(opts)
  # No explicit docs: drain the accumulated cache.
  if docs.nil?
    docs = @bulk_save_cache
    @bulk_save_cache = []
  end
  if opts[:use_uuids]
    # Split docs into those that already have an _id and those that don't.
    ids, noids = docs.partition{|d|d['_id']}
    uuid_count = [noids.length, @server.uuid_batch_count].max
    noids.each do |doc|
      # NOTE(review): bare `rescue nil` hides any error from the UUID
      # fetch; the doc is then sent without an _id.
      nextid = server.next_uuid(uuid_count) rescue nil
      doc['_id'] = nextid if nextid
    end
  end
  request_body = {:docs => docs}
  if opts[:all_or_nothing]
    request_body[:all_or_nothing] = true
  end
  results = connection.post "#{path}/_bulk_docs", request_body
  # Map results back onto the submitted docs so callers see fresh _revs.
  # NOTE(review): `unless docs.nil?` is dead code — docs was replaced by
  # the cache above when nil.
  docs_by_id = Hash[docs.map { |doc| [doc['_id'], doc] }] unless docs.nil?
  results.each { |r| docs_by_id[r['id']]['_rev'] = r['rev'] if r['ok'] } unless results.nil?
  results
end
ruby
{ "resource": "" }
q19584
CouchRest.Database.update_doc
train
# Fetch a document, yield it to the caller for mutation, then save it —
# retrying the whole fetch/yield/save cycle on update conflicts
# (HTTP 409) up to +update_limit+ times. Any other request failure is
# raised immediately; if every attempt conflicts, the last conflict
# error is raised. Returns the successfully saved document.
def update_doc(doc_id, params = {}, update_limit = 10)
  resp = { 'ok' => false }
  last_fail = nil
  doc = nil
  while !resp['ok'] && update_limit > 0
    doc = self.get(doc_id, params)
    yield doc
    begin
      resp = self.save_doc doc
    rescue CouchRest::RequestFailed => e
      raise e unless e.http_code == 409 # only update collisions retry
      update_limit -= 1
      last_fail = e
    end
  end
  raise last_fail unless resp['ok']
  doc
end
ruby
{ "resource": "" }
q19585
CouchRest.Database.put_attachment
train
# Upload an attachment named +name+ for +doc+. Plain String content is
# wrapped in a StringIO so the connection layer can stream it like any
# other IO.
def put_attachment(doc, name, file, options = {})
  body = file.is_a?(String) ? StringIO.new(file) : file
  connection.put path_for_attachment(doc, name), body, options
end
ruby
{ "resource": "" }
q19586
CouchRest.Database.delete_attachment
train
# Delete attachment +name+ from +doc+. When +force+ is true, a failed
# delete (e.g. a 409 revision conflict) is retried once against a
# freshly fetched copy of the document.
#
# Fixes two defects in the original:
# * `rescue Exception` swallowed signals, SystemExit and fatal errors —
#   narrowed to StandardError.
# * In the non-force branch the exception object was *returned* instead
#   of raised, silently hiding the failure from callers — now re-raised.
def delete_attachment(doc, name, force=false)
  attach_path = path_for_attachment(doc, name)
  begin
    connection.delete(attach_path)
  rescue StandardError => error
    raise error unless force
    # Re-fetch the doc to pick up the latest _rev, then retry once.
    doc = get(doc['_id'])
    attach_path = path_for_attachment(doc, name)
    connection.delete(attach_path)
  end
end
ruby
{ "resource": "" }
q19587
CouchRest::Model.Property.cast_value
train
# Typecast +value+ for this property and associate the result with
# +parent+. Blank values collapse to nil unless the property allows
# blanks.
def cast_value(parent, value)
  return nil if !allow_blank && value.to_s.empty?
  casted = typecast_value(parent, self, value)
  associate_casted_value_to_parent(parent, casted)
end
ruby
{ "resource": "" }
q19588
CouchRest::Model.Property.build
train
# Instantiate this property's type through its configured initializer.
# +@init_method+ may be a Proc (invoked directly with the arguments) or
# a method name sent to the type class. Raises if no type is set.
def build(*args)
  raise StandardError, "Cannot build property without a class" if @type.nil?
  initializer = @init_method
  return initializer.call(*args) if initializer.is_a?(Proc)
  @type.send(initializer, *args)
end
ruby
{ "resource": "" }
q19589
ActionDispatch::Routing.Mapper.front_end
train
# Routing DSL entry point: mount a front-end-builds app named +name+ at
# +path+ (defaults to the name). Declares a POST endpoint for pushing
# new builds and a constrained GET endpoint for serving the best build,
# then registers the resolved URL with FrontEndBuilds::App.
def front_end(name, path = name, options = {})
  defaults = { app_name: name }.merge(options)
  # Create a new build for this app.
  # NOTE(review): this POST route uses a bare { app_name: name } rather
  # than the merged +defaults+ — confirm options are meant to apply only
  # to the GET route.
  post(
    "#{path}" => "front_end_builds/builds#create",
    defaults: { app_name: name }
  )
  # Get a build for this app (only for HTML-capable requests, per the
  # routing constraint).
  constraints FrontEndBuilds::HtmlRoutingConstraint.new do
    get(
      "/#{path}/(*path)" => "front_end_builds/bests#show",
      defaults: defaults
    )
    # Need a better way to do this
    # Dig the just-created route back out of the route set by matching
    # its defaults, so its formatted URL can be registered below.
    front_end_route = Rails.application.routes.routes.routes.find do |route|
      route.defaults == defaults.merge(
        controller: "front_end_builds/bests",
        action: "show"
      )
    end
    FrontEndBuilds::App.register_url(name, front_end_route.format({}))
  end
end
ruby
{ "resource": "" }
q19590
Mongoid.Paranoia.restore
train
# Bring a soft-deleted (paranoid) document back to life inside the
# :restore callback chain. Atomically $unsets the paranoid field on the
# stored document, clears the in-memory deleted_at attribute and the
# destroyed flag, and — when opts[:recursive] is true — restores
# dependent relations as well. Returns true.
def restore(opts = {})
  run_callbacks(:restore) do
    # Remove the soft-delete marker from the persisted document.
    _paranoia_update("$unset" => { paranoid_field => true })
    # NOTE(review): this hard-codes "deleted_at" while the update above
    # uses paranoid_field — confirm they stay in sync when the field
    # name is customised.
    attributes.delete("deleted_at")
    @destroyed = false
    restore_relations if opts[:recursive]
    true
  end
end
ruby
{ "resource": "" }
q19591
SanitizeEmail.OverriddenAddresses.good_listize
train
# Filter +real_addresses+ down to the good-listed set, then (only when
# something survived) strip any bad-listed entries from the result.
def good_listize(real_addresses)
  filtered = clean_addresses(real_addresses, :good_list)
  filtered = clean_addresses(filtered, :bad_list) unless filtered.empty?
  filtered
end
ruby
{ "resource": "" }
q19592
Nyaplot.Color.to_html
train
# Render the colour list as a small HTML table: a header row holding the
# colour names and a second row of coloured swatch cells.
def to_html
  headers = @source.map { |color| "<th>" + color + "</th>" }.join
  swatches = @source.map { |color| "<td style=\"background-color:" + color + ";\">&nbsp;</td>" }.join
  '<table><tr>' + headers + "</tr><tr>" + swatches + '</tr></table>'
end
ruby
{ "resource": "" }
q19593
Nyaplot.Exportable.generate_html
train
# Render the ERB template at +temp_path+ (resolved relative to this
# source file) into an HTML fragment for the plot.
#
# The locals below (url, id, model, template) look unused but are
# consumed by the template through +binding+ — do not remove or rename
# them.
def generate_html(temp_path)
  path = File.expand_path(temp_path, __FILE__)
  url = Nyaplot.get_url           # JS library location for the template
  id = SecureRandom.uuid          # unique DOM id for this plot
  model = to_json                 # serialised plot model
  template = File.read(path)
  ERB.new(template).result(binding)
end
ruby
{ "resource": "" }
q19594
Nyaplot.Exportable.export_html
train
# Write a standalone HTML page containing the plot to +path+ and return
# a human-readable confirmation string.
#
# +body+ and +to_png+ look unused but are consumed by the static_html
# template through +binding+ — do not remove or rename them.
def export_html(path="./plot.html", to_png=false)
  path = File.expand_path(path, Dir::pwd)
  body = generate_html("../templates/iruby.erb")
  temp_path = File.expand_path("../templates/static_html.erb", __FILE__)
  template = File.read(temp_path)
  # NOTE(review): `num` (the byte count from File.write) appears unused;
  # confirm the template does not reference it before removing.
  num = File.write(path, ERB.new(template).result(binding))
  "Plot was saved to " + path
end
ruby
{ "resource": "" }
q19595
Nyaplot.Plot3D.add
train
# Plot three coordinate arrays (x, y, z) as a diagram of +type+ by
# wrapping them in a throw-away DataFrame with fixed column labels.
def add(type, *data)
  xs, ys, zs = data
  frame = DataFrame.new({x: xs, y: ys, z: zs})
  add_with_df(frame, type, :x, :y, :z)
end
ruby
{ "resource": "" }
q19596
Nyaplot.Plot3D.add_with_df
train
# Build a Diagram3D over +df+ using the given column +labels+, register
# it on this plot's diagram list, and return it so the caller can keep
# configuring it.
def add_with_df(df, type, *labels)
  diagram = Diagram3D.new(df, type, labels)
  get_property(:diagrams).push(diagram)
  diagram
end
ruby
{ "resource": "" }
q19597
Nyaplot.Plot3D.export_html
train
# Save this plot to a standalone HTML file via a fresh Frame. When no
# +path+ is given, a random "./plot-<uuid>.html" name is generated.
def export_html(path=nil)
  require 'securerandom'
  path = "./plot-" + SecureRandom.uuid().to_s + ".html" if path.nil?
  frame = Frame.new
  frame.add(self)
  frame.export_html(path)
end
ruby
{ "resource": "" }
q19598
Librarian.SpecChangeSet.analyze
train
# Compare the spec against the lock and compute the set of manifests to
# keep. Memoized in @analyze. When spec and lock agree, the lock's
# manifests are returned unchanged; otherwise the lock manifests are
# pruned: deep-keep the still-valid subtrees, shallow-strip the rest.
#
# NOTE(review): the early `return` exits the method from inside the
# memoizing `begin` block, so the "same" result is never cached in
# @analyze — confirm that is intended.
def analyze
  @analyze ||= begin
    debug { "Analyzing spec and lock:" }
    if same?
      debug { " Same!" }
      return lock.manifests
    end
    # Trace each category of difference between spec and lock.
    debug { " Removed:" } ; removed_dependency_names.each { |name| debug { " #{name}" } }
    debug { " ExplicitRemoved:" } ; explicit_removed_dependency_names.each { |name| debug { " #{name}" } }
    debug { " Added:" } ; added_dependency_names.each { |name| debug { " #{name}" } }
    debug { " NonMatchingAdded:" } ; nonmatching_added_dependency_names.each { |name| debug { " #{name}" } }
    debug { " Changed:" } ; changed_dependency_names.each { |name| debug { " #{name}" } }
    debug { " DeepKeep:" } ; deep_keep_manifest_names.each { |name| debug { " #{name}" } }
    debug { " ShallowStrip:" } ; shallow_strip_manifest_names.each { |name| debug { " #{name}" } }
    # Prune the locked manifest set down to what the spec still needs.
    manifests = ManifestSet.new(lock_manifests)
    manifests.deep_keep!(deep_keep_manifest_names)
    manifests.shallow_strip!(shallow_strip_manifest_names)
    manifests.to_a
  end
end
ruby
{ "resource": "" }
q19599
Librarian.ManifestSet.dependencies_of
train
# Breadth-first walk of the dependency graph starting from +names+,
# returning every reachable manifest name exactly once. Accepts any
# enumerable of strings; the input is not mutated.
def dependencies_of(names)
  queue = names.is_a?(Array) ? names.dup : names.to_a
  assert_strings!(queue)
  seen = Set.new
  until queue.empty?
    current = queue.shift
    next if seen.include?(current)
    seen << current
    queue.concat(index[current].dependencies.map(&:name))
  end
  seen.to_a
end
ruby
{ "resource": "" }