_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q22200
CFPropertyList.Binary.read_binary_string
train
# Read a raw byte string of +length+ bytes from +fd+, record it in the
# uniquing table, and wrap it in a CFString.
def read_binary_string(fname,fd,length)
  raw = read_fd(fd, length)
  @unique_table[raw] = true unless @unique_table.has_key?(raw)
  CFString.new(raw)
end
ruby
{ "resource": "" }
q22201
CFPropertyList.Binary.read_binary_unicode_string
train
# Read a UTF-16BE string from +fd+ and return it as a UTF-8 CFString.
# +length+ counts UTF-16 code units (2 bytes each), so 2*length bytes are
# read; characters encoded as surrogate pairs occupy two units.
def read_binary_unicode_string(fname,fd,length)
  raw = fd.read(2*length)
  @unique_table[raw] = true unless @unique_table.has_key?(raw)
  CFString.new(Binary.charset_convert(raw,"UTF-16BE","UTF-8"))
end
ruby
{ "resource": "" }
q22202
CFPropertyList.Binary.read_binary_array
train
# Read a binary-plist array of +length+ object references from +fd+ and
# return a CFArray of the fully parsed member objects.
def read_binary_array(fname,fd,length)
  ary = []
  # first: read object refs
  if(length != 0)
    buff = fd.read(length * @object_ref_size)
    objects = unpack_with_size(@object_ref_size, buff) #buff.unpack(@object_ref_size == 1 ? "C*" : "n*")
    # now: read objects
    0.upto(length-1) do |i|
      object = read_binary_object_at(fname,fd,objects[i])
      ary.push object
    end
  end
  CFArray.new(ary)
end
ruby
{ "resource": "" }
q22203
CFPropertyList.Binary.read_binary_dict
train
# Read a binary-plist dictionary of +length+ key/value reference pairs
# from +fd+ (all key refs first, then all value refs) and return a
# CFDictionary of the parsed objects, keyed by each key object's value.
def read_binary_dict(fname,fd,length)
  dict = {}
  # first: read keys
  if(length != 0) then
    buff = fd.read(length * @object_ref_size)
    keys = unpack_with_size(@object_ref_size, buff)
    # second: read object refs
    buff = fd.read(length * @object_ref_size)
    objects = unpack_with_size(@object_ref_size, buff)
    # read real keys and objects
    0.upto(length-1) do |i|
      key = read_binary_object_at(fname,fd,keys[i])
      object = read_binary_object_at(fname,fd,objects[i])
      dict[key.value] = object
    end
  end
  CFDictionary.new(dict)
end
ruby
{ "resource": "" }
q22204
CFPropertyList.Binary.read_binary_object
train
# Read one binary-plist object starting at the current position of +fd+.
# The marker byte carries the object type in its high nibble and the
# length in its low nibble; a low nibble of 0xF (except for type 0)
# means the real length follows as a separate integer object.
def read_binary_object(fname,fd)
  # first: read the marker byte
  buff = fd.read(1)
  object_length = buff.unpack("C*")
  object_length = object_length[0] & 0xF
  buff = buff.unpack("H*")
  object_type = buff[0][0].chr
  if(object_type != "0" && object_length == 15) then
    # extended length: next object is an integer holding the real length
    object_length = read_binary_object(fname,fd)
    object_length = object_length.value
  end
  case object_type
  when '0' # null, false, true, fillbyte
    read_binary_null_type(object_length)
  when '1' # integer
    read_binary_int(fname,fd,object_length)
  when '2' # real
    read_binary_real(fname,fd,object_length)
  when '3' # date
    read_binary_date(fname,fd,object_length)
  when '4' # data
    read_binary_data(fname,fd,object_length)
  when '5' # byte string, usually utf8 encoded
    read_binary_string(fname,fd,object_length)
  when '6' # unicode string (utf16be)
    read_binary_unicode_string(fname,fd,object_length)
  when '8'
    # uid (as used by keyed archives)
    CFUid.new(read_binary_int(fname, fd, object_length).value)
  when 'a' # array
    read_binary_array(fname,fd,object_length)
  when 'd' # dictionary
    read_binary_dict(fname,fd,object_length)
  end
end
ruby
{ "resource": "" }
q22205
CFPropertyList.Binary.string_to_binary
train
# Serialize a string into the binary object table and return its table
# index. Non-ASCII strings are stored as UTF-16BE (marker nibble 0b0110),
# plain ones as raw bytes (0b0101). The index is memoized per string in
# @unique_table so identical strings are written only once.
def string_to_binary(val)
  val = val.to_s
  @unique_table[val] ||= begin
    if !Binary.ascii_string?(val)
      # non-ASCII: convert to UTF-16BE before writing
      val = Binary.charset_convert(val,"UTF-8","UTF-16BE")
      bdata = Binary.type_bytes(0b0110, Binary.charset_strlen(val,"UTF-16BE"))
      val.force_encoding("ASCII-8BIT") if val.respond_to?("encode")
      @object_table[@written_object_count] = bdata << val
    else
      bdata = Binary.type_bytes(0b0101,val.bytesize)
      @object_table[@written_object_count] = bdata << val
    end
    @written_object_count += 1
    # memoized value: index the string was written at
    @written_object_count - 1
  end
end
ruby
{ "resource": "" }
q22206
CFPropertyList.Binary.int_to_binary
train
# Encode an integer as a binary-plist int object (marker nibble 0b0001).
def int_to_binary(value)
  # Note: nbytes is actually an exponent. number of bytes = 2**nbytes.
  nbytes = 0
  nbytes = 1 if value > 0xFF # 2 byte unsigned integer
  nbytes += 1 if value > 0xFFFF # 4 byte unsigned integer
  nbytes += 1 if value > 0xFFFFFFFF # 8 byte unsigned integer
  nbytes += 1 if value > 0x7FFFFFFFFFFFFFFF # 16 bytes: value stored in the lower half
  nbytes = 3 if value < 0 # signed integers always stored in 8 bytes
  Binary.type_bytes(0b0001, nbytes) <<
    if nbytes < 4
      # pack directive by exponent: 1, 2, 4 or 8 big-endian bytes
      [value].pack(["C", "n", "N", "q>"][nbytes])
    else # nbytes == 4
      [0,value].pack("Q>Q>")
    end
end
ruby
{ "resource": "" }
q22207
CFPropertyList.Binary.num_to_binary
train
# Serialize a CFInteger or CFReal into the object table; returns the
# index it was written at.
def num_to_binary(value)
  encoded = value.is_a?(CFInteger) ? int_to_binary(value.value) : real_to_binary(value.value)
  index = @written_object_count
  @object_table[index] = encoded
  @written_object_count += 1
  index
end
ruby
{ "resource": "" }
q22208
CFPropertyList.Binary.array_to_binary
train
# Serialize a CFArray into the object table (marker nibble 0b1010);
# returns the index reserved for it. The slot is reserved before the
# members are written so the array precedes its children.
def array_to_binary(val)
  saved_object_count = @written_object_count
  @written_object_count += 1
  #@object_refs += val.value.size
  values = val.value.map { |v| v.to_binary(self) }
  bdata = Binary.type_bytes(0b1010, val.value.size) <<
    Binary.pack_int_array_with_size(object_ref_size(@object_refs), values)
  @object_table[saved_object_count] = bdata
  saved_object_count
end
ruby
{ "resource": "" }
q22209
CFPropertyList.Binary.dict_to_binary
train
# Serialize a CFDictionary into the object table (marker nibble 0b1101);
# returns the index reserved for it. Reference layout: all key refs
# first, then all value refs, matching the binary plist format.
def dict_to_binary(val)
  saved_object_count = @written_object_count
  @written_object_count += 1
  #@object_refs += val.value.keys.size * 2
  keys_and_values = val.value.keys.map { |k| CFString.new(k).to_binary(self) }
  keys_and_values.concat(val.value.values.map { |v| v.to_binary(self) })
  bdata = Binary.type_bytes(0b1101,val.value.size) <<
    Binary.pack_int_array_with_size(object_ref_size(@object_refs), keys_and_values)
  @object_table[saved_object_count] = bdata
  return saved_object_count
end
ruby
{ "resource": "" }
q22210
S3.Connection.request
train
# Build and send an HTTP request to S3.
# options: :host (default S3.host), :path (required), :body, :params
# (String or Hash), :headers.
def request(method, options)
  host = options.fetch(:host, S3.host)
  path = options.fetch(:path)
  body = options.fetch(:body, nil)
  params = options.fetch(:params, {})
  headers = options.fetch(:headers, {})
  # Must be done before adding params
  # Encodes all characters except forward-slash (/) and explicitly legal URL characters
  path = Addressable::URI.escape(path)
  if params
    params = params.is_a?(String) ? params : self.class.parse_params(params)
    path << "?#{params}"
  end
  request = Request.new(@chunk_size, method.to_s.upcase, !!body, method.to_s.upcase != "HEAD", path)
  headers = self.class.parse_headers(headers)
  headers.each do |key, value|
    request[key] = value
  end
  if body
    if body.respond_to?(:read)
      # IO-like bodies are streamed
      request.body_stream = body
    else
      request.body = body
    end
    # NOTE(review): probes respond_to?(:lstat) but then calls stat —
    # presumably fine for File-like objects which have both; confirm.
    request.content_length = body.respond_to?(:lstat) ? body.stat.size : body.size
  end
  send_request(host, request)
end
ruby
{ "resource": "" }
q22211
S3.Parser.parse_acl
train
# Parse an S3 AccessControlPolicy XML document into a hash of grants.
def parse_acl(xml)
  grants = {}
  document = rexml_document(xml)
  document.elements.each("AccessControlPolicy/AccessControlList/Grant") do |grant_node|
    grants.update(extract_grantee(grant_node))
  end
  grants
end
ruby
{ "resource": "" }
q22212
S3.Object.temporary_url
train
# Build a pre-signed, time-limited URL for this object
# (defaults to one hour from now).
def temporary_url(expires_at = Time.now + 3600)
  signature = Signature.generate_temporary_url_signature(
    :bucket => name,
    :resource => key,
    :expires_at => expires_at,
    :secret_access_key => secret_access_key
  )
  expires = expires_at.to_i
  "#{url}?AWSAccessKeyId=#{self.bucket.service.access_key_id}&Expires=#{expires}&Signature=#{signature}"
end
ruby
{ "resource": "" }
q22213
S3.Bucket.save
train
# Create the bucket. Accepts either an options hash or a bare location
# value (wrapped as :location for backward compatibility). Returns true.
def save(options = {})
  opts = options.is_a?(Hash) ? options : { :location => options }
  create_bucket_configuration(opts)
  true
end
ruby
{ "resource": "" }
q22214
WebTranslateIt.String.translation_for
train
# Fetch this string's translation for +locale+: first from the in-memory
# collection, then from the WebTranslateIt API. Returns a Translation,
# nil when none exists, or false after exhausting timeout retries.
def translation_for(locale)
  success = true
  tries ||= 3
  translation = self.translations.detect{ |t| t.locale == locale }
  return translation if translation
  # NOTE(review): `new_record` without `?` — presumably an attribute
  # reader on this class; confirm it is not meant to be `new_record?`.
  return nil if self.new_record
  request = Net::HTTP::Get.new("/api/projects/#{Connection.api_key}/strings/#{self.id}/locales/#{locale}/translations.yaml")
  WebTranslateIt::Util.add_fields(request)
  begin
    response = Util.handle_response(Connection.http_connection.request(request), true, true)
    hash = YAML.load(response)
    return nil if hash.empty?
    translation = WebTranslateIt::Translation.new(hash)
    return translation
  rescue Timeout::Error
    puts "Request timeout. Will retry in 5 seconds."
    if (tries -= 1) > 0
      sleep(5)
      retry
    else
      success = false
    end
  end
  # reached only when retries are exhausted: returns false
  success
end
ruby
{ "resource": "" }
q22215
WebTranslateIt.TranslationFile.fetch
train
# Download this translation file from WebTranslateIt unless the local
# and remote checksums already match (or +force+ is set). Retries
# timeouts up to 3 times; prints a progress row; returns true on success.
def fetch(http_connection, force = false)
  success = true
  tries ||= 3
  display = []
  display.push(self.file_path)
  display.push "#{StringUtil.checksumify(self.local_checksum.to_s)}..#{StringUtil.checksumify(self.remote_checksum.to_s)}"
  if !File.exist?(self.file_path) or force or self.remote_checksum != self.local_checksum
    begin
      request = Net::HTTP::Get.new(api_url)
      WebTranslateIt::Util.add_fields(request)
      # create intermediate directories for the target path if needed
      FileUtils.mkpath(self.file_path.split('/')[0..-2].join('/')) unless File.exist?(self.file_path) or self.file_path.split('/')[0..-2].join('/') == ""
      begin
        response = http_connection.request(request)
        # only write the file on HTTP 200
        File.open(self.file_path, 'wb'){ |file| file << response.body } if response.code.to_i == 200
        display.push Util.handle_response(response)
      rescue Timeout::Error
        puts StringUtil.failure("Request timeout. Will retry in 5 seconds.")
        if (tries -= 1) > 0
          sleep(5)
          retry
        else
          success = false
        end
      rescue
        display.push StringUtil.failure("An error occured: #{$!}")
        success = false
      end
    end
  else
    display.push StringUtil.success("Skipped")
  end
  print ArrayUtil.to_columns(display)
  return success
end
ruby
{ "resource": "" }
q22216
WebTranslateIt.TranslationFile.upload
train
# Push this file to WebTranslateIt unless the checksums match (or
# +force+ is set). Retries timeouts up to 3 times; returns true on
# success, false otherwise.
# BUG FIX: File.exists? was deprecated and removed in Ruby 3.2 — use
# File.exist? instead.
def upload(http_connection, merge=false, ignore_missing=false, label=nil, low_priority=false, minor_changes=false, force=false)
  success = true
  tries ||= 3
  display = []
  display.push(self.file_path)
  display.push "#{StringUtil.checksumify(self.local_checksum.to_s)}..#{StringUtil.checksumify(self.remote_checksum.to_s)}"
  if File.exist?(self.file_path)
    if force or self.remote_checksum != self.local_checksum
      File.open(self.file_path) do |file|
        begin
          request = Net::HTTP::Put::Multipart.new(api_url,
            { "file" => UploadIO.new(file, "text/plain", file.path),
              "merge" => merge,
              "ignore_missing" => ignore_missing,
              "label" => label,
              "low_priority" => low_priority,
              "minor_changes" => minor_changes })
          WebTranslateIt::Util.add_fields(request)
          display.push Util.handle_response(http_connection.request(request))
        rescue Timeout::Error
          puts StringUtil.failure("Request timeout. Will retry in 5 seconds.")
          if (tries -= 1) > 0
            sleep(5)
            retry
          else
            success = false
          end
        rescue
          display.push StringUtil.failure("An error occured: #{$!}")
          success = false
        end
      end
    else
      display.push StringUtil.success("Skipped")
    end
    puts ArrayUtil.to_columns(display)
  else
    puts StringUtil.failure("Can't push #{self.file_path}. File doesn't exist.")
  end
  return success
end
ruby
{ "resource": "" }
q22217
WebTranslateIt.TranslationFile.create
train
# Create this file on WebTranslateIt. Retries timeouts up to 3 times;
# returns true on success, false otherwise.
# BUG FIX: File.exists? was deprecated and removed in Ruby 3.2 — use
# File.exist? instead.
def create(http_connection, low_priority=false)
  success = true
  tries ||= 3
  display = []
  display.push file_path
  display.push "#{StringUtil.checksumify(self.local_checksum.to_s)}..[ ]"
  if File.exist?(self.file_path)
    File.open(self.file_path) do |file|
      begin
        request = Net::HTTP::Post::Multipart.new(api_url_for_create,
          { "name" => self.file_path,
            "file" => UploadIO.new(file, "text/plain", file.path),
            "low_priority" => low_priority })
        WebTranslateIt::Util.add_fields(request)
        display.push Util.handle_response(http_connection.request(request))
        puts ArrayUtil.to_columns(display)
      rescue Timeout::Error
        puts StringUtil.failure("Request timeout. Will retry in 5 seconds.")
        if (tries -= 1) > 0
          sleep(5)
          retry
        else
          success = false
        end
      rescue
        display.push StringUtil.failure("An error occured: #{$!}")
        success = false
      end
    end
  else
    puts StringUtil.failure("\nFile #{self.file_path} doesn't exist!")
  end
  return success
end
ruby
{ "resource": "" }
q22218
WebTranslateIt.TranslationFile.delete
train
# Delete this file on WebTranslateIt. Retries timeouts up to 3 times;
# returns true on success, false otherwise.
# BUG FIX: File.exists? was deprecated and removed in Ruby 3.2 — use
# File.exist? instead.
def delete(http_connection)
  success = true
  tries ||= 3
  display = []
  display.push file_path
  if File.exist?(self.file_path)
    File.open(self.file_path) do |file|
      begin
        request = Net::HTTP::Delete.new(api_url_for_delete)
        WebTranslateIt::Util.add_fields(request)
        display.push Util.handle_response(http_connection.request(request))
        puts ArrayUtil.to_columns(display)
      rescue Timeout::Error
        puts StringUtil.failure("Request timeout. Will retry in 5 seconds.")
        if (tries -= 1) > 0
          sleep(5)
          retry
        else
          success = false
        end
      rescue
        display.push StringUtil.failure("An error occured: #{$!}")
        success = false
      end
    end
  else
    puts StringUtil.failure("\nFile #{self.file_path} doesn't exist!")
  end
  return success
end
ruby
{ "resource": "" }
q22219
SendGrid.ClassMethods.sendgrid_enable
train
# Append each recognized option (member of VALID_OPTIONS) to the
# mailer's default SendGrid options, initializing the list on first use.
def sendgrid_enable(*options)
  self.default_sg_options ||= Array.new
  options.each do |option|
    self.default_sg_options << option if VALID_OPTIONS.include?(option)
  end
end
ruby
{ "resource": "" }
q22220
ThreadSafe.AtomicReferenceCacheBackend.clear
train
# Remove all entries, bin by bin, under per-bin locks.
def clear
  return self unless current_table = table
  current_table_size = current_table.size
  deleted_count = i = 0
  while i < current_table_size
    if !(node = current_table.volatile_get(i))
      # empty bin: nothing to delete
      i += 1
    elsif (node_hash = node.hash) == MOVED
      # bin was forwarded by a concurrent resize; chase the new table
      current_table = node.key
      current_table_size = current_table.size
    elsif Node.locked_hash?(node_hash)
      decrement_size(deleted_count) # opportunistically update count
      deleted_count = 0
      node.try_await_lock(current_table, i)
    else
      # lock the bin, null out every node's value, then clear the slot
      current_table.try_lock_via_hash(i, node, node_hash) do
        begin
          deleted_count += 1 if NULL != node.value # recheck under lock
          node.value = nil
        end while node = node.next
        current_table.volatile_set(i, nil)
        i += 1
      end
    end
  end
  decrement_size(deleted_count)
  self
end
ruby
{ "resource": "" }
q22221
ThreadSafe.AtomicReferenceCacheBackend.initialize_table
train
# Lazily create the backing table on first use, racing politely with
# other initializing threads via the size-control field.
def initialize_table
  until current_table ||= table
    if (size_ctrl = size_control) == NOW_RESIZING
      Thread.pass # lost initialization race; just spin
    else
      try_in_resize_lock(current_table, size_ctrl) do
        initial_size = size_ctrl > 0 ? size_ctrl : DEFAULT_CAPACITY
        current_table = self.table = Table.new(initial_size)
        initial_size - (initial_size >> 2) # 75% load factor
      end
    end
  end
  current_table
end
ruby
{ "resource": "" }
q22222
ThreadSafe.AtomicReferenceCacheBackend.check_for_resize
train
# Rebuild (grow) the table while the element count exceeds the
# size-control threshold and capacity allows; each pass runs under the
# resize lock and publishes a new threshold.
def check_for_resize
  while (current_table = table) &&
      MAX_CAPACITY > (table_size = current_table.size) &&
      NOW_RESIZING != (size_ctrl = size_control) &&
      size_ctrl < @counter.sum
    try_in_resize_lock(current_table, size_ctrl) do
      self.table = rebuild(current_table)
      (table_size << 1) - (table_size >> 1) # 75% load factor
    end
  end
end
ruby
{ "resource": "" }
q22223
ThreadSafe.AtomicReferenceCacheBackend.split_old_bin
train
# During a resize: under the bin lock, split the bin at index +i+ of
# the old +table+ into +new_table+, then install +forwarder+ in the old
# slot so readers follow it to the new table.
def split_old_bin(table, new_table, i, node, node_hash, forwarder)
  table.try_lock_via_hash(i, node, node_hash) do
    split_bin(new_table, i, node, node_hash)
    table.volatile_set(i, forwarder)
  end
end
ruby
{ "resource": "" }
q22224
GreenAndSecure.BlockList.run
train
# Print the known chef servers, marking the currently selected one.
def run
  GreenAndSecure::check_block_setup
  puts "The available chef servers are:"
  selected = current_server
  servers.each do |server|
    marker = (server == selected) ? " [ Currently Selected ]" : ""
    puts "\t* #{server}#{marker}"
  end
end
ruby
{ "resource": "" }
q22225
AlexaVerifier.Verifier.valid?
train
# Boolean wrapper around #valid!: true when the request verifies,
# false (after printing the error) when any verification step raises.
def valid?(request)
  valid!(request)
  true
rescue AlexaVerifier::BaseError => e
  puts e
  false
end
ruby
{ "resource": "" }
q22226
AlexaVerifier.Verifier.check_that_request_is_timely
train
# Reject requests missing a timestamp or older than REQUEST_THRESHOLD
# seconds; raises AlexaVerifier::InvalidRequestError in either case.
def check_that_request_is_timely(raw_body)
  parsed = JSON.parse(raw_body)
  timestamp = parsed.fetch('request', {}).fetch('timestamp', nil)
  raise AlexaVerifier::InvalidRequestError, 'Timestamp field not present in request' if timestamp.nil?
  oldest_allowed = Time.now - REQUEST_THRESHOLD
  unless Time.parse(timestamp.to_s) >= oldest_allowed
    raise AlexaVerifier::InvalidRequestError, "Request is from more than #{REQUEST_THRESHOLD} seconds ago"
  end
end
ruby
{ "resource": "" }
q22227
AlexaVerifier.Verifier.check_that_request_is_valid
train
# Verify the certificate chain and/or the request signature, according
# to the configuration flags. A certificate that fails either check is
# evicted from the store so a bad cert cannot poison the cache for its
# whole lifetime.
# BUG FIX: the method's closing `end` was missing in the source,
# leaving the def unterminated.
def check_that_request_is_valid(signature_certificate_url, request, raw_body)
  certificate, chain = AlexaVerifier::CertificateStore.fetch(signature_certificate_url)
  if @configuration.verify_certificate? || @configuration.verify_signature?
    begin
      AlexaVerifier::Verifier::CertificateVerifier.valid!(certificate, chain) if @configuration.verify_certificate?
      check_that_request_was_signed(certificate.public_key, request, raw_body) if @configuration.verify_signature?
    rescue AlexaVerifier::InvalidCertificateError, AlexaVerifier::InvalidRequestError => error
      # We don't want to cache a certificate that fails our checks as it could lock us out of valid requests for the cache length
      AlexaVerifier::CertificateStore.delete(signature_certificate_url)
      raise error
    end
  end
end
ruby
{ "resource": "" }
q22228
AlexaVerifier.Verifier.check_that_request_was_signed
train
# Verify the request body against the Base64 HTTP_SIGNATURE header using
# the certificate's public key (SHA-1, per the Alexa spec); raises
# AlexaVerifier::InvalidRequestError on mismatch.
def check_that_request_was_signed(certificate_public_key, request, raw_body)
  decoded_signature = Base64.decode64(request.env['HTTP_SIGNATURE'])
  verified = certificate_public_key.verify(
    OpenSSL::Digest::SHA1.new,
    decoded_signature,
    raw_body
  )
  raise AlexaVerifier::InvalidRequestError, 'Signature does not match certificate provided' unless verified
end
ruby
{ "resource": "" }
q22229
TeamCityFormatter.Formatter.before_feature_element
train
# Dispatch pre-processing for a scenario or scenario outline; anything
# else is unsupported.
def before_feature_element(cuke_feature_element)
  case cuke_feature_element
  when Cucumber::Core::Ast::Scenario
    before_scenario(cuke_feature_element)
  when Cucumber::Core::Ast::ScenarioOutline
    before_scenario_outline(cuke_feature_element)
  else
    raise("unsupported feature element `#{cuke_feature_element.class.name}`")
  end
end
ruby
{ "resource": "" }
q22230
TeamCityFormatter.Formatter.after_feature_element
train
# Dispatch post-processing for a finished scenario or scenario outline,
# then reset the per-element state.
def after_feature_element(cuke_feature_element)
  case cuke_feature_element
  when Cucumber::Formatter::LegacyApi::Ast::Scenario
    after_scenario(cuke_feature_element)
  when Cucumber::Formatter::LegacyApi::Ast::ScenarioOutline
    after_scenario_outline(cuke_feature_element)
  else
    raise("unsupported feature element `#{cuke_feature_element.class.name}`")
  end
  @exception = nil
  @scenario = nil
  @scenario_outline = nil
end
ruby
{ "resource": "" }
q22231
TeamCityFormatter.Formatter.before_table_row
train
# Signal TeamCity that a scenario-outline example row is starting.
# Header rows (whose values equal the outline's column names) are skipped.
def before_table_row(cuke_table_row)
  if cuke_table_row.is_a?(Cucumber::Formatter::LegacyApi::ExampleTableRow)
    is_not_header_row = (@scenario_outline.example_column_names != cuke_table_row.values)
    if is_not_header_row
      # locate the example matching this row's values to derive the test name
      example = @scenario_outline.examples.find { |example| example.column_values == cuke_table_row.values }
      test_name = scenario_outline_test_name(@scenario_outline.name, example.column_values)
      @logger.test_started(test_name)
    end
  end
end
ruby
{ "resource": "" }
q22232
TeamCityFormatter.Formatter.after_table_row
train
# Report pass / ignored (pending) / failed for a finished scenario-outline
# example row, then mark the test finished and clear the stored exception.
def after_table_row(cuke_table_row)
  if cuke_table_row.is_a?(Cucumber::Formatter::LegacyApi::Ast::ExampleTableRow)
    is_not_header_row = (@scenario_outline.example_column_names != cuke_table_row.cells)
    if is_not_header_row
      # treat scenario-level exception as example exception
      # the exception could have been raised in a background section
      exception = (@exception || cuke_table_row.exception)
      example = @scenario_outline.examples.find { |example| example.column_values == cuke_table_row.cells }
      test_name = scenario_outline_test_name(@scenario_outline.name, example.column_values)
      if exception
        if exception.is_a? ::Cucumber::Pending
          @logger.test_ignored(test_name, 'Pending test')
        else
          @logger.test_failed(test_name, exception)
        end
      end
      @logger.test_finished(test_name)
      @exception = nil
    end
  end
end
ruby
{ "resource": "" }
q22233
TabsOnRails.Tabs.render
train
# Render the open-tabs markup, the captured block content, and the
# close-tabs markup as one html-safe string. Requires a block.
def render(&block)
  raise LocalJumpError, "no block given" unless block_given?
  opts = @options.dup
  open_opts = opts.delete(:open_tabs) || {}
  close_opts = opts.delete(:close_tabs) || {}
  html = ""
  html << open_tabs(open_opts).to_s
  html << @context.capture(self, &block)
  html << close_tabs(close_opts).to_s
  html.html_safe
end
ruby
{ "resource": "" }
q22234
ActiveStorage.Service::CloudinaryService.download
train
# Download the blob at +key+ from Cloudinary; streams in chunks to the
# block when one is given, otherwise returns the whole payload.
def download(key, &block)
  source = cloudinary_url_for_key(key)
  unless block_given?
    return instrument(:download, key: key) { Cloudinary::Downloader.download(source) }
  end
  instrument :streaming_download, key: key do
    stream_download(source, &block)
  end
end
ruby
{ "resource": "" }
q22235
ActiveStorage.Service::CloudinaryService.download_chunk
train
# Download only the byte +range+ of the blob stored at +key+.
def download_chunk(key, range)
  instrument :download_chunk, key: key, range: range do
    download_range(cloudinary_url_for_key(key), range)
  end
end
ruby
{ "resource": "" }
q22236
ActiveStorage.Service::CloudinaryService.url_for_direct_upload
train
# Build a signed URL a client can upload directly to for +key+.
def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
  instrument :url_for_direct_upload, key: key do
    options = {
      expires_in: expires_in,
      content_type: content_type,
      content_length: content_length,
      checksum: checksum,
      resource_type: 'auto'
    }
    # FIXME: Cloudinary Ruby SDK does't expose an api for signed upload url
    # The expected url is similar to the private_download_url
    # with download replaced with upload
    signed_download_url_for_public_id(key, options)
      .sub(/download/, 'upload')
  end
end
ruby
{ "resource": "" }
q22237
OptaSD.Core.parse_json
train
def parse_json(response) data = JSON.parse(response) fail OptaSD::Error.new(data), ErrorMessage.get_message(data['errorCode'].to_i) if data['errorCode'] data end
ruby
{ "resource": "" }
q22238
OptaSD.Core.parse_xml
train
# Parse an API XML response strictly (no blank text nodes). Raises
# OptaSD::Error with a translated message when the document carries an
# errorCode element; otherwise returns the Nokogiri document.
def parse_xml(response)
  data = Nokogiri::XML(response) do |config|
    config.strict.noblanks
  end
  fail OptaSD::Error.new(xml_error_to_hast(data)), ErrorMessage.get_message(data.children.first.content.to_i) if data.css('errorCode').first.present?
  data
end
ruby
{ "resource": "" }
q22239
Hermann.Producer.push
train
# Push a value (or each element of an Array) onto +topic+ (defaults to
# the producer's topic). Returns a result handle per message (may be nil
# on JRuby), or an array of them for Array input.
def push(value, opts={})
  topic = opts[:topic] || @topic
  result = nil
  if value.kind_of? Array
    # fan out element-wise and collect the per-message results
    return value.map { |e| self.push(e, opts) }
  end
  if Hermann.jruby?
    result = @internal.push_single(value, topic, opts[:partition_key], nil)
    unless result.nil?
      @children << result
    end
    # Reaping children on the push just to make sure that it does get
    # called correctly and we don't leak memory
    reap_children
  else
    # Ticking reactor to make sure that we don't inadvertantly let the
    # librdkafka callback queue overflow
    tick_reactor
    result = create_result
    @internal.push_single(value, topic, opts[:partition_key].to_s, result)
  end
  return result
end
ruby
{ "resource": "" }
q22240
Hermann.Producer.tick_reactor
train
# Tick the librdkafka reactor. If ticking raises, the error is
# propagated to every still-pending child result; completed children
# are then reaped either way.
def tick_reactor(timeout=0)
  begin
    execute_tick(rounded_timeout(timeout))
  rescue StandardError => ex
    @children.each do |child|
      # Skip over any children that should already be reaped for other
      # reasons
      next if (Hermann.jruby? ? child.fulfilled? : child.completed?)
      # Propagate errors to the remaining children
      child.internal_set_error(ex)
    end
  end
  # Reaping the children at this point will also reap any children marked
  # as errored by an exception out of #execute_tick
  return reap_children
end
ruby
{ "resource": "" }
q22241
Hermann.Producer.execute_tick
train
# Tick the internal producer. With a zero timeout this is a single
# non-blocking tick; otherwise poll every half second for up to
# +timeout+ seconds, stopping early once any events are seen.
def execute_tick(timeout)
  return @internal.tick(0) if timeout == 0
  (timeout * 2).times do
    # We're going to Thread#sleep in Ruby to avoid a
    # pthread_cond_timedwait(3) inside of librdkafka
    break if @internal.tick(0) > 0
    sleep 0.5
  end
end
ruby
{ "resource": "" }
q22242
ActiveStorage.Service::OpenStackService.change_content_type
train
# Rewrite the stored object's Content-Type header; true on success,
# false when the object does not exist.
def change_content_type(key, content_type)
  begin
    new_headers = { 'Content-Type' => content_type }
    client.post_object(container, key, new_headers)
    true
  rescue Fog::OpenStack::Storage::NotFound
    false
  end
end
ruby
{ "resource": "" }
q22243
JMS.OracleAQConnectionFactory.create_connection
train
# Create an Oracle AQ JMS connection. Optionally accepts (username,
# password) positionally, since the surrounding framework does not
# assign them before calling.
def create_connection(*args)
  # Since username and password are not assigned (see lib/jms/connection.rb:200)
  # and connection_factory.create_connection expects 2 arguments when username is not null ...
  if args.length == 2
    @username = args[0]
    @password = args[1]
  end
  # Full Qualified name causes a Java exception
  #cf = oracle.jms.AQjmsFactory.getConnectionFactory(@url, java.util.Properties.new)
  cf = AQjmsFactory.getConnectionFactory(@url, java.util.Properties.new)
  if username
    cf.createConnection(@username, @password)
  else
    cf.createConnection()
  end
end
ruby
{ "resource": "" }
q22244
JMS.SessionPool.session
train
# Check a session out of the pool and yield it to the block. On
# JMSException the broken session is closed and removed from the pool
# (not checked back in); the exception is re-raised.
def session(&block)
  s = nil
  begin
    s = @pool.checkout
    block.call(s)
  rescue javax.jms.JMSException => e
    s.close rescue nil
    @pool.remove(s)
    s = nil # Do not check back in since we have removed it
    raise e
  ensure
    @pool.checkin(s) if s
  end
end
ruby
{ "resource": "" }
q22245
JMS.SessionPool.consumer
train
# Check out a session, build a consumer on it, and yield both; the
# consumer is always closed afterwards.
def consumer(params, &block)
  session do |sess|
    c = nil
    begin
      c = sess.consumer(params)
      block.call(sess, c)
    ensure
      c.close if c
    end
  end
end
ruby
{ "resource": "" }
q22246
JMS.SessionPool.producer
train
# Check out a session, build a producer on it, and yield both; the
# producer is always closed afterwards.
def producer(params, &block)
  session do |sess|
    p = nil
    begin
      p = sess.producer(params)
      block.call(sess, p)
    ensure
      p.close if p
    end
  end
end
ruby
{ "resource": "" }
q22247
JMS.Connection.fetch_dependencies
train
# Load the JMS provider jar files (best effort: a failed jar is logged
# and skipped), then require the library's Ruby sources, which depend on
# those Java classes being present.
def fetch_dependencies(jar_list)
  jar_list.each do |jar|
    logger.debug "Loading Jar File:#{jar}"
    begin
      require jar
    rescue Exception => exc
      logger.error "Failed to Load Jar File:#{jar}", exc
    end
  end if jar_list
  require 'jms/mq_workaround'
  require 'jms/imports'
  require 'jms/message_listener_impl'
  require 'jms/message'
  require 'jms/text_message'
  require 'jms/map_message'
  require 'jms/bytes_message'
  require 'jms/object_message'
  require 'jms/session'
  require 'jms/message_consumer'
  require 'jms/message_producer'
  require 'jms/queue_browser'
end
ruby
{ "resource": "" }
q22248
JMS.Connection.session
train
# Create a session, yield it to the mandatory block, and always close
# it afterwards.
def session(params={}, &block)
  raise(ArgumentError, 'Missing mandatory Block when calling JMS::Connection#session') unless block
  sess = self.create_session(params)
  begin
    block.call(sess)
  ensure
    sess.close
  end
end
ruby
{ "resource": "" }
q22249
JMS.Connection.create_session
train
# Create a JMS session. params: :transacted (default false) and
# :options (acknowledge mode, default AUTO_ACKNOWLEDGE).
def create_session(params={})
  transacted = params[:transacted] || false
  ack_mode = params[:options] || JMS::Session::AUTO_ACKNOWLEDGE
  @jms_connection.create_session(transacted, ack_mode)
end
ruby
{ "resource": "" }
q22250
JMS.Connection.on_message
train
# Consume messages asynchronously on one or more dedicated sessions
# (params[:session_count], default 1). For transacted sessions the
# block's truthiness decides commit vs rollback; an exception triggers a
# rollback and is re-raised.
def on_message(params, &block)
  raise 'JMS::Connection must be connected prior to calling JMS::Connection::on_message' unless @sessions && @consumers
  consumer_count = params[:session_count] || 1
  consumer_count.times do
    session = self.create_session(params)
    consumer = session.consumer(params)
    if session.transacted?
      consumer.on_message(params) do |message|
        begin
          block.call(message) ? session.commit : session.rollback
        rescue => exc
          session.rollback
          # BUG FIX: was `throw exc` — Kernel#throw expects a symbol tag,
          # so throwing an exception object fails with an uncaught-throw
          # error instead of propagating the exception. Re-raise instead.
          raise exc
        end
      end
    else
      consumer.on_message(params, &block)
    end
    @consumers << consumer
    @sessions << session
  end
end
ruby
{ "resource": "" }
q22251
JMS.MessageListenerImpl.statistics
train
# Return consumption statistics collected when on_message was started
# with statistics: true; raises otherwise.
def statistics
  raise(ArgumentError, 'First call MessageConsumer::on_message with statistics: true before calling MessageConsumer::statistics()') unless @message_count
  # duration up to the last message seen, or to now if none arrived yet
  duration = (@last_time || Time.now) - @start_time
  # NOTE(review): a duration of exactly 0.0 would make the rate
  # Infinity and .to_i raise FloatDomainError — presumably never hit in
  # practice; confirm.
  {
    messages: @message_count,
    duration: duration,
    messages_per_second: (@message_count/duration).to_i
  }
end
ruby
{ "resource": "" }
q22252
CFMicro.McfCommand.override
train
# Populate config[option]: command-line input wins (optionally
# path-escaped); otherwise the block supplies a default when the value
# is still absent.
def override(config, option, escape=false, &blk)
  cli_value = input[option]
  if cli_value
    cli_value = CFMicro.escape_path(cli_value) if escape
    config[option] = cli_value
  end
  config[option] = yield unless config[option]
end
ruby
{ "resource": "" }
q22253
CFManifests.Resolver.resolve_lexically
train
# Recursively walk a manifest structure, expanding ${symbol} references
# inside strings; hashes push themselves onto the lexical context so
# nested values can resolve against their enclosing scopes.
def resolve_lexically(resolver, val, ctx)
  case val
  when Hash
    val.each_with_object({}) do |(k, v), resolved|
      resolved[k] = resolve_lexically(resolver, v, [val] + ctx)
    end
  when Array
    val.map { |v| resolve_lexically(resolver, v, ctx) }
  when String
    val.gsub(/\$\{([^\}]+)\}/) { resolve_symbol(resolver, $1, ctx) }
  else
    val
  end
end
ruby
{ "resource": "" }
q22254
CFManifests.Resolver.resolve_symbol
train
# Resolve one ${symbol}: first lexically through the context chain,
# then dynamically via the resolver; fails on an unknown symbol.
def resolve_symbol(resolver, sym, ctx)
  if found = find_symbol(sym.to_sym, ctx)
    # NOTE(review): the result of this recursive resolution is discarded
    # and the raw +found+ value returned — presumably nested references
    # are handled on a later pass; confirm this is intentional.
    resolve_lexically(resolver, found, ctx)
    found
  elsif dynamic = resolver.resolve_symbol(sym)
    dynamic
  else
    fail("Unknown symbol in manifest: #{sym}")
  end
end
ruby
{ "resource": "" }
q22255
CFManifests.Resolver.find_symbol
train
# Search each scope of the lexical context chain for +sym+; returns the
# first resolved value, or nil when no scope defines it.
def find_symbol(sym, ctx)
  ctx.each do |scope|
    resolved = resolve_in(scope, sym)
    return resolved if resolved
  end
  nil
end
ruby
{ "resource": "" }
q22256
CFManifests.Resolver.find_in_hash
train
# Walk the key path +where+ down nested hashes starting at +hash+;
# returns nil as soon as the path leaves Hash territory, otherwise the
# value at the end of the path.
def find_in_hash(hash, where)
  where.reduce(hash) do |current, key|
    return nil unless current.is_a?(Hash)
    current[key]
  end
end
ruby
{ "resource": "" }
q22257
CFManifests.Builder.build
train
# Load a manifest YAML file, merging in each parent manifest listed
# under "inherit" (in order), then strip the "inherit" key.
def build(file)
  manifest = YAML.load_file file
  raise CFManifests::InvalidManifest.new(file) unless manifest
  Array(manifest["inherit"]).each do |path|
    manifest = merge_parent(path, manifest)
  end
  manifest.delete("inherit")
  manifest
end
ruby
{ "resource": "" }
q22258
CFManifests.Builder.merge_manifest
train
# Deep-merge +child+ over +parent+: nested hashes merge recursively,
# any other conflicting value is taken from the child.
def merge_manifest(parent, child)
  deep = proc do |_key, old_val, new_val|
    if new_val.is_a?(Hash) && old_val.is_a?(Hash)
      old_val.merge(new_val, &deep)
    else
      new_val
    end
  end
  parent.merge(child, &deep)
end
ruby
{ "resource": "" }
q22259
SimCtl.List.where
train
# Filter the list by a hash of attribute => expected value; Regexp
# values match with =~, anything else with ==. A nil filter returns the
# whole list.
def where(filter)
  return self if filter.nil?
  select do |item|
    filter.all? do |key, expected|
      actual = item.send(key)
      case expected
      when Regexp then actual =~ expected
      else actual == expected
      end
    end
  end
end
ruby
{ "resource": "" }
q22260
SimCtl.Device.reload
train
# Refresh this device's state by copying every instance variable from a
# freshly fetched device with the same udid.
def reload
  fresh = SimCtl.device(udid: udid)
  fresh.instance_variables.each do |name|
    instance_variable_set(name, fresh.instance_variable_get(name))
  end
end
ruby
{ "resource": "" }
q22261
SimCtl.Device.wait
train
# Repeatedly fetch the device and yield it until the block returns
# truthy, raising Timeout::Error after +timeout+ seconds; finally
# reloads this instance with the fresh state.
def wait(timeout = SimCtl.default_timeout)
  Timeout.timeout(timeout) do
    loop { break if yield SimCtl.device(udid: udid) }
  end
  reload
end
ruby
{ "resource": "" }
q22262
SimCtl.DeviceSettings.disable_keyboard_helpers
train
# Turn off all keyboard assistance features in the simulator's
# preferences plist.
def disable_keyboard_helpers
  keyboard_keys = %w[
    KeyboardAllowPaddle
    KeyboardAssistant
    KeyboardAutocapitalization
    KeyboardAutocorrection
    KeyboardCapsLock
    KeyboardCheckSpelling
    KeyboardPeriodShortcut
    KeyboardPrediction
    KeyboardShowPredictionBar
  ]
  edit_plist(path.preferences_plist) do |plist|
    keyboard_keys.each { |key| plist[key] = false }
  end
end
ruby
{ "resource": "" }
q22263
SimCtl.DeviceSettings.set_language
train
# Make +language+ the preferred language by moving it to the front of
# the AppleLanguages list in the global preferences plist.
def set_language(language)
  key = 'AppleLanguages'
  edit_plist(path.global_preferences_plist) do |plist|
    plist[key] = [] unless plist.key?(key)
    languages = plist[key]
    languages.unshift(language).uniq!
  end
end
ruby
{ "resource": "" }
q22264
DICOM.Parent.count_all
train
# Total number of elements under this parent, counting recursively
# through every child-bearing element.
def count_all
  @tags.each_value.reduce(count) do |total, child|
    child.children? ? total + child.count_all : total
  end
end
ruby
{ "resource": "" }
q22265
DICOM.Parent.delete
train
# Remove the child element referenced by +tag_or_index+ from this
# parent. options[:no_follow] leaves the child's back-reference to this
# parent intact.
def delete(tag_or_index, options={})
  check_key(tag_or_index, :delete)
  # We need to delete the specified child element's parent reference in addition to removing it from the tag Hash.
  element = self[tag_or_index]
  if element
    element.parent = nil unless options[:no_follow]
    @tags.delete(tag_or_index)
  end
end
ruby
{ "resource": "" }
q22266
DICOM.Parent.delete_group
train
# Remove every child element belonging to the given tag group.
def delete_group(group_string)
  group(group_string).each { |element| delete(element.tag) }
end
ruby
{ "resource": "" }
q22267
DICOM.Parent.encode_children
train
# Re-encode all descendant Elements for a change of endianness,
# recursing through every child-bearing element.
def encode_children(old_endian)
  children.each do |child|
    if child.children?
      child.encode_children(old_endian)
    elsif child.is_a?(Element)
      encode_child(child, old_endian)
    end
  end
end
ruby
{ "resource": "" }
q22268
DICOM.Parent.group
train
# Return all child elements whose tag group equals +group_string+
# (case-insensitive); raises ArgumentError for non-String input.
def group(group_string)
  raise ArgumentError, "Expected String, got #{group_string.class}." unless group_string.is_a?(String)
  wanted = group_string.upcase
  children.select { |child| child.tag.group == wanted }
end
ruby
{ "resource": "" }
q22269
DICOM.Parent.handle_print
train
def handle_print(index, max_digits, max_name, max_length, max_generations, visualization, options={}) # FIXME: This method is somewhat complex, and some simplification, if possible, wouldn't hurt. elements = Array.new s = " " hook_symbol = "|_" last_item_symbol = " " nonlast_item_symbol = "| " children.each_with_index do |element, i| n_parents = element.parents.length # Formatting: Index i_s = s*(max_digits-(index).to_s.length) # Formatting: Name (and Tag) if element.tag == ITEM_TAG # Add index numbers to the Item names: name = "#{element.name} (\##{i})" else name = element.name end n_s = s*(max_name-name.length) # Formatting: Tag tag = "#{visualization.join}#{element.tag}" t_s = s*((max_generations-1)*2+9-tag.length) # Formatting: Length l_s = s*(max_length-element.length.to_s.length) # Formatting Value: if element.is_a?(Element) value = element.value.to_s else value = "" end if options[:value_max] value = "#{value[0..(options[:value_max]-3)]}.." if value.length > options[:value_max] end elements << "#{i_s}#{index} #{tag}#{t_s} #{name}#{n_s} #{element.vr} #{l_s}#{element.length} #{value}" index += 1 # If we have child elements, print those elements recursively: if element.children? 
if n_parents > 1 child_visualization = Array.new child_visualization.replace(visualization) if element == children.first if children.length == 1 # Last item: child_visualization.insert(n_parents-2, last_item_symbol) else # More items follows: child_visualization.insert(n_parents-2, nonlast_item_symbol) end elsif element == children.last # Last item: child_visualization[n_parents-2] = last_item_symbol child_visualization.insert(-1, hook_symbol) else # Neither first nor last (more items follows): child_visualization.insert(n_parents-2, nonlast_item_symbol) end elsif n_parents == 1 child_visualization = Array.new(1, hook_symbol) else child_visualization = Array.new end new_elements, index = element.handle_print(index, max_digits, max_name, max_length, max_generations, child_visualization, options) elements << new_elements end end return elements.flatten, index end
ruby
{ "resource": "" }
q22270
DICOM.Parent.max_lengths
train
def max_lengths max_name = 0 max_length = 0 max_generations = 0 children.each do |element| if element.children? max_nc, max_lc, max_gc = element.max_lengths max_name = max_nc if max_nc > max_name max_length = max_lc if max_lc > max_length max_generations = max_gc if max_gc > max_generations end n_length = element.name.length l_length = element.length.to_s.length generations = element.parents.length max_name = n_length if n_length > max_name max_length = l_length if l_length > max_length max_generations = generations if generations > max_generations end return max_name, max_length, max_generations end
ruby
{ "resource": "" }
q22271
DICOM.Parent.method_missing
train
def method_missing(sym, *args, &block) s = sym.to_s action = s[-1] # Try to match the method against a tag from the dictionary: tag = LIBRARY.as_tag(s) || LIBRARY.as_tag(s[0..-2]) if tag if action == '?' # Query: return self.exists?(tag) elsif action == '=' # Assignment: unless args.length==0 || args[0].nil? # What kind of element to create? if tag == 'FFFE,E000' return self.add_item elsif LIBRARY.element(tag).vr == 'SQ' return self.add(Sequence.new(tag)) else return self.add(Element.new(tag, *args)) end else return self.delete(tag) end else # Retrieval: return self[tag] end end # Forward to Object#method_missing: super end
ruby
{ "resource": "" }
q22272
DICOM.Parent.print
train
def print(options={}) # FIXME: Perhaps a :children => false option would be a good idea (to avoid lengthy printouts in cases where this would be desirable)? # FIXME: Speed. The new print algorithm may seem to be slower than the old one (observed on complex, hiearchical DICOM files). Perhaps it can be optimized? elements = Array.new # We first gather some properties that is necessary to produce a nicely formatted printout (max_lengths, count_all), # then the actual information is gathered (handle_print), # and lastly, we pass this information on to the methods which print the output (print_file or print_screen). if count > 0 max_name, max_length, max_generations = max_lengths max_digits = count_all.to_s.length visualization = Array.new elements, index = handle_print(start_index=1, max_digits, max_name, max_length, max_generations, visualization, options) if options[:file] print_file(elements, options[:file]) else print_screen(elements) end else puts "Notice: Object #{self} is empty (contains no data elements)!" end return elements end
ruby
{ "resource": "" }
q22273
DICOM.Parent.to_hash
train
def to_hash as_hash = Hash.new unless children? if self.is_a?(DObject) as_hash = {} else as_hash[(self.tag.private?) ? self.tag : self.send(DICOM.key_representation)] = nil end else children.each do |child| if child.tag.private? hash_key = child.tag elsif child.is_a?(Item) hash_key = "Item #{child.index}" else hash_key = child.send(DICOM.key_representation) end if child.is_a?(Element) as_hash[hash_key] = child.to_hash[hash_key] else as_hash[hash_key] = child.to_hash end end end return as_hash end
ruby
{ "resource": "" }
q22274
DICOM.Parent.value
train
def value(tag) check_key(tag, :value) if exists?(tag) if self[tag].is_parent? raise ArgumentError, "Illegal parameter '#{tag}'. Parent elements, like the referenced '#{@tags[tag].class}', have no value. Only Element tags are valid." else return self[tag].value end else return nil end end
ruby
{ "resource": "" }
q22275
DICOM.Parent.check_key
train
def check_key(tag_or_index, method) if tag_or_index.is_a?(String) logger.warn("Parent##{method} called with an invalid tag argument: #{tag_or_index}") unless tag_or_index.tag? elsif tag_or_index.is_a?(Integer) logger.warn("Parent##{method} called with a negative Integer argument: #{tag_or_index}") if tag_or_index < 0 else logger.warn("Parent##{method} called with an unexpected argument. Expected String or Integer, got: #{tag_or_index.class}") end end
ruby
{ "resource": "" }
q22276
DICOM.Link.await_release
train
def await_release segments = receive_single_transmission info = segments.first if info[:pdu] != PDU_RELEASE_REQUEST # For some reason we didn't get our expected release request. Determine why: if info[:valid] logger.error("Unexpected message type received (PDU: #{info[:pdu]}). Expected a release request. Closing the connection.") handle_abort(false) else logger.error("Timed out while waiting for a release request. Closing the connection.") end stop_session else # Properly release the association: handle_release end end
ruby
{ "resource": "" }
q22277
DICOM.Link.build_association_request
train
def build_association_request(presentation_contexts, user_info) # Big endian encoding: @outgoing.endian = @net_endian # Clear the outgoing binary string: @outgoing.reset # Note: The order of which these components are built is not arbitrary. # (The first three are built 'in order of appearance', the header is built last, but is put first in the message) append_application_context append_presentation_contexts(presentation_contexts, ITEM_PRESENTATION_CONTEXT_REQUEST, request=true) append_user_information(user_info) # Header must be built last, because we need to know the length of the other components. append_association_header(PDU_ASSOCIATION_REQUEST, @host_ae) end
ruby
{ "resource": "" }
q22278
DICOM.Link.build_command_fragment
train
def build_command_fragment(pdu, context, flags, command_elements) # Little endian encoding: @outgoing.endian = @data_endian # Clear the outgoing binary string: @outgoing.reset # Build the last part first, the Command items: command_elements.each do |element| # Tag (4 bytes) @outgoing.add_last(@outgoing.encode_tag(element[0])) # Encode the value first, so we know its length: value = @outgoing.encode_value(element[2], element[1]) # Length (2 bytes) @outgoing.encode_last(value.length, "US") # Reserved (2 bytes) @outgoing.encode_last("0000", "HEX") # Value (variable length) @outgoing.add_last(value) end # The rest of the command fragment will be buildt in reverse, all the time # putting the elements first in the outgoing binary string. # Group length item: # Value (4 bytes) @outgoing.encode_first(@outgoing.string.length, "UL") # Reserved (2 bytes) @outgoing.encode_first("0000", "HEX") # Length (2 bytes) @outgoing.encode_first(4, "US") # Tag (4 bytes) @outgoing.add_first(@outgoing.encode_tag("0000,0000")) # Big endian encoding from now on: @outgoing.endian = @net_endian # Flags (1 byte) @outgoing.encode_first(flags, "HEX") # Presentation context ID (1 byte) @outgoing.encode_first(context, "BY") # Length (of remaining data) (4 bytes) @outgoing.encode_first(@outgoing.string.length, "UL") # PRESENTATION DATA VALUE (the above) append_header(pdu) end
ruby
{ "resource": "" }
q22279
DICOM.Link.build_data_fragment
train
def build_data_fragment(data_elements, presentation_context_id) # Set the transfer syntax to be used for encoding the data fragment: set_transfer_syntax(@presentation_contexts[presentation_context_id]) # Endianness of data fragment: @outgoing.endian = @data_endian # Clear the outgoing binary string: @outgoing.reset # Build the last part first, the Data items: data_elements.each do |element| # Encode all tags (even tags which are empty): # Tag (4 bytes) @outgoing.add_last(@outgoing.encode_tag(element[0])) # Encode the value in advance of putting it into the message, so we know its length: vr = LIBRARY.element(element[0]).vr value = @outgoing.encode_value(element[1], vr) if @explicit # Type (VR) (2 bytes) @outgoing.encode_last(vr, "STR") # Length (2 bytes) @outgoing.encode_last(value.length, "US") else # Implicit: # Length (4 bytes) @outgoing.encode_last(value.length, "UL") end # Value (variable length) @outgoing.add_last(value) end # The rest of the data fragment will be built in reverse, all the time # putting the elements first in the outgoing binary string. # Big endian encoding from now on: @outgoing.endian = @net_endian # Flags (1 byte) @outgoing.encode_first("02", "HEX") # Data, last fragment (identifier) # Presentation context ID (1 byte) @outgoing.encode_first(presentation_context_id, "BY") # Length (of remaining data) (4 bytes) @outgoing.encode_first(@outgoing.string.length, "UL") # PRESENTATION DATA VALUE (the above) append_header(PDU_DATA) end
ruby
{ "resource": "" }
q22280
DICOM.Link.build_storage_fragment
train
def build_storage_fragment(pdu, context, flags, body) # Big endian encoding: @outgoing.endian = @net_endian # Clear the outgoing binary string: @outgoing.reset # Build in reverse, putting elements in front of the binary string: # Insert the data (body): @outgoing.add_last(body) # Flags (1 byte) @outgoing.encode_first(flags, "HEX") # Context ID (1 byte) @outgoing.encode_first(context, "BY") # PDV Length (of remaining data) (4 bytes) @outgoing.encode_first(@outgoing.string.length, "UL") # PRESENTATION DATA VALUE (the above) append_header(pdu) end
ruby
{ "resource": "" }
q22281
DICOM.Link.forward_to_interpret
train
def forward_to_interpret(message, pdu, file=nil) case pdu when PDU_ASSOCIATION_REQUEST info = interpret_association_request(message) when PDU_ASSOCIATION_ACCEPT info = interpret_association_accept(message) when PDU_ASSOCIATION_REJECT info = interpret_association_reject(message) when PDU_DATA info = interpret_command_and_data(message, file) when PDU_RELEASE_REQUEST info = interpret_release_request(message) when PDU_RELEASE_RESPONSE info = interpret_release_response(message) when PDU_ABORT info = interpret_abort(message) else info = {:valid => false} logger.error("An unknown PDU type was received in the incoming transmission. Can not decode this message. (PDU: #{pdu})") end return info end
ruby
{ "resource": "" }
q22282
DICOM.Link.handle_incoming_data
train
def handle_incoming_data(path) # Wait for incoming data: segments = receive_multiple_transmissions(file=true) # Reset command results arrays: @command_results = Array.new @data_results = Array.new file_transfer_syntaxes = Array.new files = Array.new single_file_data = Array.new # Proceed to extract data from the captured segments: segments.each do |info| if info[:valid] # Determine if it is command or data: if info[:presentation_context_flag] == DATA_MORE_FRAGMENTS @data_results << info[:results] single_file_data << info[:bin] elsif info[:presentation_context_flag] == DATA_LAST_FRAGMENT @data_results << info[:results] single_file_data << info[:bin] # Join the recorded data binary strings together to make a DICOM file binary string and put it in our files Array: files << single_file_data.join single_file_data = Array.new elsif info[:presentation_context_flag] == COMMAND_LAST_FRAGMENT @command_results << info[:results] @presentation_context_id = info[:presentation_context_id] # Does this actually do anything useful? file_transfer_syntaxes << @presentation_contexts[info[:presentation_context_id]] end end end # Process the received files using the customizable FileHandler class: success, messages = @file_handler.receive_files(path, files, file_transfer_syntaxes) return success, messages end
ruby
{ "resource": "" }
q22283
DICOM.Link.handle_response
train
def handle_response # Need to construct the command elements array: command_elements = Array.new # SOP Class UID: command_elements << ["0000,0002", "UI", @command_request["0000,0002"]] # Command Field: command_elements << ["0000,0100", "US", command_field_response(@command_request["0000,0100"])] # Message ID Being Responded To: command_elements << ["0000,0120", "US", @command_request["0000,0110"]] # Data Set Type: command_elements << ["0000,0800", "US", NO_DATA_SET_PRESENT] # Status: command_elements << ["0000,0900", "US", SUCCESS] # Affected SOP Instance UID: command_elements << ["0000,1000", "UI", @command_request["0000,1000"]] if @command_request["0000,1000"] build_command_fragment(PDU_DATA, @presentation_context_id, COMMAND_LAST_FRAGMENT, command_elements) transmit end
ruby
{ "resource": "" }
q22284
DICOM.Link.interpret
train
def interpret(message, file=nil) if @first_part message = @first_part + message @first_part = nil end segments = Array.new # If the message is at least 8 bytes we can start decoding it: if message.length > 8 # Create a new Stream instance to handle this response. msg = Stream.new(message, @net_endian) # PDU type ( 1 byte) pdu = msg.decode(1, "HEX") # Reserved (1 byte) msg.skip(1) # Length of remaining data (4 bytes) specified_length = msg.decode(4, "UL") # Analyze the remaining length of the message versurs the specified_length value: if msg.rest_length > specified_length # If the remaining length of the string itself is bigger than this specified_length value, # then it seems that we have another message appended in our incoming transmission. fragment = msg.extract(specified_length) info = forward_to_interpret(fragment, pdu, file) info[:pdu] = pdu segments << info # It is possible that a fragment contains both a command and a data fragment. If so, we need to make sure we collect all the information: if info[:rest_string] additional_info = forward_to_interpret(info[:rest_string], pdu, file) segments << additional_info end # The information gathered from the interpretation is appended to a segments array, # and in the case of a recursive call some special logic is needed to build this array in the expected fashion. remaining_segments = interpret(msg.rest_string, file) remaining_segments.each do |remaining| segments << remaining end elsif msg.rest_length == specified_length # Proceed to analyze the rest of the message: fragment = msg.extract(specified_length) info = forward_to_interpret(fragment, pdu, file) info[:pdu] = pdu segments << info # It is possible that a fragment contains both a command and a data fragment. 
If so, we need to make sure we collect all the information: if info[:rest_string] additional_info = forward_to_interpret(info[:rest_string], pdu, file) segments << additional_info end else # Length of the message is less than what is specified in the message. Need to listen for more. This is hopefully handled properly now. #logger.error("Error. The length of the received message (#{msg.rest_length}) is smaller than what it claims (#{specified_length}). Aborting.") @first_part = msg.string end else # Assume that this is only the start of the message, and add it to the next incoming string: @first_part = message end return segments end
ruby
{ "resource": "" }
q22285
DICOM.Link.interpret_abort
train
def interpret_abort(message) info = Hash.new msg = Stream.new(message, @net_endian) # Reserved (2 bytes) reserved_bytes = msg.skip(2) # Source (1 byte) info[:source] = msg.decode(1, "HEX") # Reason/Diag. (1 byte) info[:reason] = msg.decode(1, "HEX") # Analyse the results: process_source(info[:source]) process_reason(info[:reason]) stop_receiving @abort = true info[:valid] = true return info end
ruby
{ "resource": "" }
q22286
DICOM.Link.interpret_association_reject
train
def interpret_association_reject(message) info = Hash.new msg = Stream.new(message, @net_endian) # Reserved (1 byte) msg.skip(1) # Result (1 byte) info[:result] = msg.decode(1, "BY") # 1 for permanent and 2 for transient rejection # Source (1 byte) info[:source] = msg.decode(1, "BY") # Reason (1 byte) info[:reason] = msg.decode(1, "BY") logger.warn("ASSOCIATE Request was rejected by the host. Error codes: Result: #{info[:result]}, Source: #{info[:source]}, Reason: #{info[:reason]} (See DICOM PS3.8: Table 9-21 for details.)") stop_receiving info[:valid] = true return info end
ruby
{ "resource": "" }
q22287
DICOM.Link.interpret_release_request
train
def interpret_release_request(message) info = Hash.new msg = Stream.new(message, @net_endian) # Reserved (4 bytes) reserved_bytes = msg.decode(4, "HEX") handle_release info[:valid] = true return info end
ruby
{ "resource": "" }
q22288
DICOM.Link.interpret_release_response
train
def interpret_release_response(message) info = Hash.new msg = Stream.new(message, @net_endian) # Reserved (4 bytes) reserved_bytes = msg.decode(4, "HEX") stop_receiving info[:valid] = true return info end
ruby
{ "resource": "" }
q22289
DICOM.Link.receive_multiple_transmissions
train
def receive_multiple_transmissions(file=nil) # FIXME: The code which waits for incoming network packets seems to be very CPU intensive. # Perhaps there is a more elegant way to wait for incoming messages? # @listen = true segments = Array.new while @listen # Receive data and append the current data to our segments array, which will be returned. data = receive_transmission(@min_length) current_segments = interpret(data, file) if current_segments current_segments.each do |cs| segments << cs end end end segments << {:valid => false} unless segments return segments end
ruby
{ "resource": "" }
q22290
DICOM.Link.receive_single_transmission
train
def receive_single_transmission min_length = 8 data = receive_transmission(min_length) segments = interpret(data) segments << {:valid => false} unless segments.length > 0 return segments end
ruby
{ "resource": "" }
q22291
DICOM.Link.process_reason
train
def process_reason(reason) case reason when "00" logger.error("Reason specified for abort: Reason not specified") when "01" logger.error("Reason specified for abort: Unrecognized PDU") when "02" logger.error("Reason specified for abort: Unexpected PDU") when "04" logger.error("Reason specified for abort: Unrecognized PDU parameter") when "05" logger.error("Reason specified for abort: Unexpected PDU parameter") when "06" logger.error("Reason specified for abort: Invalid PDU parameter value") else logger.error("Reason specified for abort: Unknown reason (Error code: #{reason})") end end
ruby
{ "resource": "" }
q22292
DICOM.Link.process_result
train
def process_result(result) unless result == 0 # Analyse the result and report what is wrong: case result when 1 logger.warn("DICOM Request was rejected by the host, reason: 'User-rejection'") when 2 logger.warn("DICOM Request was rejected by the host, reason: 'No reason (provider rejection)'") when 3 logger.warn("DICOM Request was rejected by the host, reason: 'Abstract syntax not supported'") when 4 logger.warn("DICOM Request was rejected by the host, reason: 'Transfer syntaxes not supported'") else logger.warn("DICOM Request was rejected by the host, reason: 'UNKNOWN (#{result})' (Illegal reason provided)") end end end
ruby
{ "resource": "" }
q22293
DICOM.Link.receive_transmission
train
def receive_transmission(min_length=0) data = receive_transmission_data # Check the nature of the received data variable: if data # Sometimes the incoming transmission may be broken up into smaller pieces: # Unless a short answer is expected, we will continue to listen if the first answer was too short: unless min_length == 0 if data.length < min_length addition = receive_transmission_data data = data + addition if addition end end else # It seems there was no incoming message and the operation timed out. # Convert the variable to an empty string. data = "" end data end
ruby
{ "resource": "" }
q22294
DICOM.Link.receive_transmission_data
train
def receive_transmission_data data = false response = IO.select([@session], nil, nil, @timeout) if response.nil? logger.error("No answer was received within the specified timeout period. Aborting.") stop_receiving else data = @session.recv(@max_receive_size) end data end
ruby
{ "resource": "" }
q22295
DICOM.Link.set_transfer_syntax
train
def set_transfer_syntax(syntax) @transfer_syntax = syntax # Query the library with our particular transfer syntax string: ts = LIBRARY.uid(@transfer_syntax) @explicit = ts ? ts.explicit? : true @data_endian = ts ? ts.big_endian? : false logger.warn("Invalid/unknown transfer syntax encountered: #{@transfer_syntax} Will try to continue, but errors may occur.") unless ts end
ruby
{ "resource": "" }
q22296
DICOM.Item.bin=
train
def bin=(new_bin) raise ArgumentError, "Invalid parameter type. String was expected, got #{new_bin.class}." unless new_bin.is_a?(String) # Add an empty byte at the end if the length of the binary is odd: if new_bin.length.odd? @bin = new_bin + "\x00" else @bin = new_bin end @value = nil @length = @bin.length end
ruby
{ "resource": "" }
q22297
DICOM.ImageProcessor.decompress
train
def decompress(blobs) raise ArgumentError, "Expected Array or String, got #{blobs.class}." unless [String, Array].include?(blobs.class) blobs = [blobs] unless blobs.is_a?(Array) begin return image_module.decompress(blobs) rescue return false end end
ruby
{ "resource": "" }
q22298
DICOM.ImageProcessor.import_pixels
train
def import_pixels(blob, columns, rows, depth, photometry) raise ArgumentError, "Expected String, got #{blob.class}." unless blob.is_a?(String) image_module.import_pixels(blob, columns, rows, depth, photometry) end
ruby
{ "resource": "" }
q22299
Calabash.Location.coordinates_for_place
train
def coordinates_for_place(place_name) result = Geocoder.search(place_name) if result.empty? raise "No result found for '#{place}'" end {latitude: result.first.latitude, longitude: result.first.longitude} end
ruby
{ "resource": "" }