_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q21600 | ZPNG.Image.save | train | def save fname, options={}
File.open(fname,"wb"){ |f| f << export(options) }
end | ruby | {
"resource": ""
} |
q21601 | ZPNG.Image._safe_inflate | train | def _safe_inflate data
zi = Zlib::Inflate.new
pos = 0; r = ''
begin
# save some memory by not using String#[] when not necessary
r << zi.inflate(pos==0 ? data : data[pos..-1])
if zi.total_in < data.size
@extradata << data[zi.total_in..-1]
puts "[?] #{@extradata.last.size} bytes of extra data after zlib stream".red if @verbose >= 1
end
# decompress OK
rescue Zlib::BufError
# tried to decompress, but got EOF - need more data
puts "[!] #{$!.inspect}".red if @verbose >= -1
# collect any remaining data in decompress buffer
r << zi.flush_next_out
rescue Zlib::DataError
puts "[!] #{$!.inspect}".red if @verbose >= -1
#p [pos, zi.total_in, zi.total_out, data.size, r.size]
r << zi.flush_next_out
# XXX TODO try to skip error and continue
# printf "[d] pos=%d/%d t_in=%d t_out=%d bytes_ok=%d\n".gray, pos, data.size,
# zi.total_in, zi.total_out, r.size
# if pos < zi.total_in
# pos = zi.total_in
# else
# pos += 1
# end
# pos = 0
# retry if pos < data.size
rescue Zlib::NeedDict
puts "[!] #{$!.inspect}".red if @verbose >= -1
# collect any remaining data in decompress buffer
r << zi.flush_next_out
end
r == "" ? nil : r
ensure
zi.close if zi && !zi.closed?
end | ruby | {
"resource": ""
} |
q21602 | ZPNG.Image.crop! | train | def crop! params
decode_all_scanlines
x,y,h,w = (params[:x]||0), (params[:y]||0), params[:height], params[:width]
raise ArgumentError, "negative params not allowed" if [x,y,h,w].any?{ |x| x < 0 }
# adjust crop sizes if they greater than image sizes
h = self.height-y if (y+h) > self.height
w = self.width-x if (x+w) > self.width
raise ArgumentError, "negative params not allowed (p2)" if [x,y,h,w].any?{ |x| x < 0 }
# delete excess scanlines at tail
scanlines[(y+h)..-1] = [] if (y+h) < scanlines.size
# delete excess scanlines at head
scanlines[0,y] = [] if y > 0
# crop remaining scanlines
scanlines.each{ |l| l.crop!(x,w) }
# modify header
hdr.height, hdr.width = h, w
# return self
self
end | ruby | {
"resource": ""
} |
q21603 | ZPNG.Image.deinterlace | train | def deinterlace
return self unless interlaced?
# copy all but 'interlace' header params
h = Hash[*%w'width height depth color compression filter'.map{ |k| [k.to_sym, hdr.send(k)] }.flatten]
# don't auto-add palette chunk
h[:palette] = nil
# create new img
new_img = self.class.new h
# copy all but hdr/imagedata/end chunks
chunks.each do |chunk|
next if chunk.is_a?(Chunk::IHDR)
next if chunk.is_a?(Chunk::IDAT)
next if chunk.is_a?(Chunk::IEND)
new_img.chunks << chunk.deep_copy
end
# pixel-by-pixel copy
each_pixel do |c,x,y|
new_img[x,y] = c
end
new_img
end | ruby | {
"resource": ""
} |
q21604 | ZPNG.Color.to_ansi | train | def to_ansi
return to_depth(8).to_ansi if depth != 8
a = ANSI_COLORS.map{|c| self.class.const_get(c.to_s.upcase) }
a.map!{ |c| self.euclidian(c) }
ANSI_COLORS[a.index(a.min)]
end | ruby | {
"resource": ""
} |
q21605 | ZPNG.Color.to_depth | train | def to_depth new_depth
return self if depth == new_depth
color = Color.new :depth => new_depth
if new_depth > self.depth
%w'r g b a'.each do |part|
color.send("#{part}=", (2**new_depth-1)/(2**depth-1)*self.send(part))
end
else
# new_depth < self.depth
%w'r g b a'.each do |part|
color.send("#{part}=", self.send(part)>>(self.depth-new_depth))
end
end
color
end | ruby | {
"resource": ""
} |
q21606 | ZPNG.Color.op | train | def op op, c=nil
# XXX what to do with alpha?
max = 2**depth-1
if c
c = c.to_depth(depth)
Color.new(
@r.send(op, c.r) & max,
@g.send(op, c.g) & max,
@b.send(op, c.b) & max,
:depth => self.depth
)
else
Color.new(
@r.send(op) & max,
@g.send(op) & max,
@b.send(op) & max,
:depth => self.depth
)
end
end | ruby | {
"resource": ""
} |
q21607 | SODA.Client.handle_response | train | def handle_response(response)
# Check our response code
check_response_fail(response)
return nil if blank?(response.body)
# Return a bunch of mashes as the body if we're JSON
begin
response.body = JSON.parse(response.body, max_nesting: false)
response.body = if response.body.is_a? Array
response.body.map { |r| Hashie::Mash.new(r) }
else
Hashie::Mash.new(response.body)
end
rescue => exception
raise "JSON parsing failed. Error details: #{exception}"
ensure
return response
end
end | ruby | {
"resource": ""
} |
q21608 | Hashie.Rash.underscore_string | train | def underscore_string(str)
str.to_s.strip.
gsub(' ', '_').
gsub(/::/, '/').
gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
gsub(/([a-z\d])([A-Z])/,'\1_\2').
tr("-", "_").
squeeze("_").
downcase
end | ruby | {
"resource": ""
} |
q21609 | NFC.Reader.discover | train | def discover(*card_types)
# TODO: по правильному здесь надо делать низкоуровневый
card_types.inject([]) do |tags, card_type|
raise NFC::Error.new('Wrong card type') unless card_type.respond_to? :discover
tags += card_type.discover(connect)
end
end | ruby | {
"resource": ""
} |
q21610 | Quorum.JobsController.search | train | def search
data = Job.search(params)
# Respond with :json, :txt (tab delimited Blast results), or GFF3.
respond_with data.flatten!(1) do |format|
format.json {
render :json => Quorum::JobSerializer.as_json(data)
}
format.gff {
render :text => Quorum::JobSerializer.as_gff(data)
}
format.txt {
render :text => Quorum::JobSerializer.as_txt(data)
}
end
end | ruby | {
"resource": ""
} |
q21611 | Quorum.JobsController.build_blast_jobs | train | def build_blast_jobs
@job ||= Job.new
@job.build_blastn_job if @job.blastn_job.nil?
@job.build_blastx_job if @job.blastx_job.nil?
@job.build_tblastn_job if @job.tblastn_job.nil?
@job.build_blastp_job if @job.blastp_job.nil?
end | ruby | {
"resource": ""
} |
q21612 | Quorum.BuildBlastDB.create_file_name | train | def create_file_name(file, base_dir)
file_name = file.split("/").delete_if { |f| f.include?(".") }.first
unless File.exists?(File.join(base_dir, file_name))
Dir.mkdir(File.join(base_dir, file_name))
end
file_name
end | ruby | {
"resource": ""
} |
q21613 | Quorum.BuildBlastDB.extract_files | train | def extract_files(src, file, flag, path)
extract_data_error = File.join(@log_dir, "extract_data_error.log")
cmd = "tar -x#{flag}Of #{src} #{file} >> #{path} 2>> " <<
"#{extract_data_error}"
system(cmd)
if $?.exitstatus > 0
raise "Data extraction error. " <<
"See #{extract_data_error} for details."
end
end | ruby | {
"resource": ""
} |
q21614 | Quorum.BuildBlastDB.execute_makeblastdb | train | def execute_makeblastdb(type, title, input)
@output.puts "Executing makeblastdb for #{title} dbtype #{type}..."
makeblast_log = File.join(@log_dir, "makeblastdb.log")
output = File.dirname(input)
cmd = "makeblastdb " <<
"-dbtype #{type} " <<
"-title #{title} " <<
"-in #{input} " <<
"-out #{output} " <<
"-hash_index >> #{makeblast_log}"
system(cmd)
if $?.exitstatus > 0
raise "makeblastdb error. " <<
"See #{makeblast_log} for details."
end
end | ruby | {
"resource": ""
} |
q21615 | Quorum.BuildBlastDB.build_blast_db | train | def build_blast_db(blastdb)
Dir.glob(File.expand_path(blastdb) + "/*").each do |d|
if File.directory?(d)
contigs = File.join(d, "contigs.fa")
peptides = File.join(d, "peptides.fa")
found = false
if File.exists?(contigs) && File.readable?(contigs)
execute_makeblastdb("nucl", d, contigs)
found = true
end
if File.exists?(peptides) && File.readable?(peptides)
execute_makeblastdb("prot", d, peptides)
found = true
end
unless found
raise "Extracted data not found for #{contigs} or #{peptides}. " <<
"Make sure you supplied the correct data directory and file names."
end
end
end
end | ruby | {
"resource": ""
} |
q21616 | Quorum.Logger.log | train | def log(program, message, exit_status = nil, files = nil)
File.open(File.join(@log_directory, @log_file), "a") do |log|
log.puts ""
log.puts Time.now.to_s + " " + program
log.puts message
log.puts ""
end
if exit_status
remove_files(files) unless files.nil?
exit exit_status.to_i
end
end | ruby | {
"resource": ""
} |
q21617 | Quorum.Job.algorithm_selected | train | def algorithm_selected
in_queue = false
if (self.blastn_job && self.blastn_job.queue) ||
(self.blastx_job && self.blastx_job.queue) ||
(self.tblastn_job && self.tblastn_job.queue) ||
(self.blastp_job && self.blastp_job.queue)
in_queue = true
end
unless in_queue
errors.add(
:algorithm,
" - Please select at least one algorithm to continue."
)
end
end | ruby | {
"resource": ""
} |
q21618 | SaucelabsAdapter.Utilities.start_mongrel | train | def start_mongrel(suite_name = {})
pid_file = File.join(RAILS_ROOT, "tmp", "pids", "mongrel_selenium.pid")
port = suite_name[:port] rescue @selenium_config.application_port
say "Starting mongrel at #{pid_file}, port #{port}"
system "mongrel_rails start -d --chdir='#{RAILS_ROOT}' --port=#{port} --environment=test --pid #{pid_file} %"
end | ruby | {
"resource": ""
} |
q21619 | Quorum.Sequence.create_hash | train | def create_hash(sequence)
Digest::MD5.hexdigest(sequence).to_s + "-" + Time.now.to_f.to_s
end | ruby | {
"resource": ""
} |
q21620 | Quorum.Sequence.write_input_sequence_to_file | train | def write_input_sequence_to_file(tmp_dir, hash, sequence)
seq = File.join(tmp_dir, hash + ".seq")
File.open(seq, "w") do |f|
f << sequence
end
fasta = File.join(tmp_dir, hash + ".fa")
# Force FASTA format.
cmd = "seqret -filter -sformat pearson -osformat fasta < #{seq} " <<
"> #{fasta} 2> /dev/null"
system(cmd)
if $?.exitstatus > 0
raise " - Please enter your sequence(s) in Plain Text as " <<
"FASTA."
end
end | ruby | {
"resource": ""
} |
q21621 | Quorum.Helpers.set_flash_message | train | def set_flash_message(key, kind, options = {})
options[:scope] = "quorum.#{controller_name}"
options[:scope] << ".errors" if key.to_s == "error"
options[:scope] << ".notices" if key.to_s == "notice"
options[:scope] << ".alerts" if key.to_s == "alert"
message = I18n.t("#{kind}", options)
flash[key] = message if message.present?
end | ruby | {
"resource": ""
} |
q21622 | Quorum.BlastxJob.gap_opening_extension= | train | def gap_opening_extension=(value)
v = value.split(',')
self.gap_opening_penalty = v.first
self.gap_extension_penalty = v.last
end | ruby | {
"resource": ""
} |
q21623 | FitbitAPI.Client.deep_transform_keys! | train | def deep_transform_keys!(object, &block)
case object
when Hash
object.keys.each do |key|
value = object.delete(key)
object[yield(key)] = deep_transform_keys!(value) { |key| yield(key) }
end
object
when Array
object.map! { |e| deep_transform_keys!(e) { |key| yield(key) } }
else
object
end
end | ruby | {
"resource": ""
} |
q21624 | CrateRuby.Client.execute | train | def execute(sql, args = nil, bulk_args = nil, http_options = {})
@logger.debug sql
req = Net::HTTP::Post.new('/_sql', headers)
body = { 'stmt' => sql }
body['args'] = args if args
body['bulk_args'] = bulk_args if bulk_args
req.body = body.to_json
response = request(req, http_options)
@logger.debug response.body
case response.code
when /^2\d{2}/
ResultSet.new response.body
else
@logger.info(response.body)
raise CrateRuby::CrateError, response.body
end
end | ruby | {
"resource": ""
} |
q21625 | CrateRuby.Client.blob_put | train | def blob_put(table, digest, data)
uri = blob_path(table, digest)
@logger.debug("BLOB PUT #{uri}")
req = Net::HTTP::Put.new(blob_path(table, digest), headers)
req.body = data
response = request(req)
case response.code
when '201'
true
else
@logger.info("Response #{response.code}: " + response.body)
false
end
end | ruby | {
"resource": ""
} |
q21626 | RubyRabbitmqJanus.RRJAdmin.start_transaction_admin | train | def start_transaction_admin(options = {})
transaction = Janus::Transactions::Admin.new(options)
transaction.connect { yield(transaction) }
rescue
raise Errors::RRJAdmin::StartTransactionAdmin, options
end | ruby | {
"resource": ""
} |
q21627 | Rake::Pipeline::Web::Filters.CoffeeScriptFilter.generate_output | train | def generate_output(inputs, output)
inputs.each do |input|
begin
output.write CoffeeScript.compile(input, options)
rescue ExecJS::Error => error
raise error, "Error compiling #{input.path}. #{error.message}"
end
end
end | ruby | {
"resource": ""
} |
q21628 | RubyRabbitmqJanus.RRJ.start_transaction | train | def start_transaction(exclusive = true, options = {})
session = @option.use_current_session?(options)
transaction = Janus::Transactions::Session.new(exclusive, session)
transaction.connect { yield(transaction) }
rescue
raise Errors::RRJ::StartTransaction.new(exclusive, options)
end | ruby | {
"resource": ""
} |
q21629 | Rake::Pipeline::Web::Filters.ES6ModuleFilter.generate_output | train | def generate_output(inputs, output)
inputs.each do |input|
begin
body = input.read if input.respond_to?(:read)
local_opts = {}
if @module_id_generator
local_opts[:moduleName] = @module_id_generator.call(input)
end
opts = @options.merge(local_opts)
opts.delete(:module_id_generator)
output.write RubyES6ModuleTranspiler.transpile(body, opts)
rescue ExecJS::Error => error
raise error, "Error compiling #{input.path}. #{error.message}"
end
end
end | ruby | {
"resource": ""
} |
q21630 | RubyRabbitmqJanus.RRJTask.start_transaction_handle | train | def start_transaction_handle(exclusive = true, options = {})
janus = session_instance(options)
handle = 0 # Create always a new handle
transaction = Janus::Transactions::Handle.new(exclusive,
janus.session,
handle,
janus.instance)
transaction.connect { yield(transaction) }
rescue
raise Errors::RRJTask::StartTransactionHandle.new(exclusive, options)
end | ruby | {
"resource": ""
} |
q21631 | RubyRabbitmqJanus.ActionEvents.actions | train | def actions
lambda do |reason, data|
Rails.logger.debug "Execute block code with reason : #{reason}"
case reason
when event then case_events(data.to_hash)
end
end
end | ruby | {
"resource": ""
} |
q21632 | Railsdav.Renderer.response | train | def response(options = {})
elements = options.slice(:error)
render do
response_for options[:href] do |dav|
elements.each do |name, value|
status_for options[:status]
dav.__send__ name, value
end
end
end
end | ruby | {
"resource": ""
} |
q21633 | NoCms::Blocks.Block.duplicate_self | train | def duplicate_self new_self
new_self.translations = translations.map(&:dup)
new_self.translations.each { |t| t.globalized_model = new_self }
children.each do |child|
new_self.children << child.dup
end
end | ruby | {
"resource": ""
} |
q21634 | Degu.HasSet.has_set_coerce_argument_value | train | def has_set_coerce_argument_value(enum_class, argument_value)
invalid_set_elements = []
set_elements =
if String === argument_value
argument_value.split(',').map(&:strip)
else
Array(argument_value)
end.map do |set_element|
if result = enum_class[set_element]
result
else
invalid_set_elements << set_element
nil
end
end
invalid_set_elements.empty? or
raise ArgumentError, "element #{argument_value.inspect} contains invalid elements: #{invalid_set_elements.inspect}"
set_elements
end | ruby | {
"resource": ""
} |
q21635 | Hijacker.RedisKeys.unresponsive_dbhost_count | train | def unresponsive_dbhost_count(db_host)
begin
count = $hijacker_redis.hget( redis_keys(:unresponsive_dbhosts), db_host) unless db_host.nil?
(count or 0).to_i
rescue
0
end
end | ruby | {
"resource": ""
} |
q21636 | DataStructures.LinkedList.to_a | train | def to_a
current = @first
array = []
while !current.nil?
array << current.data
current = current.next
end
array
end | ruby | {
"resource": ""
} |
q21637 | TriplestoreAdapter::Providers.Blazegraph.delete | train | def delete(statements)
raise(TriplestoreAdapter::TriplestoreException, "delete received invalid array of statements") unless statements.any?
#TODO: Evaluate that all statements are singular, and without bnodes?
writer = RDF::Writer.for(:jsonld)
uri = URI.parse("#{@uri}?delete")
request = Net::HTTP::Post.new(uri)
request['Content-Type'] = 'application/ld+json'
request.body = writer.dump(statements)
@http.request(uri, request)
return true
end | ruby | {
"resource": ""
} |
q21638 | TriplestoreAdapter::Providers.Blazegraph.get_statements | train | def get_statements(subject: nil)
raise(TriplestoreAdapter::TriplestoreException, "get_statements received blank subject") if subject.empty?
subject = URI.escape(subject.to_s)
uri = URI.parse(format("%{uri}?GETSTMTS&s=<%{subject}>&includeInferred=false", {uri: @uri, subject: subject}))
request = Net::HTTP::Get.new(uri)
response = @http.request(uri, request)
RDF::Reader.for(:ntriples).new(response.body)
end | ruby | {
"resource": ""
} |
q21639 | TriplestoreAdapter::Providers.Blazegraph.build_namespace | train | def build_namespace(namespace)
raise(TriplestoreAdapter::TriplestoreException, "build_namespace received blank namespace") if namespace.empty?
request = Net::HTTP::Post.new("#{build_url}/blazegraph/namespace")
request['Content-Type'] = 'text/plain'
request.body = "com.bigdata.rdf.sail.namespace=#{namespace}"
@http.request(@uri, request)
"#{build_url}/blazegraph/namespace/#{namespace}/sparql"
end | ruby | {
"resource": ""
} |
q21640 | TriplestoreAdapter.Triplestore.store | train | def store(graph)
begin
statements = graph.each_statement.to_a
@client.insert(statements)
graph
rescue => e
raise TriplestoreAdapter::TriplestoreException, "store graph in triplestore cache failed with exception: #{e.message}"
end
end | ruby | {
"resource": ""
} |
q21641 | TriplestoreAdapter.Triplestore.delete | train | def delete(rdf_url)
begin
graph = fetch_cached_graph(rdf_url)
puts "[INFO] did not delete #{rdf_url}, it doesn't exist in the triplestore cache" if graph.nil?
return true if graph.nil?
statements = graph.each_statement.to_a
@client.delete(statements)
return true
rescue => e
raise TriplestoreAdapter::TriplestoreException, "delete #{rdf_url} from triplestore cache failed with exception: #{e.message}"
end
end | ruby | {
"resource": ""
} |
q21642 | TriplestoreAdapter.Triplestore.fetch_cached_graph | train | def fetch_cached_graph(rdf_url)
statements = @client.get_statements(subject: rdf_url.to_s)
if statements.count == 0
puts "[INFO] fetch_cached_graph(#{rdf_url.to_s}) not found in triplestore cache (#{@client.url})"
return nil
end
RDF::Graph.new.insert(*statements)
end | ruby | {
"resource": ""
} |
q21643 | TriplestoreAdapter.Triplestore.fetch_and_cache_graph | train | def fetch_and_cache_graph(rdf_url)
begin
graph = RDF::Graph.load(rdf_url)
store(graph)
graph
rescue TriplestoreAdapter::TriplestoreException => tse
puts "[ERROR] *****\n[ERROR] Unable to store graph in triplestore cache! Returning graph fetched from source.\n[ERROR] *****\n#{tse.message}"
graph
rescue => e
raise TriplestoreAdapter::TriplestoreException, "fetch_and_cache_graph(#{rdf_url}) failed to load the graph with exception: #{e.message}"
end
end | ruby | {
"resource": ""
} |
q21644 | TriplestoreAdapter.Client.get_statements | train | def get_statements(subject: nil)
raise TriplestoreAdapter::TriplestoreException.new("#{@provider.class.name} missing get_statements method.") unless @provider.respond_to?(:get_statements)
@provider.get_statements(subject: subject)
end | ruby | {
"resource": ""
} |
q21645 | CSVParty.Validations.raise_unless_all_named_parsers_exist! | train | def raise_unless_all_named_parsers_exist!
config.columns_with_named_parsers.each do |name, options|
parser = options[:parser]
next if named_parsers.include? parser
raise UnknownParserError.new(name, parser, named_parsers)
end
end | ruby | {
"resource": ""
} |
q21646 | TTY.Which.which | train | def which(cmd, paths: search_paths)
if file_with_path?(cmd)
return cmd if executable_file?(cmd)
extensions.each do |ext|
exe = ::File.join(cmd, ext)
return ::File.absolute_path(exe) if executable_file?(exe)
end
return nil
end
paths.each do |path|
if file_with_exec_ext?(cmd)
exe = ::File.join(path, cmd)
return ::File.absolute_path(exe) if executable_file?(exe)
end
extensions.each do |ext|
exe = ::File.join(path, "#{cmd}#{ext}")
return ::File.absolute_path(exe) if executable_file?(exe)
end
end
nil
end | ruby | {
"resource": ""
} |
q21647 | TTY.Which.search_paths | train | def search_paths(path = ENV['PATH'])
paths = if path && !path.empty?
path.split(::File::PATH_SEPARATOR)
else
%w(/usr/local/bin /usr/ucb /usr/bin /bin)
end
paths.select(&Dir.method(:exist?))
end | ruby | {
"resource": ""
} |
q21648 | TTY.Which.extensions | train | def extensions(path_ext = ENV['PATHEXT'])
return [''] unless path_ext
path_ext.split(::File::PATH_SEPARATOR).select { |part| part.include?('.') }
end | ruby | {
"resource": ""
} |
q21649 | TTY.Which.executable_file? | train | def executable_file?(filename, dir = nil)
path = ::File.join(dir, filename) if dir
path ||= filename
::File.file?(path) && ::File.executable?(path)
end | ruby | {
"resource": ""
} |
q21650 | TTY.Which.file_with_exec_ext? | train | def file_with_exec_ext?(filename)
extension = ::File.extname(filename)
return false if extension.empty?
extensions.any? { |ext| extension.casecmp(ext).zero? }
end | ruby | {
"resource": ""
} |
q21651 | Nickel.ConstructFinder.found_now_through_following_dayname | train | def found_now_through_following_dayname
@constructs << DateSpanConstruct.new(start_date: @curdate, end_date: @curdate.this(@day_index), comp_start: @pos, comp_end: @pos += 3, found_in: __method__)
end | ruby | {
"resource": ""
} |
q21652 | Nickel.NLP.correct_case | train | def correct_case
orig = @query.split
latest = @message.split
orig.each_with_index do |original_word, j|
if i = latest.index(original_word.downcase)
latest[i] = original_word
end
end
@message = latest.join(' ')
end | ruby | {
"resource": ""
} |
q21653 | Nickel.ZDate.ordinal_dayindex | train | def ordinal_dayindex(num, day_index)
# create a date object at the first occurrence of day_index
first_occ_date = ZDate.new(ZDate.format_date(year_str, month_str)).this(day_index)
# if num is 1 through 4, we can just add (num-1) weeks
if num <= 4
d = first_occ_date.add_weeks(num - 1)
else
# we want the last occurrence of this month
# add 4 weeks to first occurrence, see if we are in the same month, subtract 1 week if we are not
d = first_occ_date.add_weeks(4)
if d.month != month
d = d.sub_weeks(1)
end
end
d
end | ruby | {
"resource": ""
} |
q21654 | Nickel.ZDate.x_weeks_from_day | train | def x_weeks_from_day(weeks_away, day2index)
day1index = dayindex
if day1index > day2index
days_away = 7 * (weeks_away + 1) - (day1index - day2index)
elsif day1index < day2index
days_away = (weeks_away * 7) + (day2index - day1index)
elsif day1index == day2index
days_away = 7 * weeks_away
end
add_days(days_away) # returns a new date object
end | ruby | {
"resource": ""
} |
q21655 | Nickel.ZDate.add_days | train | def add_days(number)
if number < 0
return sub_days(number.abs)
end
o = dup # new ZDate object
# Let's see what month we are going to end in
while number > 0
if o.days_left_in_month >= number
o.date = ZDate.format_date(o.year_str, o.month_str, o.day + number)
number = 0
else
number = number - 1 - o.days_left_in_month # it costs 1 day to increment the month
o.increment_month!
end
end
o
end | ruby | {
"resource": ""
} |
q21656 | Nickel.ZDate.sub_days | train | def sub_days(number)
o = dup
while number > 0
if (o.day - 1) >= number
o.date = ZDate.format_date(o.year_str, o.month_str, o.day - number)
number = 0
else
number -= o.day
o.decrement_month!
end
end
o
end | ruby | {
"resource": ""
} |
q21657 | Nickel.ZDate.diff_in_days | train | def diff_in_days(date_to_compare)
# d1 will be the earlier date, d2 the later
if date_to_compare > self
d1, d2 = dup, date_to_compare.dup
elsif self > date_to_compare
d1, d2 = date_to_compare.dup, dup
else
return 0 # same date
end
total = 0
while d1.year != d2.year
total += d1.days_left_in_year + 1 # need one extra day to push us to jan 1
d1 = ZDate.new(ZDate.format_date(d1.year + 1))
end
total += d2.day_of_year - d1.day_of_year
total
end | ruby | {
"resource": ""
} |
q21658 | Nickel.ZDate.diff_in_months | train | def diff_in_months(date2)
if date2 > self
ZDate.diff_in_months(month, year, date2.month, date2.year)
else
ZDate.diff_in_months(date2.month, date2.year, month, year) * -1
end
end | ruby | {
"resource": ""
} |
q21659 | Nickel.ZDate.increment_month! | train | def increment_month!
if month != 12
# just bump up a number
self.date = ZDate.format_date(year_str, month + 1)
else
self.date = ZDate.format_date(year + 1)
end
end | ruby | {
"resource": ""
} |
q21660 | Nickel.ZTime.modify_such_that_is_before | train | def modify_such_that_is_before(time2)
fail 'ZTime#modify_such_that_is_before says: trying to modify time that has @firm set' if @firm
fail 'ZTime#modify_such_that_is_before says: time2 does not have @firm set' unless time2.firm
# self cannot have @firm set, so all hours will be between 1 and 12
# time2 is an end time, self could be its current setting, or off by 12 hours
# self to time2 --> self to time2
# 12 to 2am --> 1200 to 0200
# 12 to 12am --> 1200 to 0000
# 1220 to 12am --> 1220 to 0000
# 11 to 2am or 1100 to 0200
if self > time2
if hour == 12 && time2.hour == 0
# do nothing
else
hour == 12 ? change_hour_to(0) : change_hour_to(hour + 12)
end
elsif self < time2
if time2.hour >= 12 && ZTime.new(ZTime.format_time(time2.hour - 12, time2.min_str, time2.sec_str)) > self
# 4 to 5pm or 0400 to 1700
change_hour_to(hour + 12)
else
# 4 to 1pm or 0400 to 1300
# do nothing
end
else
# the times are equal, and self can only be between 0100 and 1200, so move self forward 12 hours, unless hour is 12
hour == 12 ? change_hour_to(0) : change_hour_to(hour + 12)
end
self.firm = true
self
end | ruby | {
"resource": ""
} |
q21661 | ActiveStorage.SendZip.send_zip | train | def send_zip(active_storages, filename: 'my.zip')
require 'zip'
files = SendZipHelper.save_files_on_server active_storages
zip_data = SendZipHelper.create_temporary_zip_file files
send_data(zip_data, type: 'application/zip', filename: filename)
end | ruby | {
"resource": ""
} |
q21662 | ContributionChecker.Checker.check | train | def check
@nwo, @sha = parse_commit_url @commit_url
begin
@commit = @client.commit @nwo, @sha
rescue ArgumentError
raise ContributionChecker::InvalidCommitUrlError
rescue Octokit::NotFound
raise ContributionChecker::InvalidCommitUrlError
rescue Octokit::Unauthorized
raise ContributionChecker::InvalidAccessTokenError
end
@repo = @client.repository @nwo
@user = @client.user
@commit_email_is_not_generic = commit_email_is_not_generic?
@commit_in_valid_branch = commit_in_valid_branch?
@repo_not_a_fork = !repository_is_fork?
@commit_email_linked_to_user = commit_email_linked_to_user?
@user_has_starred_repo = user_has_starred_repo?
@user_can_push_to_repo = user_can_push_to_repo?
@user_is_repo_org_member = user_is_repo_org_member?
@user_has_fork_of_repo = user_has_fork_of_repo?
@user_has_opened_issue_or_pr_in_repo = user_has_opened_issue_or_pr_in_repo?
{
:contribution => and_criteria_met? && or_criteria_met?,
:and_criteria => {
:commit_email_is_not_generic => @commit_email_is_not_generic,
:commit_in_valid_branch => @commit_in_valid_branch,
:repo_not_a_fork => @repo_not_a_fork,
:commit_email_linked_to_user => @commit_email_linked_to_user,
:commit_email => @commit[:commit][:author][:email],
:default_branch => @repo[:default_branch],
},
:or_criteria => {
:user_has_starred_repo => @user_has_starred_repo,
:user_can_push_to_repo => @user_can_push_to_repo,
:user_is_repo_org_member => @user_is_repo_org_member,
:user_has_fork_of_repo => @user_has_fork_of_repo,
:user_has_opened_issue_or_pr_in_repo => @user_has_opened_issue_or_pr_in_repo,
}
}
end | ruby | {
"resource": ""
} |
q21663 | ContributionChecker.Checker.parse_commit_url | train | def parse_commit_url(url)
begin
parts = URI.parse(@commit_url).path.split("/")
nwo = "#{parts[1]}/#{parts[2]}"
sha = parts[4]
return nwo, sha
rescue
raise ContributionChecker::InvalidCommitUrlError
end
end | ruby | {
"resource": ""
} |
q21664 | ContributionChecker.Checker.user_has_fork_of_repo? | train | def user_has_fork_of_repo?
# The API doesn't provide a simple means of checking whether a user has
# forked a repository.
# First, if there are no forks for the repository, return false.
return false if @repo[:forks_count] == 0
# Then check whether it's worth getting the list of forks
if @repo[:forks_count] <= 100
repo_forks = @client.forks @repo[:full_name], :per_page => 100
repo_forks.each do |f|
return true if f[:owner][:login] == @user[:login]
end
end
# Then try to directly find a repository with the same name as the
# repository in which the commit exists.
potential_fork_nwo = "#{@user[:login]}/#{@repo[:name]}"
begin
potential_fork = @client.repository potential_fork_nwo
if potential_fork[:fork]
return true if potential_fork[:parent][:full_name] == @repo[:full_name]
end
rescue Octokit::NotFound
# Keep going...
end
# Otherwise, get the user's forks and check the `parent` field of each
# fork to see whether it matches @repo.
@client.auto_paginate = true
@user_repos = @client.repos
@user_forks = @user_repos.select { |r| r[:fork] }
@user_forks.each do |f|
r = @client.repository f[:full_name]
if r[:parent][:full_name] == @repo[:full_name]
@client.auto_paginate = false
return true
end
end
@client.auto_paginate = false
false
end | ruby | {
"resource": ""
} |
q21665 | Phidgets.Encoder.on_input_change | train | def on_input_change(obj=nil, &block)
@on_input_change_obj = obj
@on_input_change = Proc.new { |device, obj_ptr, index, state|
yield self, @inputs[index], (state == 0 ? false : true), object_for(obj_ptr)
}
Klass.set_OnInputChange_Handler(@handle, @on_input_change, pointer_for(obj))
end | ruby | {
"resource": ""
} |
q21666 | DynamicImage.ImageSizing.crop_geometry | train | def crop_geometry(ratio_vector)
# Maximize the crop area to fit the image size
crop_size = ratio_vector.fit(size).round
# Ignore pixels outside the pre-cropped area for now
center = crop_gravity - crop_start
start = center - (crop_size / 2).floor
start = clamp(start, crop_size, size)
[crop_size, (start + crop_start)]
end | ruby | {
"resource": ""
} |
q21667 | DynamicImage.ImageSizing.fit | train | def fit(fit_size, options = {})
fit_size = parse_vector(fit_size)
require_dimensions!(fit_size) if options[:crop]
fit_size = size.fit(fit_size) unless options[:crop]
fit_size = size.contain(fit_size) unless options[:upscale]
fit_size
end | ruby | {
"resource": ""
} |
q21668 | DynamicImage.ImageSizing.clamp | train | def clamp(start, size, max_size)
start += shift_vector(start)
start -= shift_vector(max_size - (start + size))
start
end | ruby | {
"resource": ""
} |
q21669 | Phidgets.FrequencyCounter.on_count | train | def on_count(obj=nil, &block)
@on_count_obj = obj
@on_count = Proc.new { |device, obj_ptr, index, time, counts|
yield self, @inputs[index], time, counts, object_for(obj_ptr)
}
Klass.set_OnCount_Handler(@handle, @on_count, pointer_for(obj))
end | ruby | {
"resource": ""
} |
q21670 | Phidgets.Common.create | train | def create
ptr = ::FFI::MemoryPointer.new(:pointer, 1)
self.class::Klass.create(ptr)
@handle = ptr.get_pointer(0)
true
end | ruby | {
"resource": ""
} |
q21671 | Phidgets.Common.close | train | def close
remove_common_event_handlers
remove_specific_event_handlers
sleep 0.2
Phidgets::FFI::Common.close(@handle)
delete
true
end | ruby | {
"resource": ""
} |
q21672 | Phidgets.Common.on_attach | train | def on_attach(obj=nil, &block)
@on_attach_obj = obj
@on_attach = Proc.new { |handle, obj_ptr|
load_device_attributes
yield self, object_for(obj_ptr)
}
Phidgets::FFI::Common.set_OnAttach_Handler(@handle, @on_attach, pointer_for(obj))
true
end | ruby | {
"resource": ""
} |
q21673 | Phidgets.Common.on_detach | train | def on_detach(obj=nil, &block)
@on_detach_obj = obj
@on_detach = Proc.new { |handle, obj_ptr|
yield self, object_for(obj_ptr)
}
Phidgets::FFI::Common.set_OnDetach_Handler(@handle, @on_detach, pointer_for(obj))
true
end | ruby | {
"resource": ""
} |
# Registers a handler fired when the Phidget reports an error.
# The handler receives this device, the caller context, the error
# code, and a description string.
#
# @return [Boolean] true
def on_error(obj = nil, &block)
  @on_error_obj = obj
  @on_error = proc do |_handle, obj_ptr, code, description|
    block.call(self, object_for(obj_ptr), code, description)
  end
  Phidgets::FFI::Common.set_OnError_Handler(@handle, @on_error, pointer_for(obj))
  true
end
"resource": ""
} |
# Registers a handler fired when a connection to the Phidget server
# is established.
#
# @return [Boolean] true
def on_server_connect(obj = nil, &block)
  @on_server_connect_obj = obj
  @on_server_connect = proc do |_handle, obj_ptr|
    block.call(self, object_for(obj_ptr))
  end
  Phidgets::FFI::Common.set_OnServerConnect_Handler(@handle, @on_server_connect, pointer_for(obj))
  true
end
"resource": ""
} |
# Registers a handler fired before the system goes to sleep.
# Note this handler is process-wide: no device handle is passed to
# the FFI registration.
#
# @return [Boolean] true
def on_sleep(obj = nil, &block)
  @on_sleep_obj = obj
  @on_sleep = proc do |obj_ptr|
    block.call(object_for(obj_ptr))
  end
  Phidgets::FFI::Common.set_OnWillSleep_Handler(@on_sleep, pointer_for(obj))
  true
end
"resource": ""
} |
# Registers a handler fired when the system wakes from sleep.
# Like on_sleep, this is process-wide rather than per-handle.
#
# @return [Boolean] true
def on_wake(obj = nil, &block)
  @on_wake_obj = obj
  @on_wake = proc do |obj_ptr|
    block.call(object_for(obj_ptr))
  end
  Phidgets::FFI::Common.set_OnWakeup_Handler(@on_wake, pointer_for(obj))
  true
end
"resource": ""
} |
# Declares RESTful routes for a dynamic-image resource, with member
# routes for the uncropped/original/download variants.
#
# Caller-supplied +options+ override the defaults (path pattern with
# digest and optional WxH size, size-format constraint, show-only).
def image_resources(resource_name, options = {})
  defaults = {
    path: "#{resource_name}/:digest(/:size)",
    constraints: { size: /\d+x\d+/ },
    only: [:show]
  }
  resources resource_name, defaults.merge(options) do
    get :uncropped, on: :member
    get :original, on: :member
    get :download, on: :member
  end
end
"resource": ""
} |
# Registers a handler fired when any Phidget attaches; the handler
# receives the raw device handle and the caller context.
def on_attach(obj = nil, &block)
  @on_attach_obj = obj
  @on_attach = proc do |handle, obj_ptr|
    block.call(handle, object_for(obj_ptr))
  end
  Klass.set_OnAttach_Handler(@handle, @on_attach, pointer_for(obj))
end
"resource": ""
} |
# Registers a handler fired when any Phidget detaches; the handler
# receives the raw device handle and the caller context.
def on_detach(obj = nil, &block)
  @on_detach_obj = obj
  @on_detach = proc do |handle, obj_ptr|
    block.call(handle, object_for(obj_ptr))
  end
  Klass.set_OnDetach_Handler(@handle, @on_detach, pointer_for(obj))
end
"resource": ""
} |
# Registers a handler fired when the manager connects to the Phidget
# server. The handler receives this manager and the caller context.
def on_server_connect(obj=nil, &block)
  @on_server_connect_obj = obj
  @on_server_connect = Proc.new { |handle, obj_ptr|
    # Fix: was object_for(obj_pointer) — an undefined local (the proc's
    # parameter is obj_ptr), which raised NameError whenever the
    # callback actually fired. Matches on_server_disconnect below.
    yield self, object_for(obj_ptr)
  }
  Klass.set_OnServerConnect_Handler(@handle, @on_server_connect, pointer_for(obj))
end
"resource": ""
} |
# Registers a handler fired when the manager loses its connection to
# the Phidget server.
def on_server_disconnect(obj = nil, &block)
  @on_server_disconnect_obj = obj
  @on_server_disconnect = proc do |_handle, obj_ptr|
    block.call(self, object_for(obj_ptr))
  end
  Klass.set_OnServerDisconnect_Handler(@handle, @on_server_disconnect, pointer_for(obj))
end
"resource": ""
} |
# Registers a handler fired when the manager reports an error; the
# handler receives this manager, the caller context, the error code,
# and a description string.
def on_error(obj = nil, &block)
  @on_error_obj = obj
  @on_error = proc do |_handle, obj_ptr, code, description|
    block.call(self, object_for(obj_ptr), code, description)
  end
  Klass.set_OnError_Handler(@handle, @on_error, pointer_for(obj))
end
"resource": ""
} |
# Runs the CLI with injected streams and proxies the exit code to the
# injected kernel (a testability pattern: @stdin/@stdout/@stderr and
# @kernel are substitutes for the real process environment).
#
# The global stream variables are swapped in for the duration of the
# run and restored in the ensure clause, so they are put back even if
# the CLI raises.
def execute!
  exit_code = begin
    # Thor accesses these streams directly rather than letting them be
    # injected, so we replace them...
    $stderr = @stderr
    $stdin = @stdin
    $stdout = @stdout
    VcoWorkflows::CLI.start(@argv)
    # Thor::Base#start does not have a return value, assume success if no
    # exception is raised.
    0
  rescue StandardError => e
    # The ruby interpreter would pipe this to STDERR and exit 1 in the
    # case of an unhandled exception
    # (backtrace formatted to mimic the interpreter's own output)
    b = e.backtrace
    b.unshift("#{b.shift}: #{e.message} (#{e.class})")
    @stderr.puts(b.map { |s| "\tfrom #{s}" }.join("\n"))
    1
  ensure
    # put them back.
    $stderr = STDERR
    $stdin = STDIN
    $stdout = STDOUT
  end
  # Proxy exit code back to the injected kernel.
  @kernel.exit(exit_code)
end
"resource": ""
} |
# Looks up a single workflow by name and returns it, fetched by GUID.
#
# @raise [IOError] when the name matches zero workflows or more than one
def get_workflow_for_name(name)
  path = "/workflows?conditions=name=#{url_encode(name)}"
  response = JSON.parse(@session.get(path).body)
  # Exactly one match is required.
  fail(IOError, ERR[:too_many_workflows]) if response['total'] > 1
  fail(IOError, ERR[:no_workflow_found]) if response['total'] == 0
  # Pull the workflow GUID out of the result's attribute list.
  id_attributes = response['link'][0]['attributes'].select { |a| a['name'].eql?('id') }
  workflow_id = id_attributes.empty? ? nil : id_attributes.last['value']
  get_workflow_for_id(workflow_id)
end
"resource": ""
} |
# Fetches all executions for a workflow, keyed by execution id.
#
# Entries in the relation list without an 'attributes' key (the bare
# URL links at the head of the list) are skipped.
def get_execution_list(workflow_id)
  path = "/workflows/#{workflow_id}/executions/"
  relations = JSON.parse(@session.get(path).body)['relations']
  execution_links = relations['link'].select { |l| l.key?('attributes') }
  execution_links.each_with_object({}) do |link, executions|
    attributes = link['attributes'].each_with_object({}) do |a, memo|
      memo[a['name']] = a['value']
    end
    executions[attributes['id']] = attributes
  end
end
"resource": ""
} |
# Submits a workflow execution request and returns the new execution
# id, parsed from the Location response header.
def execute_workflow(id, parameter_json)
  path = "/workflows/#{id}/executions/"
  location = @session.post(path, parameter_json).headers[:location]
  # The execution id is the last path component of the Location URL:
  # strip everything up to /executions/, then any trailing slash.
  location.gsub(%r{^.*/executions/}, '').gsub(%r{\/$}, '')
end
"resource": ""
} |
# Validates and serializes each named argument through its registered
# type handler, returning a name => serialized-value hash.
#
# @raise [KeyError] when an argument has no registered handler
def parameters(arguments)
  arguments.each_with_object({}) do |(name, value), result|
    handler = @type_handlers.fetch(name)
    handler.validate(value)
    result[name] = handler.serialize(value)
  end
end
"resource": ""
} |
q21689 | VcoWorkflows.WorkflowExecutionLog.to_s | train | def to_s
message = ''
@messages.keys.sort.each do |timestamp|
message << "#{Time.at(timestamp / 1000)}"
message << " #{@messages[timestamp]['severity']}: #{@messages[timestamp]['user']}:"
message << " #{@messages[timestamp]['short-description']}"
unless @messages[timestamp]['short-description'].eql?(@messages[timestamp]['long-description'])
message << "; #{@messages[timestamp]['long-description']}"
end
message << "\n"
end
message
end | ruby | {
"resource": ""
} |
# Sets the LED board's current limit.
#
# @param new_current_limit [Symbol] key into Phidgets::FFI::LEDCurrentLimit
# @return the new current limit
def current_limit=(new_current_limit)
  # Dropped the unused FFI::MemoryPointer allocation — a setter has no
  # out-parameter, so the pointer was a leftover from the getter.
  Klass.setCurrentLimit(@handle, Phidgets::FFI::LEDCurrentLimit[new_current_limit])
  new_current_limit
end
"resource": ""
} |
# Sets the LED board's output voltage.
#
# @param new_voltage [Symbol] key into Phidgets::FFI::LEDVoltage
# @return the new voltage
def voltage=(new_voltage)
  # Dropped the unused FFI::MemoryPointer allocation — a setter has no
  # out-parameter, so the pointer was a leftover from the getter.
  Klass.setVoltage(@handle, Phidgets::FFI::LEDVoltage[new_voltage])
  new_voltage
end
"resource": ""
} |
# Registers a handler fired when a stepper's velocity changes; the
# handler receives this device, the stepper object, the new velocity,
# and the caller context.
def on_velocity_change(obj = nil, &block)
  @on_velocity_change_obj = obj
  @on_velocity_change = proc do |_device, obj_ptr, index, velocity|
    block.call(self, @steppers[index], velocity, object_for(obj_ptr))
  end
  Klass.set_OnVelocityChange_Handler(@handle, @on_velocity_change, pointer_for(obj))
end
"resource": ""
} |
# Registers a handler fired when an axis's acceleration changes; the
# handler receives this device, the axis object, the new acceleration,
# and the caller context.
def on_acceleration_change(obj = nil, &block)
  @on_acceleration_change_obj = obj
  @on_acceleration_change = proc do |_device, obj_ptr, axis_index, acceleration|
    block.call(self, axes[axis_index], acceleration, object_for(obj_ptr))
  end
  Klass.set_OnAccelerationChange_Handler(@handle, @on_acceleration_change, pointer_for(obj))
end
"resource": ""
} |
# Returns the normalized image data, cropped and resized to +size+
# unless the record is uncropped and already exactly that size.
def cropped_and_resized(size)
  normalized do |image|
    needs_geometry = record.cropped? || size != record.size
    if needs_geometry
      image.crop(image_sizing.crop_geometry_string(size))
      image.resize(size)
    end
  end
end
"resource": ""
} |
# Runs the image through the normalization pipeline and returns the
# processed data, yielding the ImageMagick options object so callers
# can append their own operations.
#
# Order matters: auto-orient and sRGB conversion run before the
# caller's block, optimization runs last within the same
# combine_options pass, and format conversion (if needed) happens
# after all pixel operations.
def normalized
  require_valid_image!
  process_data do |image|
    image.combine_options do |combined|
      combined.auto_orient
      combined.colorspace("sRGB") if needs_colorspace_conversion?
      yield(combined) if block_given?
      optimize(combined)
    end
    image.format(format) if needs_format_conversion?
  end
end
"resource": ""
} |
# Loads a Ruby-DSL config file by evaluating it in the current binding.
# Silently does nothing when the file does not exist.
#
# @param path [String] path to the config file
def load_config(path)
  return unless File.exist?(path)
  # NOTE(review): eval of file contents is intentional here (the config
  # is a Ruby DSL), but only trusted files must ever be passed in.
  # Simplified: the old "#{buf}" interpolation just copied the string.
  # Passing +path+ keeps backtraces pointing at the config file.
  eval(File.read(path), binding, path)
end
"resource": ""
} |
# Exposes the entries of @args as reader methods.
#
# @raise [NoMethodError] when @args has no entry for the method name
def method_missing(method)
  method = method.to_s
  # Hash#key? instead of keys.include? — same result without building
  # the intermediate key array.
  raise NoMethodError, "No such method: #{method}" unless @args.key?(method)
  @args[method]
end

# Keep respond_to? consistent with method_missing (standard companion
# override; the dynamic readers are now discoverable).
def respond_to_missing?(method, include_private = false)
  @args.key?(method.to_s) || super
end
"resource": ""
} |
# Extends the common attribute hash with motor-control specific
# channel counts and the ratiometric flag.
def attributes
  device_counts = {
    motors: motors.size,
    encoders: encoders.size,
    inputs: inputs.size,
    sensors: sensors.size,
    ratiometric: ratiometric
  }
  super.merge(device_counts)
end
"resource": ""
} |
# Registers a handler fired when a motor's current draw changes; the
# handler receives this device, the motor object, the new current,
# and the caller context.
def on_current_change(obj = nil, &block)
  @on_current_change_obj = obj
  @on_current_change = proc do |_device, obj_ptr, motor_index, current|
    block.call(self, @motors[motor_index], current, object_for(obj_ptr))
  end
  Klass.set_OnCurrentChange_Handler(@handle, @on_current_change, pointer_for(obj))
end
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.