_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q6200
|
PureCDB.Reader.values
|
train
|
# Returns every value stored under +key+ in the constant database.
#
# Hashes the key, locates its bucket in @hashes, then probes the hash
# table linearly (wrapping at +hlen+) until an empty slot is reached.
# Records whose stored hash matches are re-read and their key compared
# to weed out hash collisions. Returns [] for an empty bucket.
def values(key)
  h = hash(key)
  hoff = @hashes[(h % 256)*2]
  hlen = @hashes[(h % 256)*2 + 1]
  return [] if hlen == 0
  off = (h / 256) % hlen
  vals = []
  # Remember the starting slot so a completely full table terminates
  # after one full sweep (resolves the original FIXME about an
  # infinite loop when no empty slot exists).
  start_off = off
  while (slot = read(hoff + off * hashref_size .. hoff + off * hashref_size + hashref_size - 1)) &&
      (dslot = ary_unpack(slot, 2)) && dslot[1] != 0
    if dslot[0] == h
      pos = dslot[1]
      rkey, value = read_entry(pos)
      vals << value if rkey == key
    end
    off = (off + 1) % hlen
    break if off == start_off # wrapped around: table is full
  end
  vals
end
|
ruby
|
{
"resource": ""
}
|
q6201
|
NCore.ActiveModel.errors_for_actionpack
|
train
|
# Converts this model's plain error strings into an
# ActiveModel::Errors collection (each entry attached to :base) so
# ActionPack helpers can render them.
def errors_for_actionpack
  converted = ::ActiveModel::Errors.new(self)
  @errors.each { |message| converted.add(:base, message) }
  converted
end
|
ruby
|
{
"resource": ""
}
|
q6202
|
Gearbox.AdHocProperties.add_property
|
train
|
# Registers an ad-hoc RDF property: builds a statement
# (bnode, predicate, object) and stores it under +accessor+.
def add_property(accessor, predicate, object)
  attributes_list[accessor] = RDF::Statement.new(bnode, predicate, object)
end
|
ruby
|
{
"resource": ""
}
|
q6203
|
DataMapper::Adapters.BugzillaAdapter.update
|
train
|
# Pushes each resource in +collection+ back to Bugzilla via its edit
# URL. Returns the collection size as the updated-record count
# (DataMapper adapter convention).
def update(attributes, collection)
  each_resource_with_edit_url(collection) do |res, edit_url|
    put_updated_resource(edit_url, res)
  end
  collection.size
end
|
ruby
|
{
"resource": ""
}
|
q6204
|
Squash.Symbolicator.architectures
|
train
|
# Maps each architecture in the dSYM bundle to its UUID by parsing
# `dwarfdump -u` output (lines like "UUID: 0F4E… (x86_64) /path").
#
# Uses the block form of Open3.popen3 so the child's pipes are closed
# and the process reaped even on error — the original leaked all three
# pipes and never joined the wait thread.
def architectures
  architectures = Hash.new
  Open3.popen3('dwarfdump', '-u', @dsym) do |_stdin, stdout, _stderr, _wait_thr|
    stdout.each_line do |line|
      if line =~ /^UUID: ([0-9A-F\-]+) \((.+?)\)/
        architectures[$2] = $1
      end
    end
  end
  return architectures
end
|
ruby
|
{
"resource": ""
}
|
q6205
|
Detroit.Email.announce
|
train
|
# Sends (or previews) the release announcement email.
#
# Unless already approved, mail settings are first topped up from the
# EMAIL_* environment. With no recipients the method only reports.
# In trial mode a summary line is reported instead of sending.
# Otherwise the mail is sent when @approved, and the process exits
# with status -1 when not approved.
# NOTE(review): "recipents" typo is in the user-visible message;
# left as-is here since it is runtime output.
def announce
  apply_environment unless @approved
  mailopts = self.mailopts
  if mailto.empty?
    report "No recipents given."
  else
    if trial?
      # Trial run: show what would be mailed, without sending.
      subject = mailopts['subject']
      mailto = mailopts['to'].flatten.join(", ")
      report "email '#{subject}' to #{mailto}"
    else
      #emailer = Emailer.new(mailopts)
      #emailer.email
      if @approved
        email(mailopts)
      else
        # Not approved: abort the whole tool run.
        exit -1
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q6206
|
Detroit.Email.message
|
train
|
# Lazily builds and memoizes the announcement message.
#
# When +file+ is set and its glob resolves to a path, the announcement
# is built from that file; otherwise it is assembled from +parts+,
# stripping a leading "file://" from each part first.
#
# Fixes two defects in the original:
# * the resolved +path+ is now handed to File.new (previously the raw
#   +file+ glob pattern was used), and
# * the stripped parts are actually passed on (previously the mapped
#   array was computed and discarded).
def message
  @message ||= (
    path = Dir[file].first if file
    if path
      project.announcement(File.new(path))
    else
      cleaned = parts.map{ |part| /^file:\/\// =~ part.to_s ? $' : part }
      project.announcement(*cleaned)
    end
  )
end
|
ruby
|
{
"resource": ""
}
|
q6207
|
Detroit.Email.apply_environment
|
train
|
# Fills any unset mail settings from the EMAIL_* environment
# variables; explicitly configured values always win. FROM and
# ACCOUNT fall back to each other. No-op when +noenv+ is set.
def apply_environment
  return if noenv
  {
    server:   ENV['EMAIL_SERVER'],
    from:     ENV['EMAIL_FROM']    || ENV['EMAIL_ACCOUNT'],
    account:  ENV['EMAIL_ACCOUNT'] || ENV['EMAIL_FROM'],
    password: ENV['EMAIL_PASSWORD'],
    port:     ENV['EMAIL_PORT'],
    domain:   ENV['EMAIL_DOMAIN'],
    login:    ENV['EMAIL_LOGIN'],
    secure:   ENV['EMAIL_SECURE']
  }.each do |name, fallback|
    ivar = "@#{name}"
    instance_variable_set(ivar, fallback) unless instance_variable_get(ivar)
  end
  @secure
end
|
ruby
|
{
"resource": ""
}
|
q6208
|
Bebox.Provision.apply
|
train
|
# Runs this provisioning step: validates the Puppetfile, copies static
# modules, applies the step, and records a checkpoint on success.
# Returns the process status of the apply.
def apply
  started_at = DateTime.now.to_s
  # Decide whether librarian-puppet needs a Puppetfile for this step.
  check_puppetfile_content
  # Static modules are not managed by librarian-puppet; copy by hand.
  copy_static_modules
  status = apply_step
  create_step_checkpoint(started_at) if status.success?
  status
end
|
ruby
|
{
"resource": ""
}
|
q6209
|
Bebox.Provision.create_step_checkpoint
|
train
|
# Records a provisioning checkpoint for this node and step: stamps the
# node's started/finished times, makes sure the environment's
# checkpoint directories exist, then renders the provisioned-node YAML
# template into .checkpoints/environments/<env>/steps/<step>/<host>.yml.
def create_step_checkpoint(started_at)
  self.node.started_at = started_at
  self.node.finished_at = DateTime.now.to_s
  Bebox::Environment.create_checkpoint_directories(project_root, environment)
  generate_file_from_template("#{Bebox::FilesHelper::templates_path}/node/provisioned_node.yml.erb", "#{self.project_root}/.checkpoints/environments/#{self.environment}/steps/#{self.step}/#{self.node.hostname}.yml", {node: self.node})
end
|
ruby
|
{
"resource": ""
}
|
q6210
|
Yargi.VertexSet.in_edges
|
train
|
# Collects the incoming edges of every vertex in this set (optionally
# filtered), flattened into a duplicate-free EdgeSet.
def in_edges(filter=nil, &block)
  collected = self.collect { |vertex| vertex.in_edges(filter, &block) }
  EdgeSet.new(collected).flatten.uniq
end
|
ruby
|
{
"resource": ""
}
|
q6211
|
Yargi.VertexSet.in_adjacent
|
train
|
# Collects the in-adjacent vertices of every vertex in this set
# (optionally filtered), flattened into a duplicate-free VertexSet.
def in_adjacent(filter=nil, &block)
  collected = self.collect { |vertex| vertex.in_adjacent(filter, &block) }
  VertexSet.new(collected).flatten.uniq
end
|
ruby
|
{
"resource": ""
}
|
q6212
|
Yargi.VertexSet.adjacent
|
train
|
# All vertices adjacent to this set in either direction, without
# duplicates.
def adjacent(filter=nil, &block)
  incoming = in_adjacent(filter, &block)
  outgoing = out_adjacent(filter, &block)
  (incoming + outgoing).uniq
end
|
ruby
|
{
"resource": ""
}
|
q6213
|
Houdah.Job.config
|
train
|
# Parses the job's configuration XML once and memoizes it as a Hash of
# property name => property value.
def config
  @parsed_config ||= begin
    props = {}
    Nokogiri::XML(config_xml).xpath("//property").each do |xprop|
      props[xprop.xpath("./name").text] = xprop.xpath("./value").text
    end
    props
  end
end
|
ruby
|
{
"resource": ""
}
|
q6214
|
BetterSqs.Client.push
|
train
|
# Enqueues +message_body+ on the named SQS queue.
def push(queue_name, message_body)
  queue_url = url_for_queue(queue_name)
  sqs.send_message(queue_url: queue_url, message_body: message_body)
end
|
ruby
|
{
"resource": ""
}
|
q6215
|
BetterSqs.Client.reserve
|
train
|
# Receives at most one message from the named queue. Returns a Message
# wrapper, or nil when the queue is empty.
def reserve(queue_name)
  resp = sqs.receive_message(queue_url: url_for_queue(queue_name), max_number_of_messages: 1)
  return nil unless resp.messages.any?
  first = resp.messages.first
  Message.new queue_client: self, queue: queue_name, sqs_message: first
end
|
ruby
|
{
"resource": ""
}
|
q6216
|
BetterSqs.Client.delete
|
train
|
# Permanently removes +message+ from its queue using its receipt
# handle.
def delete(message)
  sqs.delete_message(
    queue_url: url_for_queue(message.queue),
    receipt_handle: message.receipt_handle
  )
end
|
ruby
|
{
"resource": ""
}
|
q6217
|
BetterSqs.Client.defer_retry
|
train
|
# Postpones redelivery of +message+ by resetting its visibility
# timeout to the configured deferral period.
def defer_retry(message)
  sqs.change_message_visibility(
    queue_url: url_for_queue(message.queue),
    receipt_handle: message.receipt_handle,
    visibility_timeout: BetterSqs.configuration.sqs_message_deferral_seconds
  )
end
|
ruby
|
{
"resource": ""
}
|
q6218
|
Pushfile.Resize.resize!
|
train
|
# Resizes the uploaded file in place to @width x @height via
# MiniMagick. Deliberately best-effort: any error while opening or
# resizing is swallowed, and the file is only overwritten when the
# resize succeeded (the final write's failure is ignored too).
def resize!
  begin
    image = MiniMagick::Image.open(@file.path)
    image.resize("#{@width}x#{@height}")
  rescue
    # Pass on any error
  else
    # Reached only when the resize raised nothing.
    image.write(@file.path) rescue nil
  end
end
|
ruby
|
{
"resource": ""
}
|
q6219
|
Pushfile.Resize.thumbnail!
|
train
|
# Generates a width-constrained thumbnail of the upload in /tmp and
# stores its filename ("<basename>_thumb.<ext>") in @thumb.
# Best-effort: @thumb is set to nil whenever opening, resizing, or
# writing fails.
def thumbnail!
  begin
    image = MiniMagick::Image.open(@file.path)
    # Width from settings; height left free to preserve aspect ratio.
    image.resize("#{Pushfile.settings[:images][:thumb][:width]}x")
  rescue
    @thumb = nil
  else
    # Insert "_thumb" before the file extension.
    t = @name.split('.'); ext = t.pop
    @thumb = t.join(".").concat("_thumb.#{ext}")
    image.write("/tmp/#{@thumb}") rescue @thumb = nil
  end
end
|
ruby
|
{
"resource": ""
}
|
q6220
|
Hornetseye.Lambda.element
|
train
|
# Retrieves element +i+ of this lazy array expression by substituting
# +i+ for the bound index variable in the term.
#
# A non-matched (concrete) index is range-checked against the last
# dimension and wrapped in INT. For variable indices, the index's size
# hint is propagated when known.
# NOTE(review): `i.matched?` presumably distinguishes symbolic from
# concrete indices — confirm against Hornetseye's Node API.
def element(i)
  unless i.matched?
    unless (0 ... shape.last).member? i
      raise "Index must be in 0 ... #{shape.last} (was #{i})"
    end
    i = INT.new i
  end
  i.size = @index.size if i.is_a?(Variable) and @index.size.get
  @term.subst @index => i
end
|
ruby
|
{
"resource": ""
}
|
q6221
|
ConfluenceReporter.Reporter.report_event
|
train
|
# Publishes the buffered log to Confluence: appends to the page named
# +name+ under +parrent_page_id+ when it exists, otherwise creates it
# in +space+. Clears the log buffer afterwards.
def report_event(name, parrent_page_id=nil, space=nil)
  existing = find_page_by_name(name, parrent_page_id)
  if existing
    append_to_page(existing["id"], parrent_page_id)
  else
    create_page(name, space, parrent_page_id)
  end
  clear_log
end
|
ruby
|
{
"resource": ""
}
|
q6222
|
ConfluenceReporter.Reporter.create_page
|
train
|
# Creates a Confluence page titled +title+ in +space+ (optionally as a
# child of +parrent_page_id+) whose body is the accumulated
# @body_message rendered as JSON. Raises when the API answers with
# statusCode 400.
# NOTE(review): gsub("&&", "&&") is a no-op — possibly a mangled HTML
# escaping step; the \\u001b gsub strips ANSI escape sequences.
def create_page(title, space, parrent_page_id=nil)
  params = { 'type' => 'page',
    'title' => title,
    'space' => {'key' => space},
    'body' => {
      'storage' => {
        'value' => ("#{ @body_message.to_json.gsub("&&", "&&").gsub(/\\u001b.../, " ") }").force_encoding('UTF-8'),
        'representation' => 'storage'
      }
    }
  }
  # Ancestors attach the new page under an existing parent page.
  if parrent_page_id
    params['ancestors'] = [{'type' => 'page', 'id' => parrent_page_id}]
  end
  uri = URI.parse(@base_url)
  https = Net::HTTP.new(uri.host,uri.port)
  https.use_ssl = true
  # https.set_debug_output $stderr
  req = Net::HTTP::Post.new(uri.path, initheader = {'Content-Type' =>'application/json'})
  req.basic_auth(@user, @password)
  req['Accept'] = 'application/json'
  req.body = "#{params.to_json}"
  response = https.request(req)
  response = JSON.parse(response.body)
  if response["statusCode"] == 400
    puts response.inspect
    puts req.body.inspect
    puts "Create page: Error reporting to confluence: #{response["message"]}"
    raise "Create page: Error reporting to confluence: #{response["message"]}"
  else
    puts "Reported page creation."
  end
end
|
ruby
|
{
"resource": ""
}
|
q6223
|
Aims.Atom.constrained?
|
train
|
# True when this atom carries any relaxation constraint: +constrain+
# set to true, to a constraint string, or to a non-empty array of
# constraint strings.
def constrained?
  c = self.constrain
  return false unless c
  return true if c == true
  return true if c.is_a?(String)
  c.is_a?(Array) && !c.empty?
end
|
ruby
|
{
"resource": ""
}
|
q6224
|
Aims.Atom.distance_to
|
train
|
# Euclidean distance between this atom and +atom+.
def distance_to(atom)
  dx = self.x - atom.x
  dy = self.y - atom.y
  dz = self.z - atom.z
  Math.sqrt(dx*dx + dy*dy + dz*dz)
end
|
ruby
|
{
"resource": ""
}
|
q6225
|
Aims.Atom.displace
|
train
|
# Returns a new Atom translated by (x, y, z); self is unmodified.
# Species and constraint are carried over.
def displace(x,y,z)
  moved_x = self.x + x
  moved_y = self.y + y
  moved_z = self.z + z
  Atom.new(moved_x, moved_y, moved_z, self.species, self.constrain)
end
|
ruby
|
{
"resource": ""
}
|
q6226
|
Aims.Atom.displace!
|
train
|
# Translates this atom in place by (x, y, z).
def displace!(x,y,z)
  self.x = self.x + x
  self.y = self.y + y
  self.z = self.z + z
end
|
ruby
|
{
"resource": ""
}
|
q6227
|
Aims.Atom.format_geometry_in
|
train
|
# Renders this atom as FHI-aims geometry.in lines: the "atom" line
# plus any constrain_relaxation lines implied by +constrain+ (true, a
# single string, or an array of strings).
def format_geometry_in
  line = "atom %16.6f %16.6f %16.6f %s" % [self.x, self.y, self.z, self.species]
  c = self.constrain
  if c
    case c
    when true
      line << "\nconstrain_relaxation .true."
    when String
      line << "\nconstrain_relaxation #{c}"
    when Array
      unless c.empty?
        c.each { |constraint| line << "\nconstrain_relaxation #{constraint}" }
        line << "\n"
      end
    end
  end
  line
end
|
ruby
|
{
"resource": ""
}
|
q6228
|
Trooper.Runner.build_commands
|
train
|
# Looks up +action_name+ in the arsenal and builds its command list.
# Prerequisite actions use prerequisite_call; everything else uses
# call. Returns [commands, options]; raises MissingActionError when
# the action is unknown.
def build_commands(strategy_name, type, action_name)
  action = Arsenal.actions[action_name]
  raise MissingActionError, "Cant find action: #{action_name}" unless action
  options = action.options
  if type == :prerequisite
    commands = action.prerequisite_call config
    Trooper.logger.action "Prerequisite: #{action.description}"
  else
    commands = action.call config
    Trooper.logger.action action.description
  end
  [commands, options]
end
|
ruby
|
{
"resource": ""
}
|
q6229
|
Trooper.Runner.hosts
|
train
|
# Memoizes the Host objects for this run, built from config[:hosts]
# and config[:user]. Returns [] when either is unavailable (including
# when reading config raises).
def hosts
  @hosts ||= begin
    result = []
    host_names = (config[:hosts] rescue nil)
    user = (config[:user] rescue nil)
    host_names.each { |name| result << Host.new(name, user) } if host_names && user
    result
  end
end
|
ruby
|
{
"resource": ""
}
|
q6230
|
Trooper.Runner.runner_execute!
|
train
|
# Executes +commands+ on +host+. Logs stdout and returns true when the
# host reports a :stdout result tuple; returns false otherwise.
def runner_execute!(host, commands, options = {})
  result = host.execute commands, options
  return false unless result && result[1] == :stdout
  Trooper.logger.info "#{result[2]}\n"
  true
end
|
ruby
|
{
"resource": ""
}
|
q6231
|
Gvis.DataTable.add_row
|
train
|
# Appends one data row, validating that its width matches the table's
# registered columns. Raises ArgumentError on a width mismatch.
def add_row(row)
  size = row.size
  expected = @table_columns.size
  unless size == expected
    raise ArgumentError.new("Given a row of data with #{size} entries, but there are only #{expected} columns in the table")
  end
  @data << row
end
|
ruby
|
{
"resource": ""
}
|
q6232
|
Gvis.DataTable.add_rows
|
train
|
# Appends many data rows at once, first validating that every row's
# width matches the registered column count. Raises ArgumentError
# listing the offending widths otherwise.
def add_rows(rows)
  expected_size = @table_columns.size
  errors = rows.collect { |r| r.size }.uniq.reject { |s| s == expected_size }
  raise ArgumentError.new("Given a row of data with #{errors.to_sentence} entries, but there are only #{expected_size} columns in the table") if errors.any?
  @data += rows
end
|
ruby
|
{
"resource": ""
}
|
q6233
|
Gvis.DataTable.format_data
|
train
|
# Serializes the table's rows into a JavaScript array-of-arrays
# literal, converting each cell through Gvis::DataCell so values are
# emitted with the correct JS type.
def format_data
  rows_js = @data.map do |row|
    cells = row.each_with_index.map do |entry, index|
      Gvis::DataCell.new(entry, @column_types.to_a[index][1]).to_js
    end
    "[#{cells.join(", ")}]"
  end
  "[#{rows_js.join(', ')}]"
end
|
ruby
|
{
"resource": ""
}
|
q6234
|
Gvis.DataTable.register_column
|
train
|
# Registers a column named +name+ of the given +type+ (validated,
# case-insensitively, against COLUMN_TYPES). Records both the column
# order and its type mapping.
def register_column(type, name)
  normalized = type.to_s.downcase
  unless COLUMN_TYPES.include?(normalized)
    raise ArgumentError.new("invalid column type #{normalized}, permitted types are #{COLUMN_TYPES.join(', ')}")
  end
  @table_columns << name.to_s
  @column_types.merge!(name.to_s => normalized)
end
|
ruby
|
{
"resource": ""
}
|
q6235
|
Taaze.TaazeCollections.extract_books
|
train
|
# Converts the raw Taaze documents in @doc into a list of book hashes
# with 'title', 'book_url' (BOOK_URL + product id) and 'crt_time'
# (date portion only). Returns [] when @doc is empty.
def extract_books
  return [] if @doc.count == 0
  @doc.map do |book_data|
    {
      'title' => book_data['titleMain'],
      'book_url' => BOOK_URL + book_data['prodId'],
      'crt_time' => book_data['crtTime'].split(' ')[0]
    }
  end
end
|
ruby
|
{
"resource": ""
}
|
q6236
|
SiteMapper.Crawler.collect_urls
|
train
|
# Crawls the site starting at the resolved base URL. Each URL is
# yielded before its page is fetched for further links; crawling stops
# when the queue drains or @options[:max_requests] pages have been
# processed. Returns processed plus still-queued URLs as an array.
# A keyboard interrupt aborts cleanly, returning only the outstanding
# queue.
def collect_urls
  @fetch_queue << @crawl_url.resolved_base_url
  until @fetch_queue.empty? || @processed.length >= @options[:max_requests]
    url = @fetch_queue.pop
    yield(url)
    # Fetch the page and enqueue any new URLs it links to.
    page_urls_for(url)
  end
  result = @processed + @fetch_queue
  Logger.log "Crawling finished:"
  Logger.log "Processed links: #{@processed.length}"
  Logger.log "Found links: #{result.length}"
  result.to_a
rescue Interrupt, IRB::Abort
  Logger.err_log 'Crawl interrupted.'
  @fetch_queue.to_a
end
|
ruby
|
{
"resource": ""
}
|
q6237
|
Liner.Hashable.liner
|
train
|
# Snapshot of this object's liner attributes as a frozen Hash keyed by
# liner_keys.
def liner
  liner_keys.each_with_object({}) { |key, snapshot| snapshot[key] = self[key] }.freeze
end
|
ruby
|
{
"resource": ""
}
|
q6238
|
Pith.Output.build
|
train
|
# Generates this output exactly once: templates are evaluated,
# anything else is copied verbatim. Returns false when already
# generated. Resets the dependency set and ensures the target
# directory exists first.
def build
  return false if @generated
  logger.info("--> #{path}")
  @dependencies = Set.new
  file.parent.mkpath
  input.template? ? evaluate_template : copy_resource
  @generated = true
end
|
ruby
|
{
"resource": ""
}
|
q6239
|
Yargi.Digraph.each_vertex
|
train
|
# Iterates over the digraph's vertices, optionally restricted to those
# matching +filter+.
def each_vertex(filter=nil, &block)
  collection = filter.nil? ? @vertices : vertices(filter)
  collection.each(&block)
end
|
ruby
|
{
"resource": ""
}
|
q6240
|
Yargi.Digraph.remove_vertices
|
train
|
# Removes the given vertices — and every edge touching them — from the
# digraph, then re-indexes the survivors. Returns self.
def remove_vertices(*vertices)
  # Delete in descending index order so earlier deletions don't shift
  # the list positions of vertices still to be removed.
  vertices = to_vertices(*vertices).sort{|v1,v2| v2<=>v1}
  vertices.each do |vertex|
    remove_edges(vertex.in_edges+vertex.out_edges)
    @vertices.delete_at(vertex.index)
    # Mark the vertex as detached (see check_sanity).
    vertex.index=-1
  end
  # Compact the index space of the remaining vertices.
  @vertices.each_with_index {|v,i| v.index=i}
  self
end
|
ruby
|
{
"resource": ""
}
|
q6241
|
Yargi.Digraph.each_edge
|
train
|
# Iterates over the digraph's edges, optionally restricted to those
# matching +filter+.
def each_edge(filter=nil, &block)
  collection = filter.nil? ? @edges : edges(filter)
  collection.each(&block)
end
|
ruby
|
{
"resource": ""
}
|
q6242
|
Yargi.Digraph.remove_edges
|
train
|
# Removes the given edges from the digraph, detaching them from their
# source and target vertices, then re-indexes the survivors. Returns
# self.
def remove_edges(*edges)
  # Delete in descending index order so earlier deletions don't shift
  # the list positions of edges still to be removed.
  edges = to_edges(edges).sort{|e1,e2| e2<=>e1}
  edges.each do |edge|
    edge.source.remove_out_edge(edge)
    edge.target.remove_in_edge(edge)
    @edges.delete_at(edge.index)
    # Mark the edge as detached (see check_sanity).
    edge.index = -1
  end
  # Compact the index space of the remaining edges.
  @edges.each_with_index {|edge,i| edge.index=i}
  self
end
|
ruby
|
{
"resource": ""
}
|
q6243
|
Yargi.Digraph.to_dot
|
train
|
# Serializes the digraph into Graphviz DOT syntax, appending to (and
# returning) +buffer+. Marks on the graph, vertices and edges become
# DOT attribute lists.
def to_dot(buffer='')
  buffer << "digraph G {\n"
  buffer << " graph[#{to_dot_attributes(self.to_h(true))}]\n"
  each_vertex { |v| buffer << " V#{v.index} [#{to_dot_attributes(v.to_h(true))}]\n" }
  each_edge { |e| buffer << " V#{e.source.index} -> V#{e.target.index} [#{to_dot_attributes(e.to_h(true))}]\n" }
  buffer << "}\n"
end
|
ruby
|
{
"resource": ""
}
|
q6244
|
Yargi.Digraph.to_dot_attributes
|
train
|
# Renders a marks hash as a space-separated DOT attribute list
# (key="value"). Arrays of numeric pairs become point lists
# ("x1,y1 x2,y2"); other arrays are comma-joined; everything else uses
# to_s.
# TODO: fix uncompatible key names
# TODO: some values must be encoded (backquoting and the like)
def to_dot_attributes(hash)
  pairs = hash.map do |k, v|
    encoded = case v
              when Array
                if v.all?{|elm| Array===elm and elm.length==2 and elm.all?{|subelm| Numeric===subelm}}
                  v.inject('') {|memo, elm| "#{memo} #{elm.join(',')}"}.strip
                else
                  v.join(', ')
                end
              else
                v.to_s
              end
    "#{k}=\"#{encoded}\""
  end
  pairs.join(" ")
end
|
ruby
|
{
"resource": ""
}
|
q6245
|
Yargi.Digraph.check_sanity
|
train
|
# Internal invariant check for the digraph: every vertex/edge index
# must match its list position, no removed element (index < 0) may
# still be referenced, and each edge must agree with its endpoints.
# Raises with a diagnostic message at the first violation.
def check_sanity
  @vertices.each_with_index do |v,i|
    raise "Removed vertex in vertex list" unless v.index==i
    v.in_edges.each do |ine|
      raise "Removed edge in vertex incoming edges" if ine.index<0
      raise "Vertex and edge don't agree on target" unless ine.target==v
    end
    v.out_edges.each do |oute|
      raise "Removed edge in vertex outgoing edges" if oute.index<0
      raise "Vertex and edge don't agree on source" unless oute.source==v
    end
  end
  @edges.each_with_index do |e,i|
    raise "Removed edge in edge list" unless e.index==i
    raise "Edge in-connected to a removed vertex" if e.source.index<0
    raise "Edge out-connected to a removed vertex" if e.target.index<0
  end
end
|
ruby
|
{
"resource": ""
}
|
q6246
|
Yargi.Digraph.to_vertices
|
train
|
# Coerces a heterogeneous argument list into a VertexSet. Accepts
# vertex indices (Integer), VertexSets, nested Arrays, Vertex
# instances, or anything Predicate.to_predicate understands (used to
# filter the graph's vertices). Duplicates are removed.
def to_vertices(*args)
  selected = args.collect do |item|
    case item
    when Integer
      [@vertices[item]]
    when VertexSet
      item
    when Array
      item.collect{|v| to_vertices(v)}.flatten.uniq
    when Digraph::Vertex
      [item]
    else
      vertices(Predicate.to_predicate(item))
    end
  end.flatten.uniq
  VertexSet.new(selected)
end
|
ruby
|
{
"resource": ""
}
|
q6247
|
Yargi.Digraph.to_edges
|
train
|
# Coerces a heterogeneous argument list into an EdgeSet. Accepts edge
# indices (Integer), EdgeSets, nested Arrays, Edge instances, or
# anything Predicate.to_predicate understands (used to filter the
# graph's edges). Duplicates are removed.
def to_edges(*args)
  selected = args.collect do |item|
    case item
    when Integer
      [@edges[item]]
    when EdgeSet
      item
    when Array
      item.collect{|v| to_edges(v)}.flatten.uniq
    when Digraph::Edge
      [item]
    else
      edges(Predicate.to_predicate(item))
    end
  end.flatten.uniq
  EdgeSet.new(selected)
end
|
ruby
|
{
"resource": ""
}
|
q6248
|
Yargi.Digraph.apply_arg_conventions
|
train
|
# Applies Yargi argument conventions to +element+: Module arguments
# become tags, Hash arguments become marks; anything else raises
# ArgumentError. Returns the element.
def apply_arg_conventions(element, args)
  args.each do |arg|
    if arg.is_a?(Module)
      element.tag(arg)
    elsif arg.is_a?(Hash)
      element.add_marks(arg)
    else
      raise ArgumentError, "Unable to apply argument conventions on #{arg.inspect}", caller
    end
  end
  element
end
|
ruby
|
{
"resource": ""
}
|
q6249
|
Curtain.HTMLHelpers.content_tag
|
train
|
# Builds an HTML element named +name+ around +content+ (or the
# captured block). When +content+ is a Hash it is treated as the
# attribute hash instead.
def content_tag(name, content=nil, attrs={}, &body)
  # Support content_tag(:div, class: "x") — hash slides into attrs.
  attrs, content = content, nil if content.is_a?(Hash)
  content = capture(&body) if block_given?
  tag = tag_opening(name, attrs)
  tag << ">".html_safe
  tag << content
  tag << "</#{name}>".html_safe
end
|
ruby
|
{
"resource": ""
}
|
q6250
|
BarkestCore.ApplicationHelper.render_alert
|
train
|
# Renders a dismissible Bootstrap alert for +message+. A "safe_"
# prefix on +type+ marks the message html_safe; :notice and :alert
# map to Bootstrap's :info and :danger. Returns nil for any type
# outside info/success/danger/warning.
def render_alert(type, message)
  if type.to_s.index('safe_')
    type = type.to_s[5..-1]
    message = message.to_s.html_safe
  end
  type =
    case type.to_sym
    when :notice then :info
    when :alert then :danger
    else type.to_sym
    end
  return nil unless [:info, :success, :danger, :warning].include?(type)
  "<div class=\"alert alert-#{type} alert-dismissible\"><button type=\"button\" class=\"close\" data-dismiss=\"alert\" aria-label=\"Close\"><span aria-hidden=\"true\">×</span></button>#{render_alert_message(message)}</div>".html_safe
end
|
ruby
|
{
"resource": ""
}
|
q6251
|
ActionKitApi.EventCampaign.create_event
|
train
|
# Creates an Event attached to this (already saved) campaign; the
# campaign id is merged into the first argument hash.
def create_event(*args)
  raise "EventCampaign needs to be saved before Event creation" if self.id.nil?
  args[0].merge!(:campaign_id => self.id)
  ActionKitApi::Event.new(*args)
end
|
ruby
|
{
"resource": ""
}
|
q6252
|
ActionKitApi.EventCampaign.public_search
|
train
|
# Runs an Event.public_search API call scoped to this campaign and
# returns the matches wrapped as Event instances.
#
# The original built the Event wrappers and then discarded them,
# returning the raw API result instead; the mapped array is now the
# return value.
def public_search(*args)
  (args[0]).merge!(:campaign_id => self.id)
  results = ActionKitApi.connection.call("Event.public_search", *args)
  results.map do |r|
    Event.new(r)
  end
end
|
ruby
|
{
"resource": ""
}
|
q6253
|
EncryptedStore.CryptoHash.encrypt
|
train
|
# Encrypts this hash's JSON serialization with AES-256-CBC.
#
# +dek+ is the data-encryption key; +salt+ (at most 255 bytes) and
# +iter_mag+ parameterize the key/IV derivation in _keyiv_gen.
# Returns the v2 header + ciphertext with a CRC32 appended, or nil
# when the hash is empty.
# Raises Errors::InvalidSaltSize when the salt exceeds 255 bytes
# (the header stores its length in one byte).
def encrypt(dek, salt, iter_mag=10)
  return nil if empty?
  raise Errors::InvalidSaltSize, 'too long' if salt.bytes.length > 255
  key, iv = _keyiv_gen(dek, salt, iter_mag)
  encryptor = OpenSSL::Cipher::AES256.new(:CBC).encrypt
  encryptor.key = key
  encryptor.iv = iv
  data_packet = _encrypted_data_header_v2(salt, iter_mag) + encryptor.update(self.to_json) + encryptor.final
  # CRC32 trailer lets the reader detect corruption before decrypting.
  _append_crc32(data_packet)
end
|
ruby
|
{
"resource": ""
}
|
q6254
|
Incline.UserManager.authenticate
|
train
|
# Authenticates +email+/+password+ for a request from +client_ip+.
# Returns the authenticated user object, or nil on any failure.
# A registered auth engine for the email's domain takes precedence;
# otherwise the local database is consulted. Successes and failures
# are recorded against the user (or raw email) with the client IP.
def authenticate(email, password, client_ip)
  # Reject syntactically invalid addresses outright.
  return nil unless Incline::EmailValidator.valid?(email)
  email = email.downcase
  # If an engine is registered for the email domain, then use it.
  engine = get_auth_engine(email)
  if engine
    return engine.authenticate(email, password, client_ip)
  end
  # Otherwise we will be using the database.
  user = User.find_by(email: email)
  if user
    # user must be enabled and the password must match.
    unless user.enabled?
      add_failure_to user, '(DB) account disabled', client_ip
      return nil
    end
    if user.authenticate(password)
      add_success_to user, '(DB)', client_ip
      return user
    else
      add_failure_to user, '(DB) invalid password', client_ip
      return nil
    end
  end
  # No such user: log the failure against the raw email address.
  add_failure_to email, 'invalid email', client_ip
  nil
end
|
ruby
|
{
"resource": ""
}
|
q6255
|
Incline.UserManager.begin_external_authentication
|
train
|
# Asks every registered auth engine to begin external authentication
# for +request+. Returns the first non-blank URL an engine offers, or
# nil when none does.
def begin_external_authentication(request)
  auth_engines.each do |_domain, engine|
    next if engine.nil?
    url = engine.begin_external_authentication(request)
    return url unless url.blank?
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q6256
|
Incline.UserManager.register_auth_engine
|
train
|
# Registers +engine+ (an AuthEngineBase instance, a class that builds
# one, or nil to unregister) for one or more email domains.
# A Class argument is instantiated with this manager's @options.
# Every domain is normalized (downcased, stripped) and validated
# before any registration happens, so a bad domain aborts the whole
# call. Raises ArgumentError for invalid engines or domains.
def register_auth_engine(engine, *domains)
  unless engine.nil?
    unless engine.is_a?(::Incline::AuthEngineBase)
      raise ArgumentError, "The 'engine' parameter must be an instance of an auth engine or a class defining an auth engine." unless engine.is_a?(::Class)
      engine = engine.new(@options)
      raise ArgumentError, "The 'engine' parameter must be an instance of an auth engine or a class defining an auth engine." unless engine.is_a?(::Incline::AuthEngineBase)
    end
  end
  domains.map do |dom|
    dom = dom.to_s.downcase.strip
    raise ArgumentError, "The domain #{dom.inspect} does not appear to be a valid domain." unless dom =~ /\A[a-z0-9]+(?:[-.][a-z0-9]+)*\.[a-z]+\Z/
    dom
  end.each do |dom|
    auth_engines[dom] = engine
  end
end
|
ruby
|
{
"resource": ""
}
|
q6257
|
Generators.ControllerGeneratorBase.copy_view_files
|
train
|
# Generates the controller's view templates under
# app/views/<class_path>/<file_name>, one file per non-mutating
# action and requested format. Column-count bookkeeping adds a cell
# for each of the edit/destroy action links.
def copy_view_files #do NOT change the name of this method
  # it must be overriding an existing one in a parent class
  base_path = File.join("app/views", class_path, file_name)
  #binding.pry
  empty_directory base_path
  # Default to the full CRUD action set when none were requested.
  @actions = actions.nil? || actions.empty? ? %w(index new create edit update destroy) : actions
  @attr_cols = GeneratorUtils::attr_cols(table_name)
  @col_count = @attr_cols.count
  @col_count += 1 if @actions.include?("edit")
  @col_count += 1 if @actions.include?("destroy")
  @search_sort = options.search_sort?
  # create/update/destroy have no views of their own.
  (@actions - %w(create update destroy)).each do |action|
    @action = action
    formats.each do |format|
      @path = File.join(base_path, filename_with_extensions(action, format))
      set_template(@action, @path)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q6258
|
Footing.Hash.to_h
|
train
|
# Deep conversion of this wrapped hash into a plain ::Hash: nested
# Footing::Hash values (including those inside Arrays) are converted
# recursively; everything else is copied as-is.
def to_h
  copied_object.each_with_object({}) do |(key, value), plain|
    plain[key] =
      if value.is_a?(Footing::Hash)
        value.to_h
      elsif value.is_a?(::Array)
        value.map { |item| item.is_a?(Footing::Hash) ? item.to_h : item }
      else
        value
      end
  end
end
|
ruby
|
{
"resource": ""
}
|
q6259
|
Smarteru.Client.request
|
train
|
# POSTs the XML package for +operation+/+data+ to the SmarterU API and
# wraps the HTTP response. Raises Error when the API reports failure
# and fail_on_error is enabled; otherwise returns the Response.
def request(operation, data)
  options = {
    method: :post,
    url: api_url,
    payload: { 'Package' => body(operation, data) },
    content_type: :xml,
    verify_ssl: verify_ssl,
    ssl_ca_file: ssl_ca_file
  }
  raw = RestClient::Request.execute(options)
  wrapped = Response.new(raw)
  fail Error.new(wrapped) if !wrapped.success? && fail_on_error
  wrapped
end
|
ruby
|
{
"resource": ""
}
|
q6260
|
Smarteru.Client.body_parameters
|
train
|
# Serializes a parameter hash into SmarterU request XML. Hash values
# recurse into nested elements, Array values concatenate the XML of
# each entry, nil becomes an empty element, and scalars are wrapped
# in CDATA. Keys are camelized via parameter_key.
def body_parameters(parameters)
  xml = ''
  parameters.each_pair do |raw_key, value|
    tag = parameter_key(raw_key)
    inner =
      if value.is_a?(Hash)
        body_parameters(value)
      elsif value.is_a?(Array)
        value.map { |entry| body_parameters(entry) }.join('')
      elsif value.nil?
        ''
      else
        "<![CDATA[#{value}]]>"
      end
    xml << "<#{tag}>#{inner}</#{tag}>"
  end
  xml
end
|
ruby
|
{
"resource": ""
}
|
q6261
|
Smarteru.Client.parameter_key
|
train
|
# Camelizes +term+ for the SmarterU XML schema: the leading token is
# capitalized and each underscore-separated token is capitalized with
# the underscore dropped; "/" separators are preserved.
def parameter_key(term)
  camel = term.to_s.sub(/^[a-z\d]*/) { $&.capitalize }
  camel.gsub(/(?:_|(\/))([a-z\d]*)/i) { "#{$1}#{$2.capitalize}" }
end
|
ruby
|
{
"resource": ""
}
|
q6262
|
Aims.ZincBlende.get_bulk
|
train
|
# Builds the two-atom zinc-blende primitive cell: cation at the
# origin, anion offset by a quarter lattice constant along each axis,
# with FCC lattice vectors and unit-aligned Miller indices.
def get_bulk
  offset = 0.25*self.lattice_const
  cation_atom = Atom.new(0, 0, 0, self.cation)
  anion_atom = Atom.new(offset, offset, offset, self.anion)
  lattice = [
    Vector[0.5, 0.5, 0.0],
    Vector[0.5, 0.0, 0.5],
    Vector[0.0, 0.5, 0.5]
  ].map { |v| v*self.lattice_const }
  cell = Geometry.new([cation_atom, anion_atom], lattice)
  cell.set_miller_indices([1, 0, 0], [0, 1, 0], [0, 0, 1])
  cell
end
|
ruby
|
{
"resource": ""
}
|
q6263
|
Aims.ZincBlende.fill_volume
|
train
|
# Fills +volume+ (anything with max_point/min_point/contains_point)
# with bulk zinc-blende atoms and returns the resulting Geometry with
# duplicates removed.
def fill_volume(volume)
  # First fill a cube that bounds the volume
  max = volume.max_point
  min = volume.min_point
  dx = max[0] - min[0]
  dy = max[1] - min[1]
  dz = max[2] - min[2]
  bulk = get_bulk
  # This inverse matrix gives the number of repetitions
  m = Matrix[[dx,0,0], [0,dy,0], [0,0,dz]]
  v = Matrix[bulk.lattice_vectors[0].to_a,
    bulk.lattice_vectors[1].to_a,
    bulk.lattice_vectors[2].to_a]
  rep_mat = m*(v.inverse)
  # The only way I can figure out how to do this for an
  # arbitrary set of lattice vectors is to fill the volume
  # out along each edge of the super-cube and then eliminate duplicates
  atoms = []
  3.times do |i|
    # this vector is the number of repetitions in the unit cell
    # to fill the volume out along the i-th edge of the super-cube
    n_repeat = rep_mat.row(i)
    # Give the proper sign to the repeat
    # (round away from zero with one extra cell of margin).
    nx = (n_repeat[0] < 0) ? n_repeat[0].floor-1 : n_repeat[0].ceil+1
    ny = (n_repeat[1] < 0) ? n_repeat[1].floor-1 : n_repeat[1].ceil+1
    nz = (n_repeat[2] < 0) ? n_repeat[2].floor-1 : n_repeat[2].ceil+1
    # Keep only atoms that actually fall inside the target volume.
    atoms += bulk.repeat(nx, ny, nz).atoms.find_all{|a| volume.contains_point(a.x, a.y, a.z)}
  end
  Geometry.new(atoms.uniq)
end
|
ruby
|
{
"resource": ""
}
|
q6264
|
Aims.ZincBlende.get_001_surface
|
train
|
# Builds a (001) zinc-blende slab of +monolayers+ monolayers with
# +vacuum+ above it, constraining the bottom +constrain_layers+
# monolayers. Returns the completed Geometry.
#
# Fix: monolayerSep is now computed unconditionally. The original only
# assigned it inside the `if 0 < vacuum` branch, so calls with
# vacuum <= 0 raised NameError at the atom-rejection step below.
def get_001_surface(monolayers, vacuum, constrain_layers = 0)
  anion = Atom.new(0,0,0,self.cation)
  cation = Atom.new(0.25*self.lattice_const, 0.25*self.lattice_const, 0.25*self.lattice_const, self.anion)
  v1 = Vector[0.5, 0.5, 0]*self.lattice_const
  v2 = Vector[-0.5,0.5,0]*self.lattice_const
  v3 = Vector[0.5, 0, 0.5]*self.lattice_const
  zb = Geometry.new([anion, cation], [v1,v2,v3])
  millerX = [1,0,0]
  millerY = [0,1,0]
  millerZ = [0,0,1]
  zb.set_miller_indices(millerX, millerY, millerZ)
  # Repeat the unit cell. The unit cell is a bi-layer so divide by 2
  zb = zb.repeat(1,1,(monolayers/2).ceil)
  # A monolayer is half the bilayer repeat distance along z.
  monolayerSep = v3[2]/2
  if 0 < vacuum
    # Add vacuum
    zb.lattice_vectors[2] = Vector[0, 0, (monolayers-1)*monolayerSep.abs + vacuum.to_f]
    # Move everything into a nice tidy unit cell.
    zb = zb.correct
  end
  minZ = zb.atoms.min{|a,b| a.z <=> b.z}.z
  # Reject the top layer of atoms if an odd number of monolayers was requested.
  # This is necessary because the primitive cell is a bilayer
  zb.atoms.reject! {|a|
    a.z >= (minZ + monolayerSep.abs*monolayers)
  }
  # Constrain the bottom layers
  zb.atoms.each{|a|
    if (a.z < minZ + monolayerSep.abs*constrain_layers)
      a.constrain = ".true."
    end
  }
  # Return the completed unit cell
  return zb
end
|
ruby
|
{
"resource": ""
}
|
q6265
|
Aims.ZincBlende.get_111_surface
|
train
|
# Builds a (111) zinc-blende slab. +dir+ selects the termination:
# "A" puts the anion on top, "B" the cation. The slab has +monolayers+
# bilayers, +vacuum+ above it (when positive), and the bottom
# +constrain_layers+ bilayers constrained. Raises for any other +dir+.
def get_111_surface(dir, monolayers, vacuum, constrain_layers = 0)
  if dir == "A"
    top_atom = self.anion
    bot_atom = self.cation
  elsif dir == "B"
    top_atom = self.cation
    bot_atom = self.anion
  else
    raise "Direction must be either A or B"
  end
  # The atoms on a FCC
  as1 = Atom.new(0.0, 0.0, 0.0, top_atom)
  ga1 = Atom.new(0.0, 0.0, -sqrt(3)/4*self.lattice_const, bot_atom)
  # The lattice Vectors
  v1 = Vector[0.5*sqrt(2), 0.0, 0.0]*self.lattice_const
  v2 = Vector[sqrt(2)*0.25, sqrt(6)*0.25, 0.0]*self.lattice_const
  v3 = Vector[sqrt(2)*0.25, sqrt(2.0/3.0)*0.25, -1*sqrt(4.0/3.0)*0.5]*self.lattice_const
  # The unit cell
  zb = Geometry.new([as1, ga1], [v1, v2, v3])
  # The Miller Indices
  millerX = [-1, 1, 0] # Orientation of the crystal pointing in the cartesian +x axis
  millerY = [1, 1, -2] # Orientation of the crystal pointing in the cartesian +y axis
  millerZ = [-1, -1, -1] # Orientation of the crystal pointing in the cartesian +z axis
  zb.set_miller_indices(millerX, millerY, millerZ)
  # Repeat the unit cell and add vacuum
  if 0 < vacuum
    # We actually repeat the unit cell monolayers+1 times because
    # I will strip off the top and bottom atoms to make the proper surface
    zb = zb.repeat(1,1,monolayers+1)
    bilayerSep = v3[2]
    zb.lattice_vectors[2] = Vector[0, 0, (monolayers-1)*(bilayerSep.abs) + vacuum]
    # Strip off the top and bottom atom
    minZ = zb.atoms.min{|a,b| a.z <=> b.z}.z
    maxZ = zb.atoms.max{|a,b| a.z <=> b.z}.z
    zb.atoms.reject!{|a| a.z == maxZ}
    zb.atoms.reject!{|a| a.z == minZ}
    # Constrain the bottom layers if requested
    if 0 < constrain_layers
      # get the min again because we removed the atoms at minZ above
      minZ = zb.atoms.min{|a,b| a.z <=> b.z}.z
      constrain_below = minZ + bilayerSep.abs*constrain_layers
      zb.atoms.each{|a|
        if (a.z < constrain_below)
          a.constrain = ".true."
        end
      }
    end
  end
  zb
end
|
ruby
|
{
"resource": ""
}
|
q6266
|
Aims.ZincBlende.get_112_surface
|
train
|
# Builds a (112) zinc-blende slab of +monolayers+ layers with +vacuum+
# above it (when positive). Returns the completed Geometry.
# NOTE(review): +constrain_layers+ is currently unused — the
# constraint loop below is commented out.
def get_112_surface(monolayers, vacuum=0, constrain_layers = 0)
  atom1 = Atom.new(0,0,0,self.cation)
  atom2 = Atom.new(self.lattice_const*sqrt(3)/2, 0, 0, self.anion)
  v1 = Vector[sqrt(3), 0, 0]*self.lattice_const
  v2 = Vector[0, sqrt(2)/2, 0]*self.lattice_const
  v3 = Vector[1/sqrt(3), 1/(sqrt(3)*2), -1/(sqrt(3)*2)]*self.lattice_const
  millerX = Vector[1, 1, -2];
  millerY = Vector[-1, 1, 0];
  millerZ = Vector[-1, -1, -1]
  # The unit cell
  zb = Geometry.new([atom1, atom2], [v1, v2, v3])
  zb.set_miller_indices(millerX, millerY, millerZ)
  # Repeat the unit cell
  zb = zb.repeat(1,1,monolayers)
  if 0 < vacuum
    # Add vacuum
    monolayerSep = v3[2]
    zb.lattice_vectors[2] = Vector[0, 0, (monolayers*monolayerSep).abs + vacuum.to_f]
    # Move everything into a nice tidy unit cell.
    zb = zb.correct
  end
  # # Constrain the bottom 2 layers
  # zb.atoms.each{|a|
  # if (a.z < monolayerSep*2)
  # a.constrain = ".true."
  # end
  # }
  # Return the completed unit cell
  return zb
end
|
ruby
|
{
"resource": ""
}
|
q6267
|
Aims.ZincBlende.get_110_surface
|
train
|
# Builds a (110) zinc-blende slab of +monolayers+ layers with +vacuum+
# above it (when positive), constraining atoms within
# +constrain_layers+ monolayer separations of the cell bottom.
# Returns the completed Geometry.
def get_110_surface(monolayers, vacuum=0, constrain_layers = 0)
  # The atoms on a FCC
  atom1 = Atom.new(0,0,0,self.cation)
  atom2 = Atom.new(self.lattice_const*1/(2*sqrt(2)), self.lattice_const*0.25, 0.0, self.anion)
  # The lattice Vectors
  v1 = Vector[1/sqrt(2), 0.0, 0.0]*self.lattice_const
  v2 = Vector[0.0, 1.0, 0.0]*self.lattice_const
  v3 = Vector[1/(2*sqrt(2)), -0.5, 1/(2*sqrt(2))]*self.lattice_const
  # The miller indices for each primitive cartesian direction
  millerX = Vector[1, -1, 0]
  millerY = Vector[0, 0, 1]
  millerZ = Vector[1, 1, 0]
  # The unit cell
  zb = Geometry.new([atom1, atom2], [v1, v2, v3])
  zb.set_miller_indices(millerX, millerY, millerZ)
  # Repeat the unit cell
  zb = zb.repeat(1,1,monolayers)
  # Layer spacing along z; used for both vacuum sizing and constraints.
  monolayerSep = v3[2]
  if 0 < vacuum
    # Add vacuum
    zb.lattice_vectors[2] = Vector[0, 0, (monolayers-1)*monolayerSep.abs + vacuum.to_f]
    # Move everything into a nice tidy unit cell.
    zb = zb.correct
  end
  # # Constrain the bottom layers
  zb.atoms.each{|a|
    if (a.z < monolayerSep*constrain_layers)
      a.constrain = ".true."
    end
  }
  # Return the completed unit cell
  return zb
end
|
ruby
|
{
"resource": ""
}
|
q6268
|
Jinx.Collection.to_compact_hash_with_index
|
train
|
# Builds a hash mapping each non-nil item to the block's value for
# (item, index); entries whose mapped value is nil or empty are
# omitted.
def to_compact_hash_with_index
  result = {}
  each_with_index do |item, index|
    next if item.nil?
    mapped = yield(item, index)
    result[item] = mapped unless mapped.nil_or_empty?
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q6269
|
Jinx.Collection.partial_sort!
|
train
|
# Sorts this collection in place with a comparator that may return nil
# for incomparable pairs (a partial order). Incomparable pairs fall
# back to a transitive result derived from previously seen
# comparisons, or an arbitrary 1. Without a block, <=> is used.
def partial_sort!
  unless block_given? then return partial_sort! { |item1, item2| item1 <=> item2 } end
  # The comparison hash
  h = Hash.new { |h, k| h[k] = Hash.new }
  sort! do |a, b|
    # * If a and b are comparable, then use the comparison result.
    # * Otherwise, if there is a member c such that (a <=> c) == (c <=> b),
    # then a <=> b has the transitive comparison result.
    # * Otherwise, a <=> b is arbitrarily set to 1.
    yield(a, b) || h[a][b] ||= -h[b][a] ||= h[a].detect_value { |c, v| v if v == yield(c, b) } || 1
  end
end
|
ruby
|
{
"resource": ""
}
|
q6270
|
Jinx.Inversible.set_inverse
|
train
|
# Points this object and +other+ at each other: calls +inv_writer+ on
# +other+ with self (when other is present), then +writer+ on self
# with other.
def set_inverse(other, writer, inv_writer)
  if other
    other.send(inv_writer, self)
  end
  send(writer, other)
end
|
ruby
|
{
"resource": ""
}
|
q6271
|
Jinx.Inversible.set_inversible_noncollection_attribute
|
train
|
# Sets a 1:1 inversible attribute to +newval+, keeping both sides of
# the association consistent: the previous value's back-reference is
# cleared and the new value's back-reference is set to self.
# +accessors+ is the [reader, writer] symbol pair for this side;
# +inverse_writer+ is the writer symbol on the other side.
# Returns newval. No-op when the value is unchanged (same object).
def set_inversible_noncollection_attribute(newval, accessors, inverse_writer)
  rdr, wtr = accessors
  # the previous value
  oldval = send(rdr)
  # bail if no change
  return newval if newval.equal?(oldval)
  # clear the previous inverse
  logger.debug { "Moving #{qp} from #{oldval.qp} to #{newval.qp}..." } if oldval and newval
  if oldval then
    # Self-referential case: clear via our own writer rather than the
    # inverse writer so the right side of the pair is nulled.
    clr_wtr = self.class === oldval && oldval.send(rdr).equal?(self) ? wtr : inverse_writer
    oldval.send(clr_wtr, nil)
  end
  # call the writer
  send(wtr, newval)
  # call the inverse writer on self
  if newval then
    newval.send(inverse_writer, self)
    logger.debug { "Moved #{qp} from #{oldval.qp} to #{newval.qp}." } if oldval
  end
  newval
end
|
ruby
|
{
"resource": ""
}
|
q6272
|
Jinx.Inversible.add_to_inverse_collection
|
train
|
# Moves self from the current inverse reference's collection to the given
# value's collection, maintaining both sides of a one-to-many inversible
# association.
#
# @param newval the new referenced object, or nil to clear the reference
# @param accessors [Array] the [reader, writer] method symbols on self
# @param inverse [Symbol] the collection accessor on the referenced object
# @yield optional factory for a missing inverse collection (defaults to Array)
# @return the new value
def add_to_inverse_collection(newval, accessors, inverse)
  rdr, wtr = accessors
  # the current inverse
  oldval = send(rdr)
  # no-op if no change
  return newval if newval == oldval
  # delete self from the current inverse reference collection
  if oldval then
    coll = oldval.send(inverse)
    coll.delete(self) if coll
  end
  # call the writer on this object
  send(wtr, newval)
  # add self to the inverse collection
  if newval then
    coll = newval.send(inverse)
    # Lazily create the inverse collection when it does not yet exist.
    if coll.nil? then
      coll = block_given? ? yield : Array.new
      newval.set_property_value(inverse, coll)
    end
    coll << self
    if oldval then
      logger.debug { "Moved #{qp} from #{rdr} #{oldval.qp} #{inverse} to #{newval.qp}." }
    else
      logger.debug { "Added #{qp} to #{rdr} #{newval.qp} #{inverse}." }
    end
  end
  newval
end
|
ruby
|
{
"resource": ""
}
|
q6273
|
Garcon.Pathref.expand_pathseg
|
train
|
# Recursively expands a path reference. Non-symbol handles are returned
# unchanged; a symbol is looked up in ROOT_PATHS and each of its segments
# is expanded in turn, flattened into a single array.
#
# @param handle [Symbol, Object] a ROOT_PATHS key or a literal path segment
# @return the literal handle, or the flattened expansion of the symbol
# @raise [ArgumentError] when the symbol has no ROOT_PATHS entry
def expand_pathseg(handle)
  return handle unless handle.is_a?(Symbol)
  pathsegs = ROOT_PATHS[handle]
  unless pathsegs
    raise ArgumentError, "Don't know how to expand path reference '#{handle.inspect}'."
  end
  pathsegs.map { |seg| expand_pathseg(seg) }.flatten
end
|
ruby
|
{
"resource": ""
}
|
q6274
|
Garcon.MutexCountDownLatch.wait
|
train
|
# Blocks the calling thread until the latch count reaches zero or the
# timeout elapses.
#
# @param timeout [Numeric, nil] maximum seconds to wait; nil waits forever
# @return [Boolean] true if the count reached zero, false if the wait
#   budget was exhausted first
def wait(timeout = nil)
  @mutex.synchronize do
    # Condition::Result tracks the remaining wait budget across wakeups, so
    # a spurious wakeup re-waits only for the leftover time.
    remaining = Condition::Result.new(timeout)
    while @count > 0 && remaining.can_wait?
      remaining = @condition.wait(@mutex, remaining.remaining_time)
    end
    @count == 0
  end
end
|
ruby
|
{
"resource": ""
}
|
q6275
|
ModelSchema.SchemaError.dump_extra_diffs
|
train
|
# Renders the extra-element differences for the given schema field.
#
# @param field [Symbol] the schema field (e.g. columns, indexes)
# @return [String, nil] a report section, or nil when there are no extras
def dump_extra_diffs(field)
  diffs = diffs_by_field_type(field, TYPE_EXTRA)
  return if diffs.empty?
  lines = diffs.map { |diff| dump_single(field, diff[:generator], diff[:elem]) }
  "Table #{@table_name} has extra #{field}:\n\n\t#{lines.join("\n\t")}\n"
end
|
ruby
|
{
"resource": ""
}
|
q6276
|
ModelSchema.SchemaError.dump_missing_diffs
|
train
|
# Renders the missing-element differences for the given schema field.
#
# @param field [Symbol] the schema field (e.g. columns, indexes)
# @return [String, nil] a report section, or nil when nothing is missing
def dump_missing_diffs(field)
  diffs = diffs_by_field_type(field, TYPE_MISSING)
  return if diffs.empty?
  lines = diffs.map { |diff| dump_single(field, diff[:generator], diff[:elem]) }
  "Table #{@table_name} is missing #{field}:\n\n\t#{lines.join("\n\t")}\n"
end
|
ruby
|
{
"resource": ""
}
|
q6277
|
ModelSchema.SchemaError.dump_mismatch_diffs
|
train
|
# Renders the mismatched-element differences (actual vs. expected) for the
# given schema field.
#
# @param field [Symbol] the schema field (e.g. columns, indexes)
# @return [String, nil] a report section, or nil when nothing mismatches
def dump_mismatch_diffs(field)
  diffs = diffs_by_field_type(field, TYPE_MISMATCH)
  return if diffs.empty?
  entries = diffs.map do |diff|
    actual   = dump_single(field, diff[:db_generator], diff[:db_elem])
    expected = dump_single(field, diff[:exp_generator], diff[:exp_elem])
    "actual: #{actual}\n\texpected: #{expected}"
  end
  "Table #{@table_name} has mismatched #{field}:\n\n\t#{entries.join("\n\n\t")}\n"
end
|
ruby
|
{
"resource": ""
}
|
q6278
|
ModelSchema.SchemaError.to_s
|
train
|
def to_s
parts = FIELDS.flat_map do |field|
[dump_extra_diffs(field),
dump_missing_diffs(field),
dump_mismatch_diffs(field)]
end
[
"Table #{@table_name} does not match the expected schema.\n\n",
parts.compact.join("\n"),
"\nYou may disable schema checks by passing :disable => true to model_",
"schema or by setting the ENV variable #{DISABLE_MODEL_SCHEMA_KEY}=1.\n"
].join
end
|
ruby
|
{
"resource": ""
}
|
q6279
|
Jinx.Visitor.filter
|
train
|
# Returns a new visitor with the same options whose navigation applies the
# given filter block to each node and its children.
#
# @yield [node, children] selects which children to traverse
# @return a new filtered visitor
# @raise [ArgumentError] when no block is given
def filter
  unless block_given?
    raise ArgumentError.new("A filter block is not given to the visitor filter method")
  end
  self.class.new(@options) { |node| yield(node, node_children(node)) }
end
|
ruby
|
{
"resource": ""
}
|
q6280
|
Jinx.Visitor.node_children
|
train
|
# Resolves the given node's children via the navigator block.
#
# @param node the node to navigate from
# @return [Array] the compacted children; the shared empty array when the
#   navigator yields nil; a one-element array for a non-enumerable result
def node_children(node)
  kids = @navigator.call(node)
  if kids.nil?
    Array::EMPTY_ARRAY
  elsif Enumerable === kids
    kids.to_a.compact
  else
    [kids]
  end
end
|
ruby
|
{
"resource": ""
}
|
q6281
|
Jinx.Visitor.visit_root
|
train
|
# Visits the given root node, optionally excluding nodes that lie on a
# dependency cycle when the prune-cycles option is enabled.
#
# @param node the root node to visit
# @yield [node] the visit operation
# @return the result of visiting the root node
def visit_root(node, &operator)
  clear
  # Exclude cycles if the prune cycles flag is set.
  # NOTE(review): merge! is assumed to be available on @exclude (a project
  # extension if @exclude is a stock Set, which defines merge, not merge!)
  # — confirm against the collection extensions.
  @exclude.merge!(cyclic_nodes(node)) if @prune_cycle_flag
  # Visit the root node.
  result = visit_recursive(node, &operator)
  # Reset the exclusions if the prune cycles flag is set.
  @exclude.clear if @prune_cycle_flag
  result
end
|
ruby
|
{
"resource": ""
}
|
q6282
|
Jinx.Visitor.cyclic_nodes
|
train
|
# Collects the nodes which occur within a dependency cycle reachable from
# the given root. A probe visitor walks the graph; whenever a child is
# also one of its own ancestors, every node strictly between the two
# occurrences in the lineage lies on a cycle.
#
# @param root the node to probe from
# @return [Set] the nodes participating in a cycle
def cyclic_nodes(root)
  # Probe with the same options, minus cycle pruning itself (which would
  # recurse into this method).
  copts = @options.reject { |k, _v| k == :prune_cycle }
  cyclic = Set.new
  cycler = Visitor.new(copts) do |parent|
    children = @navigator.call(parent)
    # Look for a cycle back to each child.
    children.each do |child|
      index = cycler.lineage.index(child)
      if index then
        # The child is also an ancestor: add the nodes between the two
        # occurrences of the child in the lineage.
        # Fix: Set's destructive union is #merge — #merge! is not defined
        # on a stock Ruby Set and would raise NoMethodError here.
        cyclic.merge(cycler.lineage[(index + 1)..-1])
      end
    end
    children
  end
  cycler.visit(root)
  cyclic
end
|
ruby
|
{
"resource": ""
}
|
q6283
|
RichUnits.Numeric.duration
|
train
|
# Converts this number into a duration object.
#
# @param part [Symbol, nil] when a recognized calendar part, the number is
#   interpreted as a count of that part; otherwise as raw seconds
# @param klass [Class] the duration class to instantiate
# @return a new duration instance
def duration(part = nil, klass = Duration)
  case part
  when :years, :months, :weeks, :days, :hours, :minutes, :seconds
    klass.new(part => self)
  else
    klass.new(self)
  end
end
|
ruby
|
{
"resource": ""
}
|
q6284
|
RichUnits.Duration.seconds
|
train
|
# Returns the length in seconds of the given calendar part of this
# duration, or the total number of seconds when no recognized part is
# requested.
#
# @param part [Symbol, nil] :weeks, :days, :hours or :minutes
# @return [Numeric] the part's span in seconds, or the total seconds
def seconds(part = nil)
  case part
  when :weeks   then weeks   * WEEK
  when :days    then days    * DAY
  when :hours   then hours   * HOUR
  when :minutes then minutes * MINUTE
  else @seconds
  end
end
|
ruby
|
{
"resource": ""
}
|
q6285
|
RichUnits.Duration.to_s
|
train
|
def to_s
str = ''
each do |part, time|
# Skip any zero times.
next if time.zero?
# Concatenate the part of the time and the time itself.
str << "#{time} #{time == 1 ? part[0..-2] : part}, "
end
str.chomp(', ').sub(/(.+), (.+)/, '\1 and \2')
end
|
ruby
|
{
"resource": ""
}
|
q6286
|
MMETools.Config.dump
|
train
|
def dump(filename)
File.open(filename,'w') do |f|
YAML.dump(self.to_hash,f)
end
end
|
ruby
|
{
"resource": ""
}
|
q6287
|
ReindeerETL::Sources.MultiSource.each
|
train
|
# Iterates the merged rows of all sources. The first source provides the
# base rows; each subsequent source is joined onto them by the @key column,
# either merging whole rows or only the configured target columns.
#
# @yield [Hash] each merged row
# @raise [ReindeerETL::Errors::RecordInvalid] when a row lacks the join
#   key, or (with @expect_full_match) when a later source has no match
def each
  rows = []
  all_keys = Set.new
  @sources.each_with_index do |source, source_idx|
    # NOTE(review): first_row is assigned but never read — dead local.
    first_row = false
    source.each do |row|
      unless row.keys.include? @key
        # FIXME(review): message hard-codes "Path#1" even when a later
        # source is missing the key.
        raise ReindeerETL::Errors::RecordInvalid.new("Path#1 missing key: #{@key}")
      end
      if source_idx == 0 # first source
        rows << row
      else
        # Optional per-source target column list (offset past the base source).
        source_targets = @target_cols[source_idx - 1] unless @target_cols.nil?
        # Locate the base row carrying the same join-key value.
        rindex = rows.index{|r| r[@key] == row[@key] }
        if rindex.nil?
          if @expect_full_match
            raise ReindeerETL::Errors::RecordInvalid.new("Expected full match")
          else
            # No base row and partial matches allowed: skip this row.
            next
          end
        end
        if source_targets.nil? or source_targets.empty?
          # No target columns configured: merge the entire source row.
          rows[rindex] = rows[rindex].merge(row)
        else
          source_targets.each_with_index do |tar, sidx|
            underscored_tar = h_underscore_string tar
            if row.keys.map {|k| k[h_regex, 1] }.include? underscored_tar
              # The source row already carries the target column: copy it.
              k = row.keys.select{|k| k[h_regex, 1] == underscored_tar }.first
              hash = h_hash_maker tar, row[k]
              rows[rindex].merge!(hash)
            else
              # Otherwise derive the value via the namespaced mod class.
              val = Object
                .const_get("ReindeerETL::Mods::#{@namespace}::#{tar}")
                .get(row)
              rows[rindex].merge!(h_hash_maker(tar, val))
            end
          end
        end
      end
    end
  end
  rows.each {|r| yield r}
end
|
ruby
|
{
"resource": ""
}
|
q6288
|
LogCabin.SetCollection.find
|
train
|
# Looks up a module by name among the child collections, memoizing the
# result per name; falls back to the failure handler when nothing matches.
#
# @param name the module name to resolve
# @return the first matching child's result, or the failure value
def find(name)
  cache(name) do
    hit = @children.find { |child| safe_find(child, name) }
    hit || failure
  end
end
|
ruby
|
{
"resource": ""
}
|
q6289
|
OffTheGrid.User.add
|
train
|
# Registers this user with the grid engine: renders the user template into
# a temp file and submits it via `qconf -Auser`.
def add
  Tempfile.open do |f|
    f.puts render(Templates::User::ERB)
    f.flush
    system("qconf -Auser #{f.path}")
    # Pause so the grid engine can finish registering the user.
    sleep 5
  end
end
|
ruby
|
{
"resource": ""
}
|
q6290
|
Undecided.Decider.decide
|
train
|
# Evaluates the given boolean rule string against the supplied variable
# bindings.
#
# @param rule [String] the rule expression
# @param values [Hash] the variable bindings substituted into the rule
# @param strict [Boolean] passed through to the validator
# @return the evaluated result, or the error sentinel on any failure
def decide(rule, values, strict = true)
  # Work on copies so the caller's arguments are not mutated downstream.
  rule = rule.clone
  values = values.clone
  error unless Undecided::Evaluator.valid?(rule, values, strict)
  # Sanitize data
  # Eval rules and values after process it, with safe data
  final_expression = Converter.replacing_variables(rule, values)
  # SECURITY(review): eval of a substituted expression — unless Converter
  # fully sanitizes the inputs, untrusted rule/values can execute
  # arbitrary code. Confirm the sanitization guarantees.
  eval final_expression
rescue => e
  # NOTE(review): any StandardError is downgraded to the error result and
  # only reported via puts.
  puts e.message
  error
end
|
ruby
|
{
"resource": ""
}
|
q6291
|
XMSensu.XMClient.get_default_properties
|
train
|
# Builds the default notification property hash from a Sensu event hash.
#
# @param event [Hash] a Sensu event with 'client', 'check' and 'timestamp'
# @return [Hash] the symbol-keyed property payload
def get_default_properties(event)
  client, check = event.values_at('client', 'check')
  props = {}
  props[:server_name]   = client['name']
  props[:server_ip]     = client['address']
  props[:subscriptions] = client['subscriptions'].join(';')
  props[:environment]   = client['environment']
  props[:check_name]    = check['name']
  props[:check_command] = check['command']
  props[:check_output]  = check['output']
  props[:timestamp]     = event['timestamp'].inspect
  props
end
|
ruby
|
{
"resource": ""
}
|
q6292
|
Garcon.SecretBag.data_bag_config_for
|
train
|
# Fetches the configuration for the given source from the encrypted data
# bag for the environment.
#
# @param environment [String] the Chef environment
# @param source [String] the configuration key within the data bag item
# @return [Hash] the source's config; the whole item when source is the
#   data bag name itself; an empty hash otherwise
def data_bag_config_for(environment, source)
  bag = encrypted_data_bag_for(environment, DATA_BAG)
  return bag[source] if bag.has_key?(source)
  source == DATA_BAG ? bag : {}
end
|
ruby
|
{
"resource": ""
}
|
q6293
|
Garcon.SecretBag.encrypted_data_bag_for
|
train
|
# Loads (and memoizes) the encrypted data bag item for the environment,
# falling back to the wildcard item and then to an empty hash.
#
# @param environment [String] the Chef environment
# @param data_bag [String] the data bag name
# @return [Hash] the cached or freshly loaded data bag item
def encrypted_data_bag_for(environment, data_bag)
  @encrypted_data_bags = {} unless @encrypted_data_bags
  cached = encrypted_data_bags[data_bag]
  return get_from_data_bags_cache(data_bag) if cached
  item = encrypted_data_bag_item(data_bag, environment)
  item ||= encrypted_data_bag_item(data_bag, WILDCARD)
  item ||= {}
  @encrypted_data_bags[data_bag] = item
  item
end
|
ruby
|
{
"resource": ""
}
|
q6294
|
AttachmentMagic.ClassMethods.copy_to_temp_file
|
train
|
# Copies the given file into a fresh (closed) Tempfile under the
# configured tempfile directory.
#
# @param file [String] the source file path
# @param temp_base_name [String] the tempfile name prefix
# @return [Tempfile] the closed tempfile holding a copy of the source
def copy_to_temp_file(file, temp_base_name)
  tmp = Tempfile.new(temp_base_name, AttachmentMagic.tempfile_path)
  tmp.close
  FileUtils.cp(file, tmp.path)
  tmp
end
|
ruby
|
{
"resource": ""
}
|
q6295
|
AttachmentMagic.ClassMethods.write_to_temp_file
|
train
|
# Writes the given data to a fresh Tempfile (in binary mode) under the
# configured tempfile directory.
#
# @param data [String] the bytes to write
# @param temp_base_name [String] the tempfile name prefix
# @return [Tempfile] the closed tempfile containing the data
def write_to_temp_file(data, temp_base_name)
  tmp = Tempfile.new(temp_base_name, AttachmentMagic.tempfile_path)
  tmp.binmode
  tmp.write(data)
  tmp.close
  tmp
end
|
ruby
|
{
"resource": ""
}
|
q6296
|
AttachmentMagic.InstanceMethods.uploaded_data=
|
train
|
# Assigns uploaded file data to this attachment. Accepts either an
# uploaded-file object (anything responding to content_type) or a
# params-style hash with 'size', 'content_type', 'filename' and
# 'tempfile' entries.
#
# @param file_data the uploaded file object or attribute hash
# @return [nil] when the upload is empty or blank
def uploaded_data=(file_data)
  if file_data.respond_to?(:content_type)
    # Ignore empty uploads.
    return nil if file_data.size == 0
    self.content_type = detect_mimetype(file_data)
    # Only record the original filename when this model exposes one.
    self.filename = file_data.original_filename if respond_to?(:filename)
  else
    # Hash form: pull the attributes out, then continue with the tempfile.
    return nil if file_data.blank? || file_data['size'] == 0
    self.content_type = file_data['content_type']
    self.filename = file_data['filename']
    file_data = file_data['tempfile']
  end
  if file_data.is_a?(StringIO)
    # In-memory upload: rewind before reading the whole payload.
    file_data.rewind
    set_temp_data file_data.read
  else
    # On-disk upload: remember the tempfile path for later processing.
    self.temp_paths.unshift file_data.tempfile.path
  end
end
|
ruby
|
{
"resource": ""
}
|
q6297
|
AttachmentMagic.InstanceMethods.attachment_attributes_valid?
|
train
|
# Validates the size and content_type attributes against the configured
# allowed values, adding an inclusion error for each violation.
def attachment_attributes_valid?
  [:size, :content_type].each do |attr_name|
    enum = attachment_options[attr_name]
    next if enum.nil? || enum.include?(send(attr_name))
    errors.add(attr_name, I18n.translate("activerecord.errors.messages.inclusion", attr_name => enum))
  end
end
|
ruby
|
{
"resource": ""
}
|
q6298
|
ActionCommand.PrettyPrintLogAction.execute_internal
|
train
|
# Pretty-prints the parsed log stream: root command output is routed to
# the sequence tracker, everything else to the generic handler, and any
# sequences still open at end of input are flushed.
#
# @param _result unused; required by the action interface
def execute_internal(_result)
  parser = LogParser.new(@source, @sequence)
  sequences = {}
  # Track messages by sequence; a completed sequence prints as one unit.
  loop do
    item = LogMessage.new
    break unless parser.next(item)
    if item.kind?(ActionCommand::LOG_KIND_COMMAND_OUTPUT) && item.root?
      process_output(sequences, item)
    else
      process_other(sequences, item)
    end
  end
  # print out any incomplete sequences
  print_sequences(sequences)
end
|
ruby
|
{
"resource": ""
}
|
q6299
|
Mutter.Mutterer.load
|
train
|
# Loads a style sheet by name or path. A missing .yml/.yaml extension is
# appended, and names that do not resolve to an existing file are looked
# up in the bundled styles directory.
#
# @param styles [String] a style sheet name or file path
# @return [Hash] style name (Symbol) => { :match =>, :style => } entries
def load(styles)
  path = styles =~ /\.ya?ml$/ ? styles : "#{styles}.yml"
  path = File.join(File.dirname(__FILE__), "styles", path) unless File.exist?(path)
  YAML.load_file(path).inject({}) do |acc, (key, spec)|
    acc.merge(key.to_sym => { :match => spec['match'], :style => spec['style'] })
  end
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.