_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
# Reads the instance's PID file and returns the PID as an Integer.
# Returns nil when the file does not exist or is empty.
def pid_from_file
  return nil unless File.exist?(pid_file_path)
  contents = File.read(pid_file_path)
  contents.length > 0 ? contents.strip.to_i : nil
end
"resource": ""
} |
# Starts this process instance.
#
# Returns false when the instance is stopping, nil when it is already
# running, otherwise spawns the configured command (the final expression,
# the @started_at assignment, is the return value on success).
def start
  if stopping?
    Procodile.log(@process.log_color, description, "Process is stopped/stopping therefore cannot be started again.")
    return false
  end
  # Pick up any PID change written by the process itself before deciding
  # whether it is already running.
  update_pid
  if running?
    Procodile.log(@process.log_color, description, "Already running with PID #{@pid}")
    nil
  else
    # Port selection, in priority order: an explicit supervisor allocation
    # (0 means "pick randomly"), then proxy-driven random allocation, then
    # sequential allocation from a configured base port.
    if @supervisor.run_options[:port_allocations] && chosen_port = @supervisor.run_options[:port_allocations][@process.name]
      if chosen_port == 0
        allocate_port
      else
        @port = chosen_port
        Procodile.log(@process.log_color, description, "Assigned #{chosen_port} to process")
      end
    elsif @process.proxy? && @supervisor.tcp_proxy
      # Allocate a port randomly if a proxy is needed
      allocate_port
    elsif @process.allocate_port_from && @process.restart_mode != 'start-term'
      # Allocate ports to this process sequentially from the starting port,
      # skipping ports already held by running siblings of the same process.
      allocated_ports = (@supervisor.processes[@process] ? @supervisor.processes[@process].select(&:running?) : []).map(&:port)
      proposed_port = @process.allocate_port_from
      until @port
        unless allocated_ports.include?(proposed_port)
          @port = proposed_port
        end
        proposed_port += 1
      end
    end
    # Decide where the child's stdout/stderr goes: appended straight to a
    # log file (io stays nil), or into a pipe the supervisor reads from.
    if self.process.log_path && @supervisor.run_options[:force_single_log] != true
      log_destination = File.open(self.process.log_path, 'a')
      io = nil
    else
      reader, writer = IO.pipe
      log_destination = writer
      io = reader
    end
    @tag = @supervisor.tag.dup if @supervisor.tag
    Dir.chdir(@process.config.root)
    # Spawn in its own process group, with rbenv variables stripped so the
    # child resolves its own Ruby rather than the supervisor's.
    without_rbenv do
      @pid = ::Process.spawn(environment_variables, @process.command, :out => log_destination, :err => log_destination, :pgroup => true)
    end
    log_destination.close
    File.open(pid_file_path, 'w') { |f| f.write(@pid.to_s + "\n") }
    @supervisor.add_instance(self, io)
    # Detach so the supervisor never accumulates zombie children.
    ::Process.detach(@pid)
    Procodile.log(@process.log_color, description, "Started with PID #{@pid}" + (@tag ? " (tagged with #{@tag})" : ''))
    if self.process.log_path && io.nil?
      Procodile.log(@process.log_color, description, "Logging to #{self.process.log_path}")
    end
    @started_at = Time.now
  end
end
"resource": ""
} |
# Records when stopping began and sends the configured termination signal
# to the process, if it is still running.
def stop
  @stopping = Time.now
  update_pid
  unless self.running?
    return Procodile.log(@process.log_color, description, "Process already stopped")
  end
  Procodile.log(@process.log_color, description, "Sending #{@process.term_signal} to #{@pid}")
  ::Process.kill(@process.term_signal, pid)
end
"resource": ""
} |
# Restarts the instance according to the process' restart mode.
#
# * 'usr1'/'usr2' — signal the running process in place (returns self);
#   if it is no longer running, a replacement instance is started instead.
# * 'start-term'  — start a replacement first, then stop this instance.
# * 'term-start'  — stop this instance first, then start the replacement
#   (on the same port) once this one has fully exited.
#
# Returns self or the newly created instance, depending on the mode.
def restart
  Procodile.log(@process.log_color, description, "Restarting using #{@process.restart_mode} mode")
  update_pid
  case @process.restart_mode
  when 'usr1', 'usr2'
    if running?
      ::Process.kill(@process.restart_mode.upcase, @pid)
      @tag = @supervisor.tag if @supervisor.tag
      Procodile.log(@process.log_color, description, "Sent #{@process.restart_mode.upcase} signal to process #{@pid}")
    else
      Procodile.log(@process.log_color, description, "Process not running already. Starting it.")
      on_stop
      new_instance = @process.create_instance(@supervisor)
      new_instance.port = self.port
      new_instance.start
    end
    self
  when 'start-term'
    # Note: the replacement does not inherit this instance's port, since
    # both run concurrently for a short period.
    new_instance = @process.create_instance(@supervisor)
    new_instance.start
    stop
    new_instance
  when 'term-start'
    stop
    new_instance = @process.create_instance(@supervisor)
    new_instance.port = self.port
    # Wait in the background for this instance to exit before starting the
    # replacement on the same port.
    Thread.new do
      sleep 0.5 while running?
      new_instance.start
    end
    new_instance
  end
end
"resource": ""
} |
# Re-reads the PID file and adopts its PID when it differs from the one
# held in memory, refreshing @started_at from the file's mtime.
# Returns true when the PID changed, false otherwise.
def update_pid
  file_pid = self.pid_from_file
  return false unless file_pid && file_pid != @pid
  @pid = file_pid
  @started_at = File.mtime(self.pid_file_path)
  Procodile.log(@process.log_color, description, "PID file changed. Updated pid to #{@pid}")
  true
end
"resource": ""
} |
# Periodic health check for the instance.
#
# No-op for instances already marked as failed. If the process has
# stopped, it is respawned — subject to the supervisor's policy and the
# process' max_respawns limit — otherwise the instance is marked failed
# and removed from management.
def check(options = {})
  return if failed?
  if self.running?
    # Everything is OK. The process is running.
    true
  else
    # If the process isn't running any more, update the PID in our memory from
    # the file in case the process has changed itself.
    return check if update_pid
    if @supervisor.allow_respawning?
      if can_respawn?
        Procodile.log(@process.log_color, description, "Process has stopped. Respawning...")
        start
        add_respawn
      elsif respawns >= @process.max_respawns
        # Respawn budget exhausted: give up on this instance permanently.
        Procodile.log(@process.log_color, description, "\e[41;37mWarning:\e[0m\e[31m this process has been respawned #{respawns} times and keeps dying.\e[0m")
        Procodile.log(@process.log_color, description, "It will not be respawned automatically any longer and will no longer be managed.".color(31))
        @failed = Time.now
        tidy
      end
    else
      Procodile.log(@process.log_color, description, "Process has stopped. Respawning not available.")
      @failed = Time.now
      tidy
    end
  end
end
"resource": ""
} |
# Allocates a random free port (20000-29999) for this instance.
#
# Tries up to +max_attempts+ random candidates, verifying each with
# #port_available? before assigning it to @port. Returns the chosen port.
# Raises Procodile::Error when no free port could be found.
def allocate_port(max_attempts = 10)
  attempts = 0
  until @port
    attempts += 1
    possible_port = rand(10000) + 20000
    if self.port_available?(possible_port)
      Procodile.log(@process.log_color, description, "Allocated port as #{possible_port}")
      return @port = possible_port
    elsif attempts >= max_attempts
      # Bug fix: `instance` is not defined in this scope (the original
      # raised NameError on the failure path); report the process name.
      raise Procodile::Error, "Couldn't allocate port for #{@process.name}"
    end
  end
end
"resource": ""
} |
# Returns true when +port+ can be bound on 127.0.0.1 using the process'
# configured network protocol ('tcp' or 'udp'); false when the address is
# already in use. Raises Procodile::Error for any other protocol value.
def port_available?(port)
  case @process.network_protocol
  when 'tcp'
    probe = TCPServer.new('127.0.0.1', port)
    probe.close
    true
  when 'udp'
    probe = UDPSocket.new
    probe.bind('127.0.0.1', port)
    probe.close
    true
  else
    raise Procodile::Error, "Invalid network_protocol '#{@process.network_protocol}'"
  end
rescue Errno::EADDRINUSE => e
  false
end
"resource": ""
} |
# Runs the given block with rbenv's injected environment removed.
#
# All RBENV_* variables are unset and rbenv version shim directories are
# stripped from PATH, so a child spawned inside the block resolves its own
# Ruby rather than the supervisor's. The original values (including the
# untouched PATH) are restored in the ensure clause even if the block raises.
def without_rbenv(&block)
  previous_environment = ENV.select { |k,v| k =~ /\A(RBENV\_)/ }
  if previous_environment.size > 0
    previous_environment.each { |key, value| ENV[key] = nil }
    # Stash the original PATH so the ensure clause restores it too.
    previous_environment['PATH'] = ENV['PATH']
    ENV['PATH'] = ENV['PATH'].split(':').select { |p| !(p =~ /\.rbenv\/versions/) }.join(':')
  end
  yield
ensure
  previous_environment.each do |key, value|
    ENV[key] = value
  end
end
"resource": ""
} |
# Builds the complete environment hash for this process: global variables,
# overridden by per-process 'env' options, overridden by local per-process
# 'env' options. Keys and values are stringified for Process.spawn.
def environment_variables
  name_options = @config.process_options[@name]
  local_options = @config.local_process_options[@name]
  process_vars = name_options ? name_options['env'] || {} : {}
  process_local_vars = local_options ? local_options['env'] || {} : {}
  merged = @config.environment_variables.merge(process_vars).merge(process_local_vars)
  merged.each_with_object({}) do |(key, value), hash|
    hash[key.to_s] = value.to_s
  end
end
"resource": ""
} |
# Serializes this process' configuration into a plain hash (as used by
# the supervisor's status output).
def to_hash
  {
    :name => name,
    :log_color => log_color,
    :quantity => quantity,
    :max_respawns => max_respawns,
    :respawn_window => respawn_window,
    :command => command,
    :restart_mode => restart_mode,
    :log_path => log_path,
    :removed => removed ? true : false,
    :proxy_port => proxy_port,
    :proxy_address => proxy_address
  }
end
"resource": ""
} |
# When the application choice is ambiguous, returns a hash mapping a menu
# index to each candidate's name (falling back to its root path) so a
# caller can present a picker. Returns an empty hash otherwise.
def app_options
  return {} unless ambiguous?
  @global_options.each_with_index.each_with_object({}) do |(option, index), hash|
    hash[index] = option['name'] || option['root']
  end
end
"resource": ""
} |
# Composes this relation with the mappers registered under +names+,
# producing a composite pipeline that applies them left-to-right.
def map_with(*names)
  names.map { |name| mappers[name] }.reduce(self) do |pipeline, mapper|
    composite_class.new(pipeline, mapper)
  end
end
"resource": ""
} |
# Returns the plugin registry for the given plugin type, scoped to the
# adapter where applicable. Unknown types yield nil.
def plugins_for(type, adapter)
  case type
  when :configuration
    configuration
  when :command
    commands.adapter(adapter)
  when :mapper
    mappers.adapter(adapter)
  when :relation
    relations.adapter(adapter)
  when :schema
    schemas.adapter(adapter)
  end
end
"resource": ""
} |
q16014 | ROM.PluginRegistryBase.register | train | def register(name, mod, options)
elements[name] = plugin_type.new(mod, options)
end | ruby | {
"resource": ""
} |
# Returns the name under which +plugin+ was registered, or nil when it is
# not registered. Identity comparison (#equal?) is used on purpose.
def plugin_name(plugin)
  name, _ = elements.find { |(_, registered)| registered.equal?(plugin) }
  name
end
"resource": ""
} |
# Fetches a plugin by name, preferring the adapter-specific registry and
# falling back to the :default registry. Raises UnknownPluginError when
# neither registry knows the name.
def fetch(name, adapter_name = :default)
  plugin = adapter(adapter_name)[name] || adapter(:default)[name]
  plugin || raise(UnknownPluginError, name)
end
"resource": ""
} |
q16017 | ROM.AutoRegistration.load_entities | train | def load_entities(entity)
Dir[globs[entity]].map do |file|
require file
klass_name =
case namespace
when String
AutoRegistrationStrategies::CustomNamespace.new(
namespace: namespace, file: file, directory: directory
).call
when TrueClass
AutoRegistrationStrategies::WithNamespace.new(
file: file, directory: directory
).call
when FalseClass
AutoRegistrationStrategies::NoNamespace.new(
file: file, directory: directory, entity: component_dirs.fetch(entity)
).call
end
Inflector.constantize(klass_name)
end
end | ruby | {
"resource": ""
} |
q16018 | ROM.Finalize.run! | train | def run!
mappers = load_mappers
relations = load_relations(mappers)
commands = load_commands(relations)
container = Container.new(gateways, relations, mappers, commands)
container.freeze
container
end | ruby | {
"resource": ""
} |
q16019 | ROM.Finalize.load_relations | train | def load_relations(mappers)
global_plugins = plugins.select { |p| p.relation? || p.schema? }
FinalizeRelations.new(
gateways,
relation_classes,
mappers: mappers,
plugins: global_plugins,
notifications: notifications
).run!
end | ruby | {
"resource": ""
} |
q16020 | ROM.Changeset.new | train | def new(relation, new_options = EMPTY_HASH)
self.class.new(relation, new_options.empty? ? options : options.merge(new_options))
end | ruby | {
"resource": ""
} |
q16021 | ROM.Notifications.register_event | train | def register_event(id, info = EMPTY_HASH)
Notifications.events[id] = Event.new(id, info)
end | ruby | {
"resource": ""
} |
q16022 | ROM.Plugin.apply_to | train | def apply_to(klass, options = EMPTY_HASH)
if mod.respond_to?(:new)
klass.send(:include, mod.new(options))
else
klass.send(:include, mod)
end
end | ruby | {
"resource": ""
} |
q16023 | ROM.Schema.project | train | def project(*names)
new(names.map { |name| name.is_a?(Symbol) ? self[name] : name })
end | ruby | {
"resource": ""
} |
# Returns a new schema in which every attribute whose name appears in
# +mapping+ is aliased to the mapped name; other attributes are kept as-is.
def rename(mapping)
  renamed = map do |attribute|
    new_name = mapping[attribute.name]
    new_name ? attribute.aliased(new_name) : attribute
  end
  new(renamed)
end
"resource": ""
} |
q16025 | ROM.Schema.wrap | train | def wrap(prefix = name.dataset)
new(map { |attr| attr.wrapped? ? attr : attr.wrapped(prefix) })
end | ruby | {
"resource": ""
} |
# Returns a new schema with duplicate attributes removed. Uniqueness is
# decided by the given block, or by attribute name when no block is given.
def uniq(&block)
  block ||= :name.to_proc
  new(attributes.uniq(&block))
end
"resource": ""
} |
q16027 | ROM.Schema.finalize_attributes! | train | def finalize_attributes!(gateway: nil, relations: nil)
inferrer.(self, gateway).each { |key, value| set!(key, value) }
yield if block_given?
initialize_primary_key_names
self
end | ruby | {
"resource": ""
} |
q16028 | ROM.Schema.to_output_hash | train | def to_output_hash
HASH_SCHEMA.schema(
map { |attr| [attr.key, attr.to_read_type] }.to_h
)
end | ruby | {
"resource": ""
} |
q16029 | ROM.Schema.to_input_hash | train | def to_input_hash
HASH_SCHEMA.schema(
map { |attr| [attr.name, attr.to_write_type] }.to_h
)
end | ruby | {
"resource": ""
} |
q16030 | ROM.AutoCurry.auto_curry | train | def auto_curry(name, &block)
arity = instance_method(name).arity
return unless public_instance_methods.include?(name) && arity != 0
mod = Module.new
mod.module_eval do
define_method(name) do |*args, &mblock|
response =
if arity < 0 || arity == args.size
super(*args, &mblock)
else
self.class.curried.new(self, view: name, curry_args: args, arity: arity)
end
if block
response.instance_exec(&block)
else
response
end
end
end
auto_curried_methods << name
prepend(mod)
end | ruby | {
"resource": ""
} |
q16031 | ROM.CommandRegistry.[] | train | def [](*args)
if args.size.equal?(1)
command = super
mapper = options[:mapper]
if mapper
command.curry >> mapper
else
command
end
else
cache.fetch_or_store(args.hash) { compiler.(*args) }
end
end | ruby | {
"resource": ""
} |
q16032 | ROM.ConfigurationDSL.relation | train | def relation(name, options = EMPTY_HASH, &block)
klass_opts = { adapter: default_adapter }.merge(options)
klass = Relation.build_class(name, klass_opts)
klass.schema_opts(dataset: name, relation: name)
klass.class_eval(&block) if block
register_relation(klass)
klass
end | ruby | {
"resource": ""
} |
q16033 | ROM.ConfigurationDSL.commands | train | def commands(name, &block)
register_command(*CommandDSL.new(name, default_adapter, &block).command_classes)
end | ruby | {
"resource": ""
} |
q16034 | ROM.ConfigurationDSL.plugin | train | def plugin(adapter, spec, &block)
type, name = spec.flatten(1)
plugin = plugin_registry.send(type).adapter(adapter).fetch(name) { plugin_registry.send(type).fetch(name) }
if block
register_plugin(plugin.configure(&block))
else
register_plugin(plugin)
end
end | ruby | {
"resource": ""
} |
q16035 | ROM.Command.call | train | def call(*args, &block)
tuples =
if hooks?
prepared =
if curried?
apply_hooks(before_hooks, *(curry_args + args))
else
apply_hooks(before_hooks, *args)
end
result = prepared ? execute(prepared, &block) : execute(&block)
if curried?
if args.size > 0
apply_hooks(after_hooks, result, *args)
elsif curry_args.size > 1
apply_hooks(after_hooks, result, curry_args[1])
else
apply_hooks(after_hooks, result)
end
else
apply_hooks(after_hooks, result, *args[1..args.size-1])
end
else
execute(*(curry_args + args), &block)
end
if one?
tuples.first
else
tuples
end
end | ruby | {
"resource": ""
} |
q16036 | ROM.Command.curry | train | def curry(*args)
if curry_args.empty? && args.first.is_a?(Graph::InputEvaluator)
Lazy[self].new(self, *args)
else
self.class.build(relation, { **options, curry_args: args })
end
end | ruby | {
"resource": ""
} |
# Maps the given tuples through the mapper block.
#
# A "tuple set" that responds to #merge (i.e. a single Hash tuple) is
# passed to the mapper directly; anything else is treated as a collection
# and mapped per element. With no block an Enumerator is returned.
#
# @param tuples [Hash, Enumerable] a single tuple or a collection of tuples
# @return [Object, Array, Enumerator]
def map_input_tuples(tuples, &mapper)
  # Bug fix: the enumerator must reference this method's own name —
  # :with_input_tuples does not exist, so iterating the returned
  # enumerator raised NoMethodError.
  return enum_for(:map_input_tuples, tuples) unless mapper
  if tuples.respond_to? :merge
    mapper[tuples]
  else
    tuples.map(&mapper)
  end
end
"resource": ""
} |
q16038 | ROM.Command.apply_hooks | train | def apply_hooks(hooks, tuples, *args)
hooks.reduce(tuples) do |a, e|
if e.is_a?(Hash)
hook_meth, hook_args = e.to_a.flatten(1)
__send__(hook_meth, a, *args, **hook_args)
else
__send__(e, a, *args)
end
end
end | ruby | {
"resource": ""
} |
q16039 | ROM.Command.wrap_dataset | train | def wrap_dataset(dataset)
if relation.is_a?(Relation::Composite)
relation.new(dataset).to_a
else
dataset
end
end | ruby | {
"resource": ""
} |
# Materializes the relation and pipes the resulting tuples through each
# transformer in registration order.
def call(relation)
  transformers.reduce(relation.to_a) { |tuples, transformer| transformer.call(tuples) }
end
"resource": ""
} |
q16041 | ROM.CommandCompiler.call | train | def call(*args)
cache.fetch_or_store(args.hash) do
type, adapter, ast, plugins, plugins_options, meta = args
compiler = with(
id: type,
adapter: adapter,
plugins: Array(plugins),
plugins_options: plugins_options,
meta: meta
)
graph_opts = compiler.visit(ast)
command = ROM::Commands::Graph.build(registry, graph_opts)
if command.graph?
CommandProxy.new(command)
elsif command.lazy?
command.unwrap
else
command
end
end
end | ruby | {
"resource": ""
} |
q16042 | ROM.CommandCompiler.register_command | train | def register_command(rel_name, type, meta, parent_relation = nil)
relation = relations[rel_name]
type.create_class(rel_name, type) do |klass|
klass.result(meta.fetch(:combine_type, result))
if meta[:combine_type]
setup_associates(klass, relation, meta, parent_relation)
end
plugins.each do |plugin|
plugin_options = plugins_options.fetch(plugin) { EMPTY_HASH }
klass.use(plugin, plugin_options)
end
gateway = gateways[relation.gateway]
notifications.trigger(
'configuration.commands.class.before_build',
command: klass, gateway: gateway, dataset: relation.dataset, adapter: adapter
)
klass.extend_for_relation(relation) if klass.restrictable
registry[rel_name][type] = klass.build(relation, input: relation.input_schema)
end
end | ruby | {
"resource": ""
} |
q16043 | ROM.CommandCompiler.setup_associates | train | def setup_associates(klass, relation, meta, parent_relation)
assoc_name =
if relation.associations.key?(parent_relation)
parent_relation
else
singular_name = Inflector.singularize(parent_relation).to_sym
singular_name if relation.associations.key?(singular_name)
end
if assoc_name
klass.associates(assoc_name)
else
klass.associates(parent_relation)
end
end | ruby | {
"resource": ""
} |
q16044 | ROM.Environment.normalize_gateways | train | def normalize_gateways(gateways_config)
gateways_config.each_with_object(map: {}, gateways: {}) do |(name, spec), hash|
identifier, *args = Array(spec)
if identifier.is_a?(Gateway)
gateway = identifier
else
gateway = Gateway.setup(identifier, *args.flatten)
end
hash[:map][gateway] = name
hash[:gateways][name] = gateway
end
end | ruby | {
"resource": ""
} |
q16045 | ROM.Attribute.meta | train | def meta(opts = nil)
if opts
self.class.new(type.meta(opts), options)
else
type.meta
end
end | ruby | {
"resource": ""
} |
# Human-readable representation including the primitive type name, the
# attribute name, and all meta/options pairs (the redundant :type and
# :name option keys are omitted).
def inspect
  extra_options = options.reject { |key| %i[type name].include?(key) }
  pairs = meta.merge(extra_options).map { |key, value| "#{key}=#{value.inspect}" }
  %(#<#{self.class}[#{type.name}] name=#{name.inspect} #{pairs.join(' ')}>)
end
"resource": ""
} |
q16047 | ROM.Attribute.optional | train | def optional
sum = self.class.new(super, options)
read? ? sum.meta(read: meta[:read].optional) : sum
end | ruby | {
"resource": ""
} |
q16048 | ROM.Relation.each | train | def each
return to_enum unless block_given?
if auto_map?
mapper.(dataset.map { |tuple| output_schema[tuple] }).each { |struct| yield(struct) }
else
dataset.each { |tuple| yield(output_schema[tuple]) }
end
end | ruby | {
"resource": ""
} |
q16049 | ROM.Relation.node | train | def node(name)
assoc = associations[name]
other = assoc.node
other.eager_load(assoc)
end | ruby | {
"resource": ""
} |
q16050 | ROM.Relation.eager_load | train | def eager_load(assoc)
relation = assoc.prepare(self)
if assoc.override?
relation.(assoc)
else
relation.preload_assoc(assoc)
end
end | ruby | {
"resource": ""
} |
q16051 | ROM.Relation.new | train | def new(dataset, new_opts = EMPTY_HASH)
opts =
if new_opts.empty?
options
elsif new_opts.key?(:schema)
options.merge(new_opts).reject { |k, _| k == :input_schema || k == :output_schema }
else
options.merge(new_opts)
end
self.class.new(dataset, opts)
end | ruby | {
"resource": ""
} |
q16052 | ROM.Relation.with | train | def with(opts)
new_options =
if opts.key?(:meta)
opts.merge(meta: meta.merge(opts[:meta]))
else
opts
end
new(dataset, options.merge(new_options))
end | ruby | {
"resource": ""
} |
q16053 | ROM.Relation.map_to | train | def map_to(klass, **opts)
with(opts.merge(auto_map: false, auto_struct: true, meta: { model: klass }))
end | ruby | {
"resource": ""
} |
q16054 | ROM.SchemaPlugin.apply_to | train | def apply_to(schema, options = EMPTY_HASH)
mod.apply(schema, options) if mod.respond_to?(:apply)
end | ruby | {
"resource": ""
} |
q16055 | Vips.Operation.get_construct_args | train | def get_construct_args
args = []
argument_map do |pspec, argument_class, argument_instance|
flags = argument_class[:flags]
if (flags & ARGUMENT_CONSTRUCT) != 0
# names can include - as punctuation, but we always use _ in
# Ruby
name = pspec[:name].tr("-", "_")
args << [name, flags]
end
end
return args
end | ruby | {
"resource": ""
} |
q16056 | Vips.Operation.set | train | def set name, value, match_image = nil, flags = 0
gtype = get_typeof name
if gtype == IMAGE_TYPE
value = Operation::imageize match_image, value
if (flags & ARGUMENT_MODIFY) != 0
# make sure we have a unique copy
value = value.copy.copy_memory
end
elsif gtype == ARRAY_IMAGE_TYPE
value = value.map {|x| Operation::imageize match_image, x}
end
super name, value
end | ruby | {
"resource": ""
} |
# Dispatches a binary operation that takes an enum argument: the
# two-image form is used when +other+ is an image, otherwise the
# "_const" variant with the scalar operand.
def call_enum(name, other, enum)
  if other.is_a?(Vips::Image)
    Vips::Operation.call name.to_s, [self, other, enum]
  else
    Vips::Operation.call "#{name}_const", [self, enum, other]
  end
end
"resource": ""
} |
q16058 | Vips.Image.new_from_image | train | def new_from_image value
pixel = (Vips::Image.black(1, 1) + value).cast(format)
image = pixel.embed 0, 0, width, height, extend: :copy
image.copy interpretation: interpretation,
xres: xres, yres: yres, xoffset: xoffset, yoffset: yoffset
end | ruby | {
"resource": ""
} |
q16059 | Vips.Image.write_to_memory | train | def write_to_memory
len = Vips::SizeStruct.new
ptr = Vips::vips_image_write_to_memory self, len
# wrap up as an autopointer
ptr = FFI::AutoPointer.new(ptr, GLib::G_FREE)
ptr.get_bytes 0, len[:value]
end | ruby | {
"resource": ""
} |
q16060 | Vips.Image.get_typeof | train | def get_typeof name
# on libvips before 8.5, property types must be searched first,
# since vips_image_get_typeof returned built-in enums as int
unless Vips::at_least_libvips?(8, 5)
gtype = parent_get_typeof name
return gtype if gtype != 0
end
Vips::vips_image_get_typeof self, name
end | ruby | {
"resource": ""
} |
q16061 | Vips.Image.get | train | def get name
# with old libvips, we must fetch properties (as opposed to
# metadata) via VipsObject
unless Vips::at_least_libvips?(8, 5)
return super if parent_get_typeof(name) != 0
end
gvalue = GObject::GValue.alloc
result = Vips::vips_image_get self, name, gvalue
raise Vips::Error if result != 0
gvalue.get
end | ruby | {
"resource": ""
} |
q16062 | Vips.Image.get_fields | train | def get_fields
# vips_image_get_fields() was added in libvips 8.5
return [] unless Vips.respond_to? :vips_image_get_fields
array = Vips::vips_image_get_fields self
names = []
p = array
until (q = p.read_pointer).null?
names << q.read_string
GLib::g_free q
p += FFI::Type::POINTER.size
end
GLib::g_free array
names
end | ruby | {
"resource": ""
} |
q16063 | Vips.Image.set_type | train | def set_type gtype, name, value
gvalue = GObject::GValue.alloc
gvalue.init gtype
gvalue.set value
Vips::vips_image_set self, name, gvalue
end | ruby | {
"resource": ""
} |
q16064 | Vips.Image.draw_point | train | def draw_point ink, left, top, opts = {}
draw_rect ink, left, top, 1, 1, opts
end | ruby | {
"resource": ""
} |
q16065 | Vips.Image.- | train | def - other
other.is_a?(Vips::Image) ?
subtract(other) : linear(1, Image::smap(other) {|x| x * -1})
end | ruby | {
"resource": ""
} |
q16066 | Vips.Image./ | train | def / other
other.is_a?(Vips::Image) ?
divide(other) : linear(Image::smap(other) {|x| 1.0 / x}, 0)
end | ruby | {
"resource": ""
} |
# Band slicing: image[0] extracts a single band, image[1..2] extracts a
# range of bands. Any other index type raises Vips::Error.
def [] index
  case index
  when Range
    extract_band index.begin, n: index.size
  when Numeric
    extract_band index
  else
    raise Vips::Error, "[] index is not range or numeric."
  end
end
"resource": ""
} |
q16068 | Vips.Image.to_a | train | def to_a
# we render the image to a big string, then unpack
# as a Ruby array of the correct type
memory = write_to_memory
# make the template for unpack
template = {
:char => 'c',
:uchar => 'C',
:short => 's_',
:ushort => 'S_',
:int => 'i_',
:uint => 'I_',
:float => 'f',
:double => 'd',
:complex => 'f',
:dpcomplex => 'd'
}[format] + '*'
# and unpack into something like [1, 2, 3, 4 ..]
array = memory.unpack(template)
# gather band elements together
pixel_array = array.each_slice(bands).to_a
# build rows
row_array = pixel_array.each_slice(width).to_a
return row_array
end | ruby | {
"resource": ""
} |
q16069 | Vips.Image.bandjoin | train | def bandjoin other
unless other.is_a? Array
other = [other]
end
# if other is just Numeric, we can use bandjoin_const
not_all_real = !other.all?{|x| x.is_a? Numeric}
if not_all_real
Vips::Image.bandjoin([self] + other)
else
bandjoin_const other
end
end | ruby | {
"resource": ""
} |
q16070 | Vips.Image.composite | train | def composite overlay, mode, opts = {}
unless overlay.is_a? Array
overlay = [overlay]
end
unless mode.is_a? Array
mode = [mode]
end
mode = mode.map do |x|
GObject::GValue.from_nick Vips::BLEND_MODE_TYPE, x
end
Vips::Image.composite([self] + overlay, mode, opts)
end | ruby | {
"resource": ""
} |
# Returns the maximum pixel value together with its x, y coordinates.
def maxpos
  value, opts = max x: true, y: true
  [value, opts['x'], opts['y']]
end
"resource": ""
} |
# Returns the minimum pixel value together with its x, y coordinates.
def minpos
  value, opts = min x: true, y: true
  [value, opts['x'], opts['y']]
end
"resource": ""
} |
q16073 | Vips.Object.get_pspec | train | def get_pspec name
pspec = GObject::GParamSpecPtr.new
argument_class = Vips::ArgumentClassPtr.new
argument_instance = Vips::ArgumentInstancePtr.new
result = Vips::vips_object_get_argument self, name,
pspec, argument_class, argument_instance
return nil if result != 0
pspec
end | ruby | {
"resource": ""
} |
q16074 | Vips.Object.get_typeof_error | train | def get_typeof_error name
pspec = get_pspec name
raise Vips::Error unless pspec
pspec[:value][:value_type]
end | ruby | {
"resource": ""
} |
q16075 | GObject.GValue.set | train | def set value
# GLib::logger.debug("GObject::GValue.set") {
# "value = #{value.inspect[0..50]}"
# }
gtype = self[:gtype]
fundamental = ::GObject::g_type_fundamental gtype
case gtype
when GBOOL_TYPE
::GObject::g_value_set_boolean self, (value ? 1 : 0)
when GINT_TYPE
::GObject::g_value_set_int self, value
when GUINT64_TYPE
::GObject::g_value_set_uint64 self, value
when GDOUBLE_TYPE
::GObject::g_value_set_double self, value
when GSTR_TYPE
::GObject::g_value_set_string self, value
when Vips::REFSTR_TYPE
::Vips::vips_value_set_ref_string self, value
when Vips::ARRAY_INT_TYPE
value = [value] unless value.is_a? Array
Vips::vips_value_set_array_int self, nil, value.length
ptr = Vips::vips_value_get_array_int self, nil
ptr.write_array_of_int32 value
when Vips::ARRAY_DOUBLE_TYPE
value = [value] unless value.is_a? Array
# this will allocate an array in the gvalue
Vips::vips_value_set_array_double self, nil, value.length
# pull the array out and fill it
ptr = Vips::vips_value_get_array_double self, nil
ptr.write_array_of_double value
when Vips::ARRAY_IMAGE_TYPE
value = [value] unless value.is_a? Array
Vips::vips_value_set_array_image self, value.length
ptr = Vips::vips_value_get_array_image self, nil
ptr.write_array_of_pointer value
# the gvalue needs a ref on each of the images
value.each {|image| ::GObject::g_object_ref image}
when Vips::BLOB_TYPE
len = value.bytesize
ptr = GLib::g_malloc len
Vips::vips_value_set_blob self, GLib::G_FREE, ptr, len
ptr.write_bytes value
else
case fundamental
when GFLAGS_TYPE
::GObject::g_value_set_flags self, value
when GENUM_TYPE
enum_value = GValue.from_nick(self[:gtype], value)
::GObject::g_value_set_enum self, enum_value
when GOBJECT_TYPE
::GObject::g_value_set_object self, value
else
raise Vips::Error, "unimplemented gtype for set: " +
"#{::GObject::g_type_name gtype} (#{gtype})"
end
end
end | ruby | {
"resource": ""
} |
q16076 | GObject.GValue.get | train | def get
gtype = self[:gtype]
fundamental = ::GObject::g_type_fundamental gtype
result = nil
case gtype
when GBOOL_TYPE
result = ::GObject::g_value_get_boolean(self) != 0 ? true : false
when GINT_TYPE
result = ::GObject::g_value_get_int self
when GUINT64_TYPE
result = ::GObject::g_value_get_uint64 self
when GDOUBLE_TYPE
result = ::GObject::g_value_get_double self
when GSTR_TYPE
result = ::GObject::g_value_get_string self
when Vips::REFSTR_TYPE
len = Vips::SizeStruct.new
result = ::Vips::vips_value_get_ref_string self, len
when Vips::ARRAY_INT_TYPE
len = Vips::IntStruct.new
array = Vips::vips_value_get_array_int self, len
result = array.get_array_of_int32 0, len[:value]
when Vips::ARRAY_DOUBLE_TYPE
len = Vips::IntStruct.new
array = Vips::vips_value_get_array_double self, len
result = array.get_array_of_double 0, len[:value]
when Vips::ARRAY_IMAGE_TYPE
len = Vips::IntStruct.new
array = Vips::vips_value_get_array_image self, len
result = array.get_array_of_pointer 0, len[:value]
result.map! do |pointer|
::GObject::g_object_ref pointer
Vips::Image.new pointer
end
when Vips::BLOB_TYPE
len = Vips::SizeStruct.new
array = Vips::vips_value_get_blob self, len
result = array.get_bytes 0, len[:value]
else
case fundamental
when GFLAGS_TYPE
result = ::GObject::g_value_get_flags self
when GENUM_TYPE
enum_value = ::GObject::g_value_get_enum(self)
result = GValue.to_nick self[:gtype], enum_value
when GOBJECT_TYPE
obj = ::GObject::g_value_get_object self
# g_value_get_object() does not add a ref ... we need to add
# one to match the unref in gobject release
::GObject::g_object_ref obj
result = Vips::Image.new obj
else
raise Vips::Error, "unimplemented gtype for get: " +
"#{::GObject::g_type_name gtype} (#{gtype})"
end
end
# GLib::logger.debug("GObject::GValue.get") {
# "result = #{result.inspect[0..50]}"
# }
return result
end | ruby | {
"resource": ""
} |
# Runs the import. With no rows the report is marked done immediately;
# otherwise every row is persisted while the report tracks progress. An
# ImportAborted raised during persistence marks the report as aborted
# instead of propagating. Always returns the report.
def call
  return report.tap(&:done!) if rows.empty?
  report.in_progress!
  persist_rows!
  report.done!
  report
rescue ImportAborted
  report.aborted!
  report
end
"resource": ""
} |
# Strips surrounding whitespace from every cell; nil cells become empty
# strings so downstream code always receives strings.
def sanitize_cells(rows)
  rows.map { |cells| cells.map { |cell| cell ? cell.strip : "" } }
end
"resource": ""
} |
q16079 | CSVImporter.Row.model | train | def model
@model ||= begin
model = find_or_build_model
set_attributes(model)
after_build_blocks.each { |block| instance_exec(model, &block) }
model
end
end | ruby | {
"resource": ""
} |
q16080 | CSVImporter.Row.set_attribute | train | def set_attribute(model, column, csv_value)
column_definition = column.definition
if column_definition.to && column_definition.to.is_a?(Proc)
to_proc = column_definition.to
case to_proc.arity
when 1 # to: ->(email) { email.downcase }
model.public_send("#{column_definition.name}=", to_proc.call(csv_value))
when 2 # to: ->(published, post) { post.published_at = Time.now if published == "true" }
to_proc.call(csv_value, model)
when 3 # to: ->(field_value, post, column) { post.hash_field[column.name] = field_value }
to_proc.call(csv_value, model, column)
else
raise ArgumentError, "`to` proc can only have 1, 2 or 3 arguments"
end
else
attribute = column_definition.attribute
model.public_send("#{attribute}=", csv_value)
end
model
end | ruby | {
"resource": ""
} |
q16081 | CSVImporter.Row.errors | train | def errors
Hash[
model.errors.map do |attribute, errors|
if column_name = header.column_name_for_model_attribute(attribute)
[column_name, errors]
else
[attribute, errors]
end
end
]
end | ruby | {
"resource": ""
} |
# Decides whether a CSV header cell matches this column definition.
#
# The query (defaulting to `as` or the column name) may be a Symbol
# (compared against the downcased, underscored header), a String
# (case-insensitive comparison), a Regexp (matched against the raw
# header), or an Array of any of these (any match wins). nil headers
# never match; any other query type raises Error.
def match?(column_name, search_query=(as || name))
  return false if column_name.nil?
  normalized = column_name.downcase
  case search_query
  when Symbol
    normalized.gsub(/\s+/, '_') == search_query.to_s
  when String
    normalized == search_query.downcase
  when Regexp
    column_name =~ search_query
  when Array
    search_query.any? { |query| match?(column_name, query) }
  else
    raise Error, "Invalid `as`. Should be a Symbol, String, Regexp or Array - was #{as.inspect}"
  end
end
"resource": ""
} |
q16083 | TTY.Templater.generate | train | def generate(template_options, color_option)
# Copies each registered template from @source_path into @target_path,
# rendering with +template_options+ as the template context and merging in
# +color_option+ for TTY::File. Missing source files are silently skipped.
templates.each do |src, dst|
source = @source_path.join(src)
destination = @target_path.join(dst).to_s
next unless ::File.exist?(source)
# NOTE(review): within_root_path presumably changes into the project
# root before copying — confirm against its definition.
within_root_path do
TTY::File.copy_file(source, destination,
{ context: template_options }.merge(color_option))
end
end
end | ruby | {
"resource": ""
} |
q16084 | TTY.Plugins.load_from | train | def load_from(gemspec_path, pattern)
# Loads the gemspec at +gemspec_path+ and registers every runtime and
# development dependency whose name matches +pattern+ as a Plugin.
# Returns self for chaining.
Gem.refresh
spec = Gem::Specification.load(gemspec_path)
dependencies = spec.runtime_dependencies.concat(spec.development_dependencies)
dependencies.each do |gem|
# String#[] with +pattern+ extracts the matching portion (nil on no match).
gem_name = gem.name[pattern]
next if gem_name.to_s.empty?
register(gem_name, Plugin.new(gem_name, gem))
end
self
end | ruby | {
"resource": ""
} |
q16085 | TTY.Plugins.names | train | def names
# Builds a lookup hash of registered plugins keyed by plugin name.
plugins.reduce({}) do |hash, plugin|
hash[plugin.name] = plugin
hash
end
end | ruby | {
"resource": ""
} |
q16086 | TTY.PathHelpers.relative_path_from | train | def relative_path_from(root_path, path)
# Returns +path+ as a Pathname relative to +root_path+. A path that is
# already relative is returned unchanged (wrapped in a Pathname).
project_path = Pathname.new(path)
return project_path if project_path.relative?
project_path.relative_path_from(root_path)
end | ruby | {
"resource": ""
} |
q16087 | Databasedotcom.Client.authenticate | train | def authenticate(options = nil)
# Authenticates this client against Salesforce. Three flows are supported:
# 1. username/password OAuth grant - when credentials are configured on
# the client or passed in +options+,
# 2. an OmniAuth-style hash - detected by the "provider" key,
# 3. a plain hash with :token and :instance_url - a pre-obtained session.
# Returns the OAuth token. Raises SalesForceError on a failed grant and
# ArgumentError when a plain hash lacks :token/:instance_url.
if user_and_pass?(options)
req = https_request(self.host)
user = self.username || options[:username]
pass = self.password || options[:password]
path = encode_path_with_params('/services/oauth2/token', :grant_type => 'password', :client_id => self.client_id, :client_secret => self.client_secret, :username => user, :password => pass)
log_request("https://#{self.host}/#{path}")
result = req.post(path, "")
log_response(result)
raise SalesForceError.new(result) unless result.is_a?(Net::HTTPOK)
# Persist the working credentials only after a successful grant.
self.username = user
self.password = pass
parse_auth_response(result.body)
elsif options.is_a?(Hash)
if options.has_key?("provider")
# OmniAuth hash: user/org ids are embedded in the identity URL ("uid").
parse_user_id_and_org_id_from_identity_url(options["uid"])
self.instance_url = options["credentials"]["instance_url"]
self.oauth_token = options["credentials"]["token"]
self.refresh_token = options["credentials"]["refresh_token"]
else
raise ArgumentError unless options.has_key?(:token) && options.has_key?(:instance_url)
self.instance_url = options[:instance_url]
self.oauth_token = options[:token]
self.refresh_token = options[:refresh_token]
end
end
# Default API version when none was configured.
self.version = "22.0" unless self.version
self.oauth_token
end | ruby | {
"resource": ""
} |
q16088 | Databasedotcom.Client.list_sobjects | train | def list_sobjects
# Returns the names of all sobject types visible to this client via the
# REST describe endpoint. Raises SalesForceError on HTTP 400.
# NOTE(review): any other non-OK response falls through and returns nil —
# confirm whether callers expect that.
result = http_get("/services/data/v#{self.version}/sobjects")
if result.is_a?(Net::HTTPOK)
JSON.parse(result.body)["sobjects"].collect { |sobject| sobject["name"] }
elsif result.is_a?(Net::HTTPBadRequest)
raise SalesForceError.new(result)
end
end | ruby | {
"resource": ""
} |
q16089 | Databasedotcom.Client.trending_topics | train | def trending_topics
# Returns the names of currently trending Chatter topics.
result = http_get("/services/data/v#{self.version}/chatter/topics/trending")
result = JSON.parse(result.body)
result["topics"].collect { |topic| topic["name"] }
end | ruby | {
"resource": ""
} |
q16090 | Databasedotcom.Client.record_from_hash | train | def record_from_hash(data)
# Materializes a record object from a Salesforce result hash. The sobject
# type comes from the 'attributes' entry; each remaining key is matched to
# a field by label, name, or relationship name. Reference fields whose
# value is a nested hash are built recursively as child records.
attributes = data.delete('attributes')
new_record = find_or_materialize(attributes["type"]).new
data.each do |name, value|
field = new_record.description['fields'].find do |field|
key_from_label(field["label"]) == name || field["name"] == name || field["relationshipName"] == name
end
# Field not found
# NOTE(review): `break` aborts the whole loop here, silently dropping all
# remaining keys — `next` (skip only this key) may have been intended;
# confirm against callers before changing.
if field == nil
break
end
# If reference/lookup field data was fetched, recursively build the child record and apply
if value.is_a?(Hash) and field['type'] == 'reference' and field["relationshipName"]
relation = record_from_hash( value )
set_value( new_record, field["relationshipName"], relation, 'reference' )
# Apply the raw value for all other field types
else
set_value(new_record, field["name"], value, field["type"]) if field
end
end
new_record
end | ruby | {
"resource": ""
} |
q16091 | Fasterer.MethodCallScanner.check_symbol_to_proc | train | def check_symbol_to_proc
# Flags blocks of the shape `collection.map { |x| x.foo }` that could be
# written as `collection.map(&:foo)`. Guard clauses ensure: exactly one
# block argument, a non-empty body that is a single argument-less call
# with no block of its own, whose receiver is the block argument.
return unless method_call.block_argument_names.count == 1
return if method_call.block_body.nil?
return unless method_call.block_body.sexp_type == :call
return if method_call.arguments.count > 0
body_method_call = MethodCall.new(method_call.block_body)
return unless body_method_call.arguments.count.zero?
return if body_method_call.has_block?
return unless body_method_call.receiver.name == method_call.block_argument_names.first
add_offense(:block_vs_symbol_to_proc)
end | ruby | {
"resource": ""
} |
q16092 | Rufus.Scheduler.scheduled? | train | def scheduled?(job_or_job_id)
# True when the job (looked up by instance or id) exists, has not been
# unscheduled, and still has a next run time. `!!` coerces to a strict
# boolean.
job, _ = fetch(job_or_job_id)
!! (job && job.unscheduled_at.nil? && job.next_time != nil)
end | ruby | {
"resource": ""
} |
q16093 | Git.Lib.merge_base | train | def merge_base(*args)
# Runs `git merge-base` with the given refs. A trailing Hash is treated as
# options: each truthy entry becomes a `--key` flag prepended to the refs.
opts = args.last.is_a?(Hash) ? args.pop : {}
arg_opts = opts.map { |k, v| "--#{k}" if v }.compact + args
command('merge-base', arg_opts)
end | ruby | {
"resource": ""
} |
q16094 | Crystalball.MapGenerator.start! | train | def start!
# Begins a map-generation session: resets state, clears persisted storage,
# writes fresh metadata, and notifies strategies.
# NOTE(review): `map` is read right after being set to nil — presumably the
# reader lazily rebuilds a fresh map; confirm against its definition.
self.map = nil
map_storage.clear!
map_storage.dump(map.metadata.to_h)
# Strategies are notified in reverse registration order on start.
strategies.reverse.each(&:after_start)
self.started = true
end | ruby | {
"resource": ""
} |
q16095 | Crystalball.MapGenerator.refresh_for_case | train | def refresh_for_case(example)
# Runs +example+ through the strategy chain (the block actually executes
# the example), appends the resulting ExampleGroupMap to the map, then
# dumps to storage if the configured threshold is reached.
map << strategies.run(ExampleGroupMap.new(example), example) { example.run }
check_dump_threshold
end | ruby | {
"resource": ""
} |
q16096 | Crystalball.MapGenerator.finalize! | train | def finalize!
# Ends the session: no-op unless started; notifies strategies, then
# persists the collected example groups (optionally compacted first).
# An empty map is not persisted.
return unless started
strategies.each(&:before_finalize)
return unless map.size.positive?
example_groups = (configuration.compact_map? ? MapCompactor.compact_map!(map) : map).example_groups
map_storage.dump(example_groups)
end | ruby | {
"resource": ""
} |
q16097 | Origami.ResourcesHolder.add_resource | train | def add_resource(type, rsrc, name = nil)
# Registers +rsrc+ under the resource dictionary for +type+ and returns
# its name. With no name given, an already-registered resource's existing
# name is reused; otherwise a fresh id is generated.
if name.nil?
rsrc_name = self.resources(type).key(rsrc)
return rsrc_name if rsrc_name
end
name ||= new_id(type)
# A Resources object holds entries directly; any other holder stores them
# under its (lazily created) Resources entry.
target = self.is_a?(Resources) ? self : (self.Resources ||= Resources.new)
rsrc_dict = (target[type] and target[type].solve) || (target[type] = Dictionary.new)
rsrc_dict[name.to_sym] = rsrc
name
end | ruby | {
"resource": ""
} |
q16098 | Origami.ResourcesHolder.each_resource | train | def each_resource(type)
# Yields (name, resolved_object) for each resource of +type+. Returns a
# sized Enumerator when no block is given; yields nothing if the holder
# has no resource Dictionary for that type.
target = self.is_a?(Resources) ? self : (self.Resources ||= Resources.new)
rsrc = (target[type] and target[type].solve)
return enum_for(__method__, type) { rsrc.is_a?(Dictionary) ? rsrc.length : 0 } unless block_given?
return unless rsrc.is_a?(Dictionary)
rsrc.each_pair do |name, obj|
yield(name.value, obj.solve)
end
end | ruby | {
"resource": ""
} |
q16099 | Origami.ResourcesHolder.resources | train | def resources(type = nil)
# Returns a Hash of resources of the given +type+ (name => object), or,
# with no type, the merged hash of every known resource category.
if type.nil?
self.extgstates
.merge self.colorspaces
.merge self.patterns
.merge self.shadings
.merge self.xobjects
.merge self.fonts
.merge self.properties
else
self.each_resource(type).to_h
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.