_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q19400
DataMapper.OrderedSet.<<
train
# Append an entry; if an equal entry is already cached, replace it in place
# at its recorded index instead of appending a duplicate. Returns self.
def <<(entry)
  if (existing_index = @cache[entry])
    entries[existing_index] = entry
  else
    @cache[entry] = size
    entries << entry
  end
  self
end
ruby
{ "resource": "" }
q19401
DataMapper.PropertySet.[]=
train
# Deprecated index-style setter; validates that +name+ matches the entry's
# own name, delegates to #<<, and returns the entry.
def []=(name, entry)
  warn "#{self.class}#[]= is deprecated. Use #{self.class}#<< instead: #{caller.first}"
  unless name && name.to_s == entry.name.to_s
    raise "#{entry.class} is not added with the correct name"
  end
  self << entry
  entry
end
ruby
{ "resource": "" }
q19402
DataMapper.Query.update
train
# Merge another Query (or options hash) into this query in place.
# Returns self; short-circuits when +other+ is equal or empty.
def update(other)
  other_options =
    if kind_of?(other.class)
      return self if self.eql?(other)
      assert_valid_other(other)
      other.options
    else
      other = other.to_hash
      return self if other.empty?
      other
    end

  @options = @options.merge(other_options).freeze
  assert_valid_options(@options)

  # copy each known option into its instance variable, remembering which
  # attributes need re-normalization afterwards
  normalize = DataMapper::Ext::Hash.only(other_options, *OPTIONS - [ :conditions ]).map do |attribute, value|
    instance_variable_set("@#{attribute}", DataMapper::Ext.try_dup(value))
    attribute
  end

  merge_conditions([ DataMapper::Ext::Hash.except(other_options, *OPTIONS), other_options[:conditions] ])
  normalize_options(normalize | [ :links, :unique ])

  self
end
ruby
{ "resource": "" }
q19403
DataMapper.Query.filter_records
train
# Apply this query's uniqueness, conditions, order, and limit/offset to an
# in-memory array of records and return the filtered result.
def filter_records(records)
  records = records.uniq             if unique?
  records = match_records(records)   if conditions
  records = sort_records(records)    if order
  records = limit_records(records)   if limit || offset > 0
  records
end
ruby
{ "resource": "" }
q19404
DataMapper.Query.match_records
train
# Keep only the records matching this query's conditions.
def match_records(records)
  matcher = self.conditions
  records.select { |record| matcher.matches?(record) }
end
ruby
{ "resource": "" }
q19405
DataMapper.Query.sort_records
train
# Sort records by this query's order directions, using Sort wrappers so
# descending directions compare in reverse.
def sort_records(records)
  directions = order.map { |direction| [ direction.target, direction.operator == :asc ] }
  records.sort_by do |record|
    directions.map do |(property, ascending)|
      Sort.new(record_value(record, property), ascending)
    end
  end
end
ruby
{ "resource": "" }
q19406
DataMapper.Query.slice!
train
# Destructively narrow this query to a slice; when the query already has a
# limit/offset, the new slice is taken relative to the existing window.
def slice!(*args)
  offset, limit = extract_slice_arguments(*args)
  if self.limit || self.offset > 0
    offset, limit = get_relative_position(offset, limit)
  end
  update(:offset => offset, :limit => limit)
end
ruby
{ "resource": "" }
q19407
DataMapper.Query.inspect
train
# Human-readable dump of the query's significant attributes.
def inspect
  attrs = [
    [ :repository, repository.name ],
    [ :model,      model           ],
    [ :fields,     fields          ],
    [ :links,      links           ],
    [ :conditions, conditions      ],
    [ :order,      order           ],
    [ :limit,      limit           ],
    [ :offset,     offset          ],
    [ :reload,     reload?         ],
    [ :unique,     unique?         ],
  ]
  "#<#{self.class.name} #{attrs.map { |key, value| "@#{key}=#{value.inspect}" }.join(' ')}>"
end
ruby
{ "resource": "" }
q19408
DataMapper.Query.condition_properties
train
# Collect the Set of Property subjects referenced by this query's comparisons.
def condition_properties
  result = Set.new
  each_comparison do |comparison|
    next unless comparison.respond_to?(:subject)
    subject = comparison.subject
    result << subject if subject.kind_of?(Property)
  end
  result
end
ruby
{ "resource": "" }
q19409
DataMapper.Query.to_subquery
train
# Wrap this query as an IN-subquery condition on the model's key fields.
def to_subquery
  key_collection = model.all(merge(:fields => model_key))
  Conditions::Operation.new(
    :and,
    Conditions::Comparison.new(:in, self_relationship, key_collection)
  )
end
ruby
{ "resource": "" }
q19410
DataMapper.Query.to_hash
train
# Serialize the query into its canonical options hash.
def to_hash
  {
    :repository   => repository.name,
    :model        => model.name,
    :fields       => fields,
    :links        => links,
    :conditions   => conditions,
    :offset       => offset,
    :limit        => limit,
    :order        => order,
    :unique       => unique?,
    :add_reversed => add_reversed?,
    :reload       => reload?,
  }
end
ruby
{ "resource": "" }
q19411
DataMapper.Query.assert_valid_options
train
# Validate an options hash, dispatching each known key to its dedicated
# assertion; unknown keys are treated as raw conditions.
def assert_valid_options(options)
  options = options.to_hash
  options.each do |attribute, value|
    case attribute
    when :fields     then assert_valid_fields(value, options[:unique])
    when :links      then assert_valid_links(value)
    when :conditions then assert_valid_conditions(value)
    when :offset     then assert_valid_offset(value, options[:limit])
    when :limit      then assert_valid_limit(value)
    when :order      then assert_valid_order(value, options[:fields])
    when :unique, :add_reversed, :reload
      assert_valid_boolean("options[:#{attribute}]", value)
    else
      assert_valid_conditions(attribute => value)
    end
  end
end
ruby
{ "resource": "" }
q19412
DataMapper.Query.assert_valid_offset
train
# Assert that +offset+ is non-negative, and that a positive offset is only
# used together with a limit.
def assert_valid_offset(offset, limit)
  if offset < 0
    raise ArgumentError,
          "+options[:offset]+ must be greater than or equal to 0, but was #{offset.inspect}"
  end
  if offset > 0 && limit.nil?
    raise ArgumentError,
          '+options[:offset]+ cannot be greater than 0 if limit is not specified'
  end
end
ruby
{ "resource": "" }
q19413
DataMapper.Query.assert_valid_other
train
# Assert that another query targets the same repository and a compatible model.
def assert_valid_other(other)
  other_repository = other.repository
  repository       = self.repository
  other_class      = other.class

  unless other_repository == repository
    raise ArgumentError,
          "+other+ #{other_class} must be for the #{repository.name} repository, not #{other_repository.name}"
  end

  other_model = other.model
  model       = self.model

  unless other_model >= model
    raise ArgumentError,
          "+other+ #{other_class} must be for the #{model.name} model, not #{other_model.name}"
  end
end
ruby
{ "resource": "" }
q19414
DataMapper.Query.merge_conditions
train
# Fold an array of condition specifications (operations/comparisons, hashes,
# or raw [statement, *bind_values] arrays) into @conditions.
def merge_conditions(conditions)
  @conditions = Conditions::Operation.new(:and) << @conditions unless @conditions.nil?
  conditions.compact!
  conditions.each do |condition|
    case condition
    when Conditions::AbstractOperation, Conditions::AbstractComparison
      add_condition(condition)
    when Hash
      condition.each { |key, value| append_condition(key, value) }
    when Array
      statement, *bind_values = *condition
      raw_condition = [ statement ]
      raw_condition << bind_values if bind_values.size > 0
      add_condition(raw_condition)
      @raw = true # remember that a raw SQL fragment was merged in
    end
  end
end
ruby
{ "resource": "" }
q19415
DataMapper.Query.append_condition
train
# Route a condition subject to the appropriate append_* helper based on its type.
def append_condition(subject, bind_value, model = self.model, operator = :eql)
  case subject
  when Property, Associations::Relationship
    append_property_condition(subject, bind_value, operator)
  when Symbol
    append_symbol_condition(subject, bind_value, model, operator)
  when String
    append_string_condition(subject, bind_value, model, operator)
  when Operator
    append_operator_conditions(subject, bind_value, model)
  when Path
    append_path(subject, bind_value, model, operator)
  else
    raise ArgumentError, "#{subject} is an invalid instance: #{subject.class}"
  end
end
ruby
{ "resource": "" }
q19416
DataMapper.Query.set_operation
train
# Build a new query representing a set operation (union/intersection/difference)
# between this query and +other+.
def set_operation(operation, other)
  assert_valid_other(other)
  result = self.class.new(@repository, @model, other.to_relative_hash)
  result.instance_variable_set(:@conditions, other_conditions(other, operation))
  result
end
ruby
{ "resource": "" }
q19417
DataMapper.Query.other_conditions
train
# Combine this query's conditions with another query's via the operation's
# corresponding boolean connective (:and for intersection/difference, :or for union).
def other_conditions(other, operation)
  self_conditions = query_conditions(self)

  unless self_conditions.kind_of?(Conditions::Operation)
    operation_slug =
      case operation
      when :intersection, :difference then :and
      when :union                     then :or
      end
    self_conditions = Conditions::Operation.new(operation_slug, self_conditions)
  end

  self_conditions.send(operation, query_conditions(other))
end
ruby
{ "resource": "" }
q19418
DataMapper.Query.query_conditions
train
# Use a subquery when the query is limited or joined; otherwise reuse its
# conditions directly.
def query_conditions(query)
  if query.limit || query.links.any?
    query.to_subquery
  else
    query.conditions
  end
end
ruby
{ "resource": "" }
q19419
DataMapper.Query.self_relationship
train
# Memoized one-to-many relationship from the model to itself, used for
# key-based IN subqueries.
def self_relationship
  @self_relationship ||=
    begin
      model = self.model
      Associations::OneToMany::Relationship.new(:self, model, model, self_relationship_options)
    end
end
ruby
{ "resource": "" }
q19420
DataMapper.Query.self_relationship_options
train
# Options for the self-referential relationship: same key and repository on
# both the parent and child side.
def self_relationship_options
  key_names       = model_key.map { |property| property.name }
  repository_name = self.repository.name
  {
    :child_key              => key_names,
    :parent_key             => key_names,
    :child_repository_name  => repository_name,
    :parent_repository_name => repository_name,
  }
end
ruby
{ "resource": "" }
q19421
DataMapper.Model.get
train
# Look up a resource by key, consulting the identity map before querying the
# repository (with ordering disabled, since the key uniquely identifies it).
def get(*key)
  assert_valid_key_size(key)

  repository = self.repository
  key        = self.key(repository.name).typecast(key)

  repository.identity_map(self)[key] ||
    first(key_conditions(repository, key).update(:order => nil))
end
ruby
{ "resource": "" }
q19422
DataMapper.Model.first
train
# Return the first resource (or first +n+ when an Integer is given), with an
# optional query hash/Query as the last argument.
def first(*args)
  first_arg = args.first
  last_arg  = args.last

  limit_specified = first_arg.kind_of?(Integer)
  with_query      = (last_arg.kind_of?(Hash) && !last_arg.empty?) || last_arg.kind_of?(Query)

  limit = limit_specified ? first_arg : 1
  query = with_query ? last_arg : {}
  query = self.query.slice(0, limit).update(query)

  if limit_specified
    all(query)
  else
    query.repository.read(query).first
  end
end
ruby
{ "resource": "" }
q19423
DataMapper.Model.copy
train
# Copy resources matching +query+ from one repository to another, restricted
# to properties present in both repositories; returns the copied collection.
def copy(source_repository_name, target_repository_name, query = {})
  target_properties = properties(target_repository_name)

  # only copy fields the target repository also defines
  query[:fields] ||= properties(source_repository_name).select do |property|
    target_properties.include?(property)
  end

  repository(target_repository_name) do |repository|
    resources = []

    all(query.merge(:repository => source_repository_name)).each do |resource|
      new_resource = new
      query[:fields].each { |property| new_resource.__send__("#{property.name}=", property.get(resource)) }
      resources << new_resource if new_resource.save
    end

    all(Query.target_query(repository, self, resources))
  end
end
ruby
{ "resource": "" }
q19424
DataMapper.Model.repository_name
train
# Name of the innermost scoped repository, or the default when unscoped.
def repository_name
  context = Repository.context
  context.any? ? context.last.name : default_repository_name
end
ruby
{ "resource": "" }
q19425
DataMapper.Model.finalize_allowed_writer_methods
train
# Freeze the set of public writer method names, excluding known-invalid ones.
def finalize_allowed_writer_methods
  writers = public_instance_methods.map { |method| method.to_s }
  @allowed_writer_methods  = writers.grep(WRITER_METHOD_REGEXP).to_set
  @allowed_writer_methods -= INVALID_WRITER_METHODS
  @allowed_writer_methods.freeze
end
ruby
{ "resource": "" }
q19426
DataMapper.Model.assert_valid_properties
train
# A model is only valid with at least one property or a many-to-one
# relationship in the current repository.
def assert_valid_properties
  repository_name = self.repository_name
  has_many_to_one = relationships(repository_name).any? do |relationship|
    relationship.kind_of?(Associations::ManyToOne::Relationship)
  end
  if properties(repository_name).empty? && !has_many_to_one
    raise IncompleteModelError,
          "#{name} must have at least one property or many to one relationship in #{repository_name} to be valid"
  end
end
ruby
{ "resource": "" }
q19427
DataMapper.SubjectSet.[]
train
# Find the entry whose name (string-compared) matches +name+, or nil.
def [](name)
  wanted = name.to_s
  entries.detect { |entry| entry.name.to_s == wanted }
end
ruby
{ "resource": "" }
q19428
DataMapper.Collection.reload
train
# Reload the collection from the repository, optionally merged with another
# query, seeding the identity map and requesting a stable field order.
def reload(other_query = Undefined)
  query = self.query
  query = other_query.equal?(Undefined) ? query.dup : query.merge(other_query)

  # make sure the Identity Map contains all the existing resources
  identity_map = repository.identity_map(model)
  loaded_entries.each do |resource|
    identity_map[resource.key] = resource
  end

  # sort fields based on declared order, for more consistent reload queries
  properties = self.properties
  fields = properties & (query.fields | model_key | [ properties.discriminator ].compact)

  # replace the list of resources
  replace(all(query.update(:fields => fields, :reload => true)))
end
ruby
{ "resource": "" }
q19429
DataMapper.Collection.get
train
# Fetch a resource by key from this collection, checking the identity map
# first; unloaded limited/offset collections are brute-force lazy-loaded.
def get(*key)
  assert_valid_key_size(key)

  key   = model_key.typecast(key)
  query = self.query

  @identity_map[key] ||
    if !loaded? && (query.limit || query.offset > 0)
      # current query is exclusive, find resource within the set

      # TODO: use a subquery to retrieve the Collection and then match
      #   it up against the key. This will require some changes to
      #   how subqueries are generated, since the key may be a
      #   composite key. In the case of DO adapters, it means subselects
      #   like the form "(a, b) IN(SELECT a, b FROM ...)", which will
      #   require making it so the Query condition key can be a
      #   Property or an Array of Property objects

      # use the brute force approach until subquery lookups work
      lazy_load
      @identity_map[key]
    else
      # current query is all inclusive, lookup using normal approach
      first(model.key_conditions(repository, key).update(:order => nil))
    end
end
ruby
{ "resource": "" }
q19430
DataMapper.Collection.all
train
# Scope the collection by a query; an absent/empty query returns a duplicate.
def all(query = Undefined)
  if query.equal?(Undefined) || (query.kind_of?(Hash) && query.empty?)
    dup
  else
    # TODO: if there is no order parameter, and the Collection is not loaded
    # check to see if the query can be satisfied by the head/tail
    new_collection(scoped_query(query))
  end
end
ruby
{ "resource": "" }
q19431
DataMapper.Collection.first
train
# Return the first resource (or first +n+), reusing already-loaded head
# entries when possible and caching the found resource back into the head.
def first(*args)
  first_arg = args.first
  last_arg  = args.last

  limit_specified = first_arg.kind_of?(Integer)
  with_query      = (last_arg.kind_of?(Hash) && !last_arg.empty?) || last_arg.kind_of?(Query)

  limit = limit_specified ? first_arg : 1
  query = with_query ? last_arg : {}
  query = self.query.slice(0, limit).update(query)

  # TODO: when a query provided, and there are enough elements in head to
  # satisfy the query.limit, filter the head with the query, and make
  # sure it matches the limit exactly. if so, use that result instead
  # of calling all()
  #   - this can probably only be done if there is no :order parameter

  loaded = loaded?
  head   = self.head

  collection =
    if !with_query && (loaded || lazy_possible?(head, limit))
      new_collection(query, super(limit))
    else
      all(query)
    end

  return collection if limit_specified

  resource = collection.to_a.first

  if with_query || loaded
    resource
  elsif resource
    head[0] = resource
  end
end
ruby
{ "resource": "" }
q19432
DataMapper.Collection.at
train
# Index into the collection, translating out-of-memory offsets into targeted
# first/last queries instead of loading everything.
def at(offset)
  if loaded? || partially_loaded?(offset)
    super
  elsif offset == 0
    first
  elsif offset > 0
    first(:offset => offset)
  elsif offset == -1
    last
  else
    last(:offset => offset.abs - 1)
  end
end
ruby
{ "resource": "" }
q19433
DataMapper.Collection.slice!
train
# Remove a slice from the collection, marking the removed resources as
# orphaned and returning them wrapped in a new scoped collection.
def slice!(*args)
  removed = super

  resources_removed(removed) unless removed.nil?

  # Workaround for Ruby <= 1.8.6
  compact! if RUBY_VERSION <= '1.8.6'

  # a single (non-enumerable) resource is returned as-is
  return removed unless removed.kind_of?(Enumerable)

  offset, limit = extract_slice_arguments(*args)
  new_collection(sliced_query(offset, limit), removed)
end
ruby
{ "resource": "" }
q19434
DataMapper.Collection.[]=
train
# Index-assign resources, relating the new ones and marking any displaced
# (no longer present) resources as removed.
def []=(*args)
  orphans = Array(superclass_slice(*args[0..-2]))

  # relate new resources
  resources = resources_added(super)

  # mark resources as removed
  resources_removed(orphans - loaded_entries)

  resources
end
ruby
{ "resource": "" }
q19435
DataMapper.Collection.each
train
# Iterate resources, temporarily pointing each resource's collection at self
# for the duration of the yield (restored even on exceptions).
def each
  return to_enum unless block_given?
  super do |resource|
    begin
      original, resource.collection = resource.collection, self
      yield resource
    ensure
      resource.collection = original
    end
  end
end
ruby
{ "resource": "" }
q19436
DataMapper.Collection.new
train
# Build a new (unsaved) resource within this collection's repository scope
# and append it to the collection.
def new(attributes = {})
  resource = repository.scope { model.new(attributes) }
  self << resource
  resource
end
ruby
{ "resource": "" }
q19437
DataMapper.Collection.update
train
# Update every resource with +attributes+; a no-op (true) when the
# attributes produce no dirty state.
def update(attributes)
  assert_update_clean_only(:update)
  dirty = model.new(attributes).dirty_attributes
  dirty.empty? || all? { |resource| resource.update(attributes) }
end
ruby
{ "resource": "" }
q19438
DataMapper.Collection.update!
train
# Bulk-update the underlying datastore rows directly, then sync loaded
# resources and the identity map; returns true/false for success.
def update!(attributes)
  assert_update_clean_only(:update!)

  model = self.model
  dirty = model.new(attributes).dirty_attributes

  return true if dirty.empty?

  dirty.each do |property, value|
    property.assert_valid_value(value)
  end

  return false unless _update(dirty)

  if loaded?
    each do |resource|
      dirty.each { |property, value| property.set!(resource, value) }
      repository.identity_map(model)[resource.key] = resource
    end
  end

  true
end
ruby
{ "resource": "" }
q19439
DataMapper.Collection.destroy!
train
# Delete every resource in the collection directly in the repository; loaded
# resources become immutable and the collection is cleared.
def destroy!
  repository = self.repository
  deleted    = repository.delete(self)

  if loaded?
    # the delete must have covered every loaded resource
    return false unless deleted == size
    each do |resource|
      resource.persistence_state = Resource::PersistenceState::Immutable.new(resource)
    end
    clear
  else
    mark_loaded
  end

  true
end
ruby
{ "resource": "" }
q19440
DataMapper.Collection.respond_to?
train
# The collection also responds to model class methods and named relationships
# (handled via method_missing delegation elsewhere).
def respond_to?(method, include_private = false)
  super ||
    model.respond_to?(method) ||
    relationships.named?(method)
end
ruby
{ "resource": "" }
q19441
DataMapper.Collection.partially_loaded?
train
# True when the requested window is already available in the lazily-loaded
# head (non-negative offsets) or tail (negative offsets).
def partially_loaded?(offset, limit = 1)
  if offset >= 0
    lazy_possible?(head, offset + limit)
  else
    lazy_possible?(tail, offset.abs)
  end
end
ruby
{ "resource": "" }
q19442
DataMapper.Collection.lazy_load
train
# Materialize the collection from the repository, merging in head/tail
# entries that were already known and excluding removed resources.
def lazy_load
  return self if loaded?

  mark_loaded

  head  = self.head
  tail  = self.tail
  query = self.query

  resources = repository.read(query)

  # remove already known results
  resources -= head          if head.any?
  resources -= tail          if tail.any?
  resources -= @removed.to_a if @removed.any?

  query.add_reversed? ? unshift(*resources.reverse) : concat(resources)

  # TODO: DRY this up with LazyArray
  @array.unshift(*head)
  @array.concat(tail)
  @head = @tail = nil
  @reapers.each { |resource| @array.delete_if(&resource) } if @reapers
  @array.freeze if frozen?

  self
end
ruby
{ "resource": "" }
q19443
DataMapper.Collection.new_collection
train
# Construct a sibling collection for +query+; when this collection is loaded
# and no resources were supplied, pre-filter the loaded resources in memory.
def new_collection(query, resources = nil, &block)
  resources ||= filter(query) if loaded?

  # TODO: figure out a way to pass not-yet-saved Resources to this newly
  # created Collection. If the new resource matches the conditions, then
  # it should be added to the collection (keep in mind limit/offset too)

  self.class.new(query, resources, &block)
end
ruby
{ "resource": "" }
q19444
DataMapper.Collection.set_operation
train
# Perform a set operation against another collection, combining both the
# in-memory resources and the underlying queries.
def set_operation(operation, other)
  resources   = set_operation_resources(operation, other)
  other_query = Query.target_query(repository, model, other)
  new_collection(query.send(operation, other_query), resources)
end
ruby
{ "resource": "" }
q19445
DataMapper.Collection._create
train
# Create a resource (with or without hooks) using the collection's default
# attributes; it is only appended when the save succeeded.
def _create(attributes, execute_hooks = true)
  creator  = execute_hooks ? :create : :create!
  resource = repository.scope { model.send(creator, default_attributes.merge(attributes)) }
  self << resource if resource.saved?
  resource
end
ruby
{ "resource": "" }
q19446
DataMapper.Collection._save
train
# Save every loaded resource (applying default attributes first) and clear
# the removed set; true only when all saves succeed.
def _save(execute_hooks = true)
  loaded_entries = self.loaded_entries
  loaded_entries.each { |resource| set_default_attributes(resource) }
  @removed.clear
  saver = execute_hooks ? :save : :save!
  loaded_entries.all? { |resource| resource.__send__(saver) }
end
ruby
{ "resource": "" }
q19447
DataMapper.Collection.default_attributes
train
# Memoized hash of attributes implied by the query's :eql conditions, used to
# seed new resources so they match this collection's scope.
def default_attributes
  return @default_attributes if @default_attributes

  default_attributes = {}
  conditions         = query.conditions

  if conditions.slug == :and
    model_properties = properties.dup
    model_key        = self.model_key

    # key properties are never defaulted when the full key is present
    if model_properties.to_set.superset?(model_key.to_set)
      model_properties -= model_key
    end

    conditions.each do |condition|
      next unless condition.slug == :eql

      subject = condition.subject
      next unless model_properties.include?(subject) ||
                  (condition.relationship? && subject.source_model == model)

      default_attributes[subject] = condition.loaded_value
    end
  end

  @default_attributes = default_attributes.freeze
end
ruby
{ "resource": "" }
q19448
DataMapper.Collection.resource_added
train
# Hook run when a resource joins the collection: saved resources enter the
# identity map, unsaved ones receive the collection's default attributes.
def resource_added(resource)
  resource = initialize_resource(resource)

  if resource.saved?
    @identity_map[resource.key] = resource
    @removed.delete(resource)
  else
    set_default_attributes(resource)
  end

  resource
end
ruby
{ "resource": "" }
q19449
DataMapper.Collection.resources_added
train
# Apply resource_added to one resource or to each member of an enumerable.
def resources_added(resources)
  if resources.kind_of?(Enumerable)
    resources.map { |resource| resource_added(resource) }
  else
    resource_added(resources)
  end
end
ruby
{ "resource": "" }
q19450
DataMapper.Collection.resources_removed
train
# Apply resource_removed to one resource or to each member of an enumerable.
def resources_removed(resources)
  if resources.kind_of?(Enumerable)
    resources.each { |resource| resource_removed(resource) }
  else
    resource_removed(resources)
  end
end
ruby
{ "resource": "" }
q19451
DataMapper.Collection.filter
train
# Filter the loaded resources in memory with +other_query+ when that query
# is a safe subset of this one; nil when it must hit the repository instead.
def filter(other_query)
  query  = self.query
  fields = query.fields.to_set
  unique = other_query.unique?

  # TODO: push this into a Query#subset? method
  if other_query.links.empty? &&
     (unique || (!unique && !query.unique?)) &&
     !other_query.reload? &&
     !other_query.raw? &&
     other_query.fields.to_set.subset?(fields) &&
     other_query.condition_properties.subset?(fields)
    other_query.filter_records(to_a.dup)
  end
end
ruby
{ "resource": "" }
q19452
DataMapper.Collection.scoped_query
train
# Normalize a query argument: duplicate a Query as-is, otherwise treat it as
# a hash relative to this collection's query.
def scoped_query(query)
  query.kind_of?(Query) ? query.dup : self.query.relative(query)
end
ruby
{ "resource": "" }
q19453
DataMapper.Collection.delegate_to_model
train
# Invoke a model method while the model is scoped to this collection's query.
def delegate_to_model(method, *args, &block)
  model = self.model
  model.send(:with_scope, query) do
    model.send(method, *args, &block)
  end
end
ruby
{ "resource": "" }
q19454
DataMapper.Repository.read
train
# Read records for a query via the adapter and hydrate them into resources;
# an invalid query short-circuits to an empty array.
def read(query)
  return [] unless query.valid?
  query.model.load(adapter.read(query), query)
end
ruby
{ "resource": "" }
q19455
DataMapper.Repository.update
train
# Bulk-update a collection via the adapter; returns 0 (no rows) when the
# query is invalid or there is nothing to change.
def update(attributes, collection)
  return 0 unless collection.query.valid? && attributes.any?
  adapter.update(attributes, collection)
end
ruby
{ "resource": "" }
q19456
DataMapper.DescendantSet.delete
train
# Remove a descendant from this set and, recursively, from every member's
# own descendant set.
def delete(descendant)
  @descendants.delete(descendant)
  each { |member| member.descendants.delete(descendant) }
end
ruby
{ "resource": "" }
q19457
DataMapper.Property.valid?
train
# Check whether a value dumps to something this property accepts; +negated+
# inverts the required/allow_nil handling for "not" conditions.
def valid?(value, negated = false)
  dumped_value = dump(value)

  if required? && dumped_value.nil?
    negated || false
  else
    value_dumped?(dumped_value) ||
      (dumped_value.nil? && (allow_nil? || negated))
  end
end
ruby
{ "resource": "" }
q19458
DataMapper.Property.assert_valid_value
train
# Raise InvalidValueError unless the value is valid for this property;
# returns true on success.
def assert_valid_value(value)
  raise Property::InvalidValueError.new(self, value) unless valid?(value)
  true
end
ruby
{ "resource": "" }
q19459
DataMapper.Resource.attribute_set
train
# Typecast and assign a value to the named property via the persistence
# state machine; unknown property names are silently ignored.
def attribute_set(name, value)
  property = properties[name]
  return unless property
  value = property.typecast(value)
  self.persistence_state = persistence_state.set(property, value)
end
ruby
{ "resource": "" }
q19460
DataMapper.Resource.attributes
train
# Return a hash of readable attributes keyed by property name, field name,
# or the Property object itself, depending on +key_on+.
def attributes(key_on = :name)
  attributes = {}

  lazy_load(properties)
  fields.each do |property|
    next unless model.public_method_defined?(name = property.name)
    key = case key_on
          when :name  then name
          when :field then property.field
          else             property
          end
    attributes[key] = __send__(name)
  end

  attributes
end
ruby
{ "resource": "" }
q19461
DataMapper.Resource.inspect
train
# TODO: display relationship values
# Inspect string showing each property's ivar and its (loaded) value.
def inspect
  attrs = properties.map do |property|
    value =
      if new? || property.loaded?(self)
        property.get!(self).inspect
      else
        '<not loaded>'
      end
    "#{property.instance_variable_name}=#{value}"
  end
  "#<#{model.name} #{attrs.join(' ')}>"
end
ruby
{ "resource": "" }
q19462
DataMapper.Resource.dirty_attributes
train
# Hash of changed properties mapped to their dumped (persistable) values.
def dirty_attributes
  original_attributes.each_key.each_with_object({}) do |property, dirty|
    next unless property.respond_to?(:dump)
    dirty[property] = property.dump(property.get!(self))
  end
end
ruby
{ "resource": "" }
q19463
DataMapper.Resource.initialize_copy
train
# On dup/clone, deep-ish copy every instance variable and give the copy a
# fresh persistence state of the same class.
def initialize_copy(original)
  instance_variables.each do |ivar|
    instance_variable_set(ivar, DataMapper::Ext.try_dup(instance_variable_get(ivar)))
  end
  self.persistence_state = persistence_state.class.new(self)
end
ruby
{ "resource": "" }
q19464
DataMapper.Resource.reset_key
train
# Re-assign the current key values back into the key properties.
def reset_key
  properties.key.zip(key) do |property, value|
    property.set!(self, value)
  end
end
ruby
{ "resource": "" }
q19465
DataMapper.Resource.clear_subjects
train
# Unload every loaded non-key property and relationship ivar from the resource.
def clear_subjects
  model_properties = properties
  subjects = model_properties - model_properties.key | relationships
  subjects.each do |subject|
    next unless subject.loaded?(self)
    remove_instance_variable(subject.instance_variable_name)
  end
end
ruby
{ "resource": "" }
q19466
DataMapper.Resource.eager_load
train
# Eagerly load the given properties through the parent collection, guarding
# against recursive lazy loads by pre-setting nil placeholders. Returns self.
def eager_load(properties)
  unless properties.empty? || key.nil? || collection.nil?
    # set an initial value to prevent recursive lazy loads
    properties.each { |property| property.set!(self, nil) }
    collection.reload(:fields => properties)
  end
  self
end
ruby
{ "resource": "" }
q19467
DataMapper.Resource.conditions
train
# Conditions uniquely identifying this resource: the key when saved, else a
# hash of every loaded property and its current value.
def conditions
  key = self.key
  return model.key_conditions(repository, key) if key

  properties.each_with_object({}) do |property, conditions|
    next unless property.loaded?(self)
    conditions[property] = property.get!(self)
  end
end
ruby
{ "resource": "" }
q19468
DataMapper.Resource.child_relationships
train
# Loaded child (collection-bearing) relationships, ordered so many-to-many
# relationships are handled before the rest.
def child_relationships
  loaded_children = []
  relationships.each do |relationship|
    next unless relationship.respond_to?(:collection_for)
    set_default_value(relationship)
    next unless relationship.loaded?(self)
    loaded_children << relationship
  end

  many_to_many, other = loaded_children.partition do |relationship|
    relationship.kind_of?(Associations::ManyToMany::Relationship)
  end

  many_to_many + other
end
ruby
{ "resource": "" }
q19469
DataMapper.Resource.save_self
train
# Persist this resource itself (create or update, optionally with hooks);
# returns whether the resource ended up clean/saved.
def save_self(execute_hooks = true)
  # short-circuit if the resource is not dirty
  return saved? unless dirty_self?

  if execute_hooks
    new? ? create_with_hooks : update_with_hooks
  else
    _persist
  end
  clean?
end
ruby
{ "resource": "" }
q19470
DataMapper.Resource.save_parents
train
# Recursively save parent resources first (run at most once per call chain),
# re-setting each relationship so FK values are populated.
def save_parents(execute_hooks)
  run_once(true) do
    parent_relationships.map do |relationship|
      parent = relationship.get(self)
      if parent.__send__(:save_parents, execute_hooks) && parent.__send__(:save_self, execute_hooks)
        relationship.set(self, parent) # set the FK values
      end
    end.all?
  end
end
ruby
{ "resource": "" }
q19471
DataMapper.Resource.dirty_self?
train
# Dirty when attributes changed, or when new and a serial/default property
# would produce state on save.
def dirty_self?
  return true if original_attributes.any?
  if new?
    !model.serial.nil? || properties.any? { |property| property.default? }
  else
    false
  end
end
ruby
{ "resource": "" }
q19472
DataMapper.Resource.dirty_parents?
train
# True when any parent association (recursively, guarded against cycles)
# is dirty.
def dirty_parents?
  run_once(false) do
    parent_associations.any? do |association|
      association.__send__(:dirty_self?) || association.__send__(:dirty_parents?)
    end
  end
end
ruby
{ "resource": "" }
q19473
DataMapper.Resource.cmp?
train
# Compare two resources with +operator+ (:== or :eql?): repository and key
# must agree; then dirty attributes (saved) or all properties (unsaved).
def cmp?(other, operator)
  return false unless repository.send(operator, other.repository) &&
                      key.send(operator, other.key)

  if saved? && other.saved?
    # if dirty attributes match then they are the same resource
    dirty_attributes == other.dirty_attributes
  else
    # compare properties for unsaved resources
    properties.all? do |property|
      __send__(property.name).send(operator, other.__send__(property.name))
    end
  end
end
ruby
{ "resource": "" }
q19474
DataMapper.Resource.execute_hooks_for
train
# Run every registered hook of +type+ (:before/:after) for the named event.
def execute_hooks_for(type, name)
  model.hooks[name][type].each { |hook| hook.call(self) }
end
ruby
{ "resource": "" }
q19475
DataMapper.Resource.run_once
train
# Guard a block against re-entrancy per calling method: a sentinel ivar named
# after the caller short-circuits nested invocations with +default+.
def run_once(default)
  caller_method = Kernel.caller(1).first[/`([^'?!]+)[?!]?'/, 1]
  sentinel = "@_#{caller_method}_sentinel"

  return instance_variable_get(sentinel) if instance_variable_defined?(sentinel)

  begin
    instance_variable_set(sentinel, default)
    yield
  ensure
    remove_instance_variable(sentinel)
  end
end
ruby
{ "resource": "" }
q19476
DataMapper.Inflector.pluralize
train
# Pluralize a word using the configured inflection rules, leaving empty and
# uncountable words untouched.
def pluralize(word)
  result = word.to_s.dup
  return result if word.empty? || inflections.uncountables.include?(result.downcase)
  inflections.plurals.each { |(rule, replacement)| break if result.gsub!(rule, replacement) }
  result
end
ruby
{ "resource": "" }
q19477
Backburner.Helpers.exception_message
train
# Build a readable one-string message for an exception: a header line plus
# each backtrace entry with the current working directory stripped off.
#
# @param e [Exception] the raised exception (backtrace may be nil)
# @return [String] newline-joined message
def exception_message(e)
  msg = [ "Exception #{e.class} -> #{e.message}" ]

  base = File.expand_path(Dir.pwd) + '/'
  e.backtrace.each do |t|
    # Use a plain-string pattern for gsub: the original interpolated the
    # path into a regex (/#{base}/), which misbehaves whenever the path
    # contains regex metacharacters such as '.', '+', or '(' and can match
    # unintended substrings.
    msg << " #{File.expand_path(t).gsub(base, '')}"
  end if e.backtrace

  msg.join("\n")
end
ruby
{ "resource": "" }
q19478
Backburner.Helpers.classify
train
# Convert a dash-separated word into CamelCase, e.g. "some-task" => "SomeTask".
#
# Empty segments (a leading/trailing dash or "a--b") are skipped: the
# original mutated part[0] unconditionally, and part[0] is nil for an empty
# segment, raising NoMethodError.
#
# @param dashed_word [#to_s] dash-separated word
# @return [String] camel-cased result
def classify(dashed_word)
  dashed_word.to_s.split('-').map { |part|
    part.empty? ? part : part[0].chr.upcase + part[1..-1]
  }.join
end
ruby
{ "resource": "" }
q19479
Backburner.Helpers.dasherize
train
# Normalize a word to dash-separated lowercase: camel-case boundaries become
# dashes and module separators become slashes.
def dasherize(word)
  classify(word).to_s.
    gsub(/::/, '/').
    gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2').
    gsub(/([a-z\d])([A-Z])/, '\1_\2').
    tr("_", "-").
    downcase
end
ruby
{ "resource": "" }
q19480
Backburner.Helpers.resolve_priority
train
# Resolve a priority specification to an Integer: objects exposing
# +queue_priority+, named labels, plain integers, or the configured default.
def resolve_priority(pri)
  if pri.respond_to?(:queue_priority)
    resolve_priority(pri.queue_priority)
  elsif pri.is_a?(String) || pri.is_a?(Symbol) # named priority
    resolve_priority(Backburner.configuration.priority_labels[pri.to_sym])
  elsif pri.is_a?(Integer) # numerical
    pri
  else # default
    Backburner.configuration.default_priority
  end
end
ruby
{ "resource": "" }
q19481
Backburner.Helpers.resolve_respond_timeout
train
# Resolve a TTR specification to an Integer: objects exposing
# +queue_respond_timeout+, plain integers, or the configured default.
def resolve_respond_timeout(ttr)
  if ttr.respond_to?(:queue_respond_timeout)
    resolve_respond_timeout(ttr.queue_respond_timeout)
  elsif ttr.is_a?(Integer) # numerical
    ttr
  else # default
    Backburner.configuration.respond_timeout
  end
end
ruby
{ "resource": "" }
q19482
Backburner.Worker.work_one_job
train
# Reserve and process a single job: log begin/end, and on failure either
# retry with a (possibly computed) delay or bury the job, then delegate to
# the configured error handler.
def work_one_job(conn = connection)
  begin
    job = reserve_job(conn)
  rescue Beaneater::TimedOutError => e
    return
  end

  self.log_job_begin(job.name, job.args)
  job.process
  self.log_job_end(job.name)
rescue Backburner::Job::JobFormatInvalid => e
  self.log_error self.exception_message(e)
rescue => e # Error occurred processing job
  self.log_error self.exception_message(e) unless e.is_a?(Backburner::Job::RetryJob)

  unless job
    self.log_error "Error occurred before we were able to assign a job. Giving up without retrying!"
    return
  end

  # NB: There's a slight chance here that the connection to beanstalkd has
  # gone down between the time we reserved / processed the job and here.
  num_retries = job.stats.releases
  retry_status = "failed: attempt #{num_retries+1} of #{queue_config.max_job_retries+1}"
  if num_retries < queue_config.max_job_retries # retry again
    delay = queue_config.retry_delay_proc.call(queue_config.retry_delay, num_retries) rescue queue_config.retry_delay
    job.retry(num_retries + 1, delay)
    self.log_job_end(job.name, "#{retry_status}, retrying in #{delay}s") if job_started_at
  else # retries failed, bury
    job.bury
    self.log_job_end(job.name, "#{retry_status}, burying") if job_started_at
  end

  handle_error(e, job.name, job.args, job)
end
ruby
{ "resource": "" }
q19483
Backburner.Worker.new_connection
train
# Open a fresh beanstalkd connection, firing the :on_reconnect hook whenever
# the connection re-establishes itself.
def new_connection
  Connection.new(Backburner.configuration.beanstalk_url) do |conn|
    Backburner::Hooks.invoke_hook_events(self, :on_reconnect, conn)
  end
end
ruby
{ "resource": "" }
q19484
Backburner.Worker.reserve_job
train
# Reserve the next job from the connection's watched tubes, wrapped as a
# Backburner::Job; times out after +reserve_timeout+ seconds.
def reserve_job(conn, reserve_timeout = Backburner.configuration.reserve_timeout)
  raw_job = conn.tubes.reserve(reserve_timeout)
  Backburner::Job.new(raw_job)
end
ruby
{ "resource": "" }
q19485
Backburner.Worker.handle_error
train
# Invoke the configured on_error handler (if any) with as many arguments as
# its arity accepts: (e), (e, name, args), or (e, name, args, job).
def handle_error(e, name, args, job)
  error_handler = Backburner.configuration.on_error
  return unless error_handler

  case error_handler.arity
  when 1 then error_handler.call(e)
  when 3 then error_handler.call(e, name, args)
  else        error_handler.call(e, name, args, job)
  end
end
ruby
{ "resource": "" }
q19486
Backburner.Worker.compact_tube_names
train
# Normalize tube name input (single nested array, nils, empties) into a
# unique flat array, falling back to default or all existing queues.
def compact_tube_names(tube_names)
  tube_names = tube_names.first if tube_names && tube_names.size == 1 && tube_names.first.is_a?(Array)
  tube_names = Array(tube_names).compact if tube_names && Array(tube_names).compact.size > 0
  tube_names = nil if tube_names && tube_names.compact.empty?
  tube_names ||= Backburner.default_queues.any? ? Backburner.default_queues : all_existing_queues
  Array(tube_names).uniq
end
ruby
{ "resource": "" }
q19487
Backburner.Logger.log_job_end
train
# Log completion of a job, including elapsed milliseconds since
# job_started_at; a message implies the job merely finished (retry/bury).
def log_job_end(name, message = nil)
  elapsed = Time.now - job_started_at
  ms = (elapsed.to_f * 1000).to_i
  action_word = message ? 'Finished' : 'Completed'
  log_info("#{action_word} #{name} in #{ms}ms #{message}")
end
ruby
{ "resource": "" }
q19488
Backburner.Job.process
train
# Runs the full lifecycle for this job:
#   1. before_perform hooks — returning false from any hook aborts the job
#      (process returns false without performing or deleting the task).
#   2. around_perform hooks wrapping the actual perform call, which is
#      bounded by a timeout derived from the task's TTR.
#   3. Deletes the beanstalk task on success.
#   4. after_perform hooks.
# Any error triggers the on_failure hooks and is re-raised so the worker's
# retry/bury logic can deal with it.
def process
  # Invoke before hook and stop if false
  res = @hooks.invoke_hook_events(job_name, :before_perform, *args)
  return false unless res
  # Execute the job
  @hooks.around_hook_events(job_name, :around_perform, *args) do
    # We subtract one to ensure we timeout before beanstalkd does, except if:
    # a) ttr == 0, to support never timing out
    # b) ttr == 1, so that we don't accidentally set it to never time out
    # NB: A ttr of 1 will likely result in race conditions between
    # Backburner and beanstalkd and should probably be avoided
    timeout_job_after(task.ttr > 1 ? task.ttr - 1 : task.ttr) { job_class.perform(*args) }
  end
  task.delete
  # Invoke after perform hook
  @hooks.invoke_hook_events(job_name, :after_perform, *args)
rescue => e
  # Give failure hooks a chance to react, then propagate to the worker.
  @hooks.invoke_hook_events(job_name, :on_failure, e, *args)
  raise e
end
ruby
{ "resource": "" }
q19489
Backburner.Job.timeout_job_after
train
# Runs the given block under a hard time limit. If the block does not
# finish within +secs+ seconds, a JobTimeout is raised whose message
# includes the job name, its arguments, and the timed-out backtrace.
#
# @param secs [Numeric] timeout in seconds
# @yield the work to bound
# @raise [JobTimeout] when the block exceeds the allotted time
def timeout_job_after(secs, &block)
  Timeout.timeout(secs, &block)
rescue Timeout::Error => e
  argument_list = (@args || []).join(', ')
  raise JobTimeout, "#{name}(#{argument_list}) hit #{secs}s timeout.\nbacktrace: #{e.backtrace}"
end
ruby
{ "resource": "" }
q19490
Backburner.Connection.retryable
train
# Runs the given block, retrying on Beaneater::NotConnected by reconnecting,
# sleeping :retry_delay seconds, and invoking the optional :on_retry callback
# before each retry, up to :max_retries attempts. Once retries are exhausted
# the original NotConnected error is re-raised.
#
# Fix: the rescue previously did not bind the exception (`rescue
# Beaneater::NotConnected` with no `=> e`), so the exhausted-retries branch's
# `raise e` raised NameError instead of the real error. Now binds `=> e`,
# matching ensure_connected!.
#
# @param options [Hash] :max_retries (4), :on_retry (nil), :retry_delay (1.0)
# @yield the work to attempt
# @raise [Beaneater::NotConnected] when all retries are exhausted
def retryable(options = {}, &block)
  options = {:max_retries => 4, :on_retry => nil, :retry_delay => 1.0}.merge!(options)
  retry_count = options[:max_retries]
  begin
    yield
  rescue Beaneater::NotConnected => e
    if retry_count > 0
      reconnect!
      retry_count -= 1
      sleep options[:retry_delay]
      options[:on_retry].call if options[:on_retry].respond_to?(:call)
      retry
    else # stop retrying
      raise e
    end
  end
end
ruby
{ "resource": "" }
q19491
Backburner.Connection.ensure_connected!
train
# Ensures this connection is live, reconnecting with bounded retries.
# Returns self immediately when already connected; otherwise attempts
# reconnect!, sleeping retry_delay between attempts, up to max_retries
# retries. Re-raises Beaneater::NotConnected once retries are exhausted.
#
# @param max_retries [Integer] number of additional reconnect attempts
# @param retry_delay [Numeric] seconds to sleep between attempts
# @return [self]
# @raise [Beaneater::NotConnected] when reconnection keeps failing
def ensure_connected!(max_retries = 4, retry_delay = 1.0)
  return self if connected?

  attempts_left = max_retries
  begin
    reconnect!
    self
  rescue Beaneater::NotConnected => e
    # stop retrying once the budget is spent
    raise e if attempts_left <= 0
    attempts_left -= 1
    sleep retry_delay
    retry
  end
end
ruby
{ "resource": "" }
q19492
Backburner.Connection.beanstalk_addresses
train
# Resolves this connection's configured URL into a "host:port" address.
# When the configured url is an array of URLs, only the first is used.
#
# @return [String] "host:port" for the beanstalkd server
def beanstalk_addresses
  first_uri = self.url.is_a?(Array) ? self.url.first : self.url
  beanstalk_host_and_port(first_uri)
end
ruby
{ "resource": "" }
q19493
Backburner.Connection.beanstalk_host_and_port
train
# Parses a beanstalk:// URL into a "host:port" string, defaulting the port
# to beanstalkd's standard 11300 when the URL omits it.
#
# @param uri_string [String] e.g. "beanstalk://localhost:11300"
# @return [String] "host:port"
# @raise [BadURL] when the scheme is not "beanstalk"
def beanstalk_host_and_port(uri_string)
  parsed = URI.parse(uri_string)
  raise(BadURL, uri_string) unless parsed.scheme == 'beanstalk'.freeze
  port = parsed.port || 11300
  "#{parsed.host}:#{port}"
end
ruby
{ "resource": "" }
q19494
CountryStateSelect.CscsController.find_cities
train
# Looks up the cities for the state/country given in the request params
# (via the CS gem) and renders them as a JSON array.
def find_cities
  state_key = params[:state_id].to_sym
  country_key = params[:country_id].to_sym
  city_list = CS.cities(state_key, country_key)
  respond_to do |format|
    format.json { render :json => city_list.to_a }
  end
end
ruby
{ "resource": "" }
q19495
ComfyBootstrapForm.FormBuilder.file_field
train
# Bootstrap-styled wrapper around Rails' file_field helper.
#
# With bootstrap disabled, delegates straight to the vanilla helper. With
# custom_control enabled, renders Bootstrap's custom-file markup: a wrapper
# div containing the input (custom-file-input) followed by a label
# (custom-file-label) whose text comes from the :placeholder option.
# Otherwise renders the plain input inside the usual form group.
def file_field(method, options = {})
  bootstrap = form_bootstrap.scoped(options.delete(:bootstrap))
  return super if bootstrap.disabled
  draw_form_group(bootstrap, method, options) do
    if bootstrap.custom_control
      content_tag(:div, class: "custom-file") do
        # Custom-file inputs must not carry form-control styling.
        add_css_class!(options, "custom-file-input")
        remove_css_class!(options, "form-control")
        # :placeholder doubles as the visible label text for the custom control.
        label_text = options.delete(:placeholder)
        concat super(method, options)
        label_options = { class: "custom-file-label" }
        # Tie the label to the input when an explicit id was given.
        label_options[:for] = options[:id] if options[:id].present?
        concat label(method, label_text, label_options)
      end
    else
      super(method, options)
    end
  end
end
ruby
{ "resource": "" }
q19496
ComfyBootstrapForm.FormBuilder.plaintext
train
# Renders a read-only, plaintext-styled field inside a bootstrap form group.
# Swaps form-control for form-control-plaintext and forces :readonly, then
# calls the unmodified ActionView text_field (bypassing this builder's own
# override) so no bootstrap decoration is applied to the input itself.
def plaintext(method, options = {})
  bootstrap = form_bootstrap.scoped(options.delete(:bootstrap))
  draw_form_group(bootstrap, method, options) do
    options[:readonly] = true
    remove_css_class!(options, "form-control")
    add_css_class!(options, "form-control-plaintext")
    vanilla_text_field = ActionView::Helpers::FormBuilder.instance_method(:text_field)
    vanilla_text_field.bind(self).call(method, options)
  end
end
ruby
{ "resource": "" }
q19497
ComfyBootstrapForm.FormBuilder.primary
train
# Convenience for a primary-styled submit button: tags the options with
# Bootstrap's btn-primary class and delegates to #submit.
#
# @param value [String, nil] button label
# @param options [Hash] HTML options, mutated to include btn-primary
def primary(value = nil, options = {}, &block)
  button_options = options
  add_css_class!(button_options, "btn-primary")
  submit(value, button_options, &block)
end
ruby
{ "resource": "" }
q19498
ComfyBootstrapForm.FormBuilder.draw_form_group
train
# Assembles a complete bootstrap form group: label, then the control
# (built from the yielded input, with errors attached). The wrapper div
# gets "row" for horizontal layouts and "mr-sm-2" for inline layouts.
def draw_form_group(bootstrap, method, options)
  label = draw_label(bootstrap, method, for_attr: options[:id])
  errors = draw_errors(method)
  control = draw_control(bootstrap, errors, method, options) { yield }

  wrapper_classes = ["form-group"]
  wrapper_classes << "row" if bootstrap.horizontal?
  wrapper_classes << "mr-sm-2" if bootstrap.inline?

  content_tag(:div, class: wrapper_classes.join(" ")) do
    concat label
    concat control
  end
end
ruby
{ "resource": "" }
q19499
ComfyBootstrapForm.FormBuilder.draw_control
train
# Wraps the yielded input in the control column and input group, tagging
# the options with form-control (and is-invalid when errors exist) first.
def draw_control(bootstrap, errors, _method, options)
  add_css_class!(options, "form-control")
  has_errors = errors.present?
  add_css_class!(options, "is-invalid") if has_errors

  draw_control_column(bootstrap, offset: bootstrap.label[:hide]) do
    draw_input_group(bootstrap, errors) { yield }
  end
end
ruby
{ "resource": "" }