_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q17400
SidekiqStatus.Container.save
train
# Persists the container: stores the JSON-serialized dump under the status
# key (with TTL) and registers the jid in the statuses sorted set, both in
# one Redis MULTI transaction.
def save
  payload = Sidekiq.dump_json(dump)
  Sidekiq.redis do |conn|
    conn.multi do
      conn.setex(status_key, ttl, payload)
      conn.zadd(self.class.statuses_key, Time.now.to_f.to_s, jid)
    end
  end
end
ruby
{ "resource": "" }
q17401
SidekiqStatus.Container.delete
train
# Removes every Redis trace of this container: the status key plus the
# jid's entries in the kill and statuses sorted sets, atomically.
def delete
  Sidekiq.redis do |conn|
    conn.multi do
      conn.del(status_key)
      conn.zrem(self.class.kill_key, jid)
      conn.zrem(self.class.statuses_key, jid)
    end
  end
end
ruby
{ "resource": "" }
q17402
SidekiqStatus.Container.kill
train
# Marks the job as killed: flips the status, persists the container, and
# drops the jid from the kill set so the request isn't processed twice.
def kill
  self.status = 'killed'
  Sidekiq.redis do |conn|
    conn.multi do
      save
      conn.zrem(self.class.kill_key, jid)
    end
  end
end
ruby
{ "resource": "" }
q17403
SidekiqStatus.Container.load
train
# Populates the container from a Hash (the shape produced by #dump),
# filling DEFAULTS for missing keys. 'last_updated_at' is converted from
# an epoch integer back to a Time; nil stays nil.
def load(data)
  attrs = DEFAULTS.merge(data)
  @args    = attrs['args']
  @worker  = attrs['worker']
  @queue   = attrs['queue']
  @status  = attrs['status']
  @at      = attrs['at']
  @total   = attrs['total']
  @message = attrs['message']
  @payload = attrs['payload']
  epoch = attrs['last_updated_at']
  @last_updated_at = epoch && Time.at(epoch.to_i)
end
ruby
{ "resource": "" }
q17404
SidekiqStatus.Container.dump
train
# Serializes the container into a plain string-keyed Hash suitable for
# JSON encoding; 'last_updated_at' is stamped with the current epoch time.
def dump
  {
    'args'            => args,
    'worker'          => worker,
    'queue'           => queue,
    'status'          => status,
    'at'              => at,
    'total'           => total,
    'message'         => message,
    'payload'         => payload,
    'last_updated_at' => Time.now.to_i
  }
end
ruby
{ "resource": "" }
q17405
MessageBird.Client.send_conversation_message
train
# Sends a message within an existing conversation via the Conversations
# API and wraps the response in a ConversationMessage.
def send_conversation_message(from, to, params = {})
  payload = params.merge(:from => from, :to => to)
  ConversationMessage.new(conversation_request(:post, 'send', payload))
end
ruby
{ "resource": "" }
q17406
MessageBird.Client.start_conversation
train
# Starts a new conversation with the recipient over the given channel and
# wraps the response in a Conversation.
def start_conversation(to, channelId, params = {})
  payload = params.merge(:to => to, :channelId => channelId)
  Conversation.new(conversation_request(:post, 'conversations/start', payload))
end
ruby
{ "resource": "" }
q17407
MessageBird.Client.message_create
train
# Creates an outbound SMS. An Array of recipients is collapsed into the
# comma-separated string the REST API expects.
def message_create(originator, recipients, body, params = {})
  recipients = recipients.join(',') if recipients.is_a?(Array)
  payload = params.merge(
    :originator => originator.to_s,
    :body       => body.to_s,
    :recipients => recipients
  )
  Message.new(request(:post, 'messages', payload))
end
ruby
{ "resource": "" }
q17408
MessageBird.Client.voice_message_create
train
# Creates an outbound voice message. An Array of recipients is collapsed
# into the comma-separated string the REST API expects.
def voice_message_create(recipients, body, params = {})
  recipients = recipients.join(',') if recipients.is_a?(Array)
  payload = params.merge(:recipients => recipients, :body => body.to_s)
  VoiceMessage.new(request(:post, 'voicemessages', payload))
end
ruby
{ "resource": "" }
q17409
MarkyMarkov.TemporaryDictionary.method_missing
train
# Dynamic sugar: generate_<N>_word(s) / generate_<N>_sentence(s) are
# routed to generate_n_words / generate_n_sentences with N as an Integer.
# Anything else falls through to the normal lookup chain.
def method_missing(method_sym, *args, &block)
  case method_sym.to_s
  when /^generate_(\d*)_word[s]*$/
    generate_n_words(Regexp.last_match(1).to_i)
  when /^generate_(\d*)_sentence[s]*$/
    generate_n_sentences(Regexp.last_match(1).to_i)
  else
    super
  end
end
ruby
{ "resource": "" }
q17410
Coolio.Loop.run
train
# Runs the event loop until stopped or no watchers remain active. Raises
# when the loop has no watchers at all, since it would spin forever.
def run(timeout = nil)
  raise RuntimeError, "no watchers for this loop" if @watchers.empty?

  @running = true
  run_once(timeout) while @running && !@active_watchers.zero?
  @running = false
end
ruby
{ "resource": "" }
q17411
Coolio.IO.on_readable
train
# Reads the next chunk from the underlying IO and hands it to #on_read.
# EAGAIN/EINTR mean "try again later"; hard errors (reset, EOF, closed
# socket, ...) close the connection.
def on_readable
  on_read @_io.read_nonblock(INPUT_SIZE)
rescue Errno::EAGAIN, Errno::EINTR
  # spurious wakeup or interrupted syscall -- nothing to do yet
rescue SystemCallError, EOFError, IOError, SocketError
  # SystemCallError catches Errno::ECONNRESET amongst others.
  close
end
ruby
{ "resource": "" }
q17412
Coolio.IO.on_writable
train
# Flushes as much of the pending write buffer as the socket will accept.
# EINTR is retried on the next readiness event; hard socket errors close
# the connection. When the buffer drains, the write watcher is disabled
# and on_write_complete fires.
def on_writable
  begin
    @_write_buffer.write_to(@_io)
  rescue Errno::EINTR
    return
  rescue SystemCallError, IOError, SocketError
    # SystemCallError catches Errno::EPIPE & Errno::ECONNRESET amongst others.
    return close
  end

  return unless @_write_buffer.empty?
  disable_write_watcher
  on_write_complete
end
ruby
{ "resource": "" }
q17413
Coolio.DNSResolver.send_request
train
# Sends the DNS request datagram, rotating the nameserver list so retries
# spread across servers. An unreachable host is silently skipped; a later
# retry will hit the next server.
def send_request
  @nameservers.push(@nameservers.shift) # rotate them
  begin
    @socket.send request_message, 0, @nameservers.first, DNS_PORT
  rescue Errno::EHOSTUNREACH
    # TODO: figure out why it has to be wrapped here, when the other
    # wrapper should be wrapping this one!
  end
end
ruby
{ "resource": "" }
q17414
Coolio.DNSResolver.on_readable
train
# Handles an incoming DNS datagram: parses the response address and fires
# the success or failure callback, then detaches the watcher either way.
# ECONNREFUSED and unparseable responses both count as failure.
def on_readable
  datagram =
    begin
      @socket.recvfrom_nonblock(DATAGRAM_SIZE).first
    rescue Errno::ECONNREFUSED
      nil
    end

  address = response_address(datagram) rescue nil
  address ? on_success(address) : on_failure
  detach
end
ruby
{ "resource": "" }
q17415
Coolio.TCPSocket.preinitialize
train
# Prepares connection state before DNS resolution completes: a write
# buffer so callers can write immediately, and a resolver that finishes
# the connect once the address is known.
def preinitialize(addr, port, *args)
  # allow for writing BEFORE DNS has resolved
  @_write_buffer = ::IO::Buffer.new
  @remote_host = addr
  @remote_addr = addr
  @remote_port = port
  @_resolver = TCPConnectResolver.new(self, addr, port, *args)
end
ruby
{ "resource": "" }
q17416
Coolio.DSL.connect
train
# DSL entry point: connects to host:port using either a block-defined
# anonymous connection class or a previously registered connection type,
# attaches the client to the default event loop, and returns it.
def connect(host, port, connection_name = nil, *initializer_args, &block)
  if block_given?
    initializer_args.unshift(connection_name) if connection_name
    klass = Class.new(Cool.io::TCPSocket)
    builder = ConnectionBuilder.new(klass)
    builder.instance_eval(&block)
  else
    raise ArgumentError, "no connection name or block given" unless connection_name
    klass = self[connection_name]
  end

  client = klass.connect(host, port, *initializer_args)
  client.attach(Cool.io::Loop.default)
  client
end
ruby
{ "resource": "" }
q17417
Coolio.DSL.[]
train
# Looks up a registered connection class by snake_case name, e.g.
# :echo_server -> Coolio::Connections::EchoServer. Raises NameError with
# a friendly message when no such connection type exists.
def [](connection_name)
  class_name = connection_name.to_s.split('_').map(&:capitalize).join
  Coolio::Connections.const_get(class_name)
rescue NameError
  raise NameError, "No connection type registered for #{connection_name.inspect}"
end
ruby
{ "resource": "" }
q17418
ChefAPI.Util.safe_read
train
# Reads a file and returns [basename-without-extension, contents].
# Permission and missing-file errors are re-raised as rich ChefAPI errors.
def safe_read(path)
  path = File.expand_path(path)
  basename = File.basename(path, '.*')
  [basename, File.read(path)]
rescue Errno::EACCES
  raise Error::InsufficientFilePermissions.new(path: path)
rescue Errno::ENOENT
  raise Error::FileNotFound.new(path: path)
end
ruby
{ "resource": "" }
q17419
ChefAPI.Util.fast_collect
train
# Maps +block+ over +collection+ with one thread per item, preserving
# order; results travel through thread-local storage and are gathered
# after each join. Only worthwhile for I/O-bound blocks (MRI GVL).
def fast_collect(collection, &block)
  threads = collection.map do |item|
    Thread.new { Thread.current[:result] = block.call(item) }
  end
  threads.map do |thread|
    thread.join
    thread[:result]
  end
end
ruby
{ "resource": "" }
q17420
ChefAPI.Resource::Base.save!
train
# Validates and persists the resource: POST for a new resource, PUT for an
# existing one. Schema-known attributes echoed back by the server are
# merged into the local copy. Returns true.
def save!
  validate!

  response =
    if new_resource?
      self.class.post(to_json, _prefix)
    else
      self.class.put(id, to_json, _prefix)
    end

  # Update our local copy with any partial information that was returned
  # from the server, ignoring any "bad" attributes that aren't defined in
  # our schema.
  response.each do |key, value|
    update_attribute(key, value) if attribute?(key)
  end

  true
end
ruby
{ "resource": "" }
q17421
ChefAPI.Resource::Base.update_attribute
train
# Writes a single attribute value, raising Error::UnknownAttribute when
# the (symbolized) key is not part of the schema.
def update_attribute(key, value)
  raise Error::UnknownAttribute.new(attribute: key) unless attribute?(key.to_sym)
  _attributes[key.to_sym] = value
end
ruby
{ "resource": "" }
q17422
ChefAPI.Resource::Base.validate!
train
# Raises Error::InvalidResource (with all messages joined into one
# sentence) when validation fails; returns true otherwise.
def validate!
  return true if valid?

  sentence = errors.full_messages.join(', ')
  raise Error::InvalidResource.new(errors: sentence)
end
ruby
{ "resource": "" }
q17423
ChefAPI.Resource::Base.valid?
train
# Clears stale errors, runs every registered validator against the
# resource, and reports whether any of them recorded an error.
def valid?
  errors.clear
  validators.each { |validator| validator.validate(self) }
  errors.empty?
end
ruby
{ "resource": "" }
q17424
ChefAPI.Resource::Base.diff
train
# Computes the difference against the remote copy as
# { attribute => { local:, remote: } }. A missing remote resource is
# treated as empty, so every locally-set attribute shows up.
def diff
  remote = self.class.fetch(id, _prefix) || self.class.new({}, _prefix)
  remote._attributes.each_with_object({}) do |(key, remote_value), changes|
    local_value = _attributes[key]
    unless local_value == remote_value
      changes[key] = { local: local_value, remote: remote_value }
    end
  end
end
ruby
{ "resource": "" }
q17425
ChefAPI.Resource::Base.to_hash
train
# Deep-converts the resource to a plain Hash, recursing into any
# attribute value that itself responds to #to_hash.
def to_hash
  _attributes.each_with_object({}) do |(key, value), hash|
    hash[key] = value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
ruby
{ "resource": "" }
q17426
ChefAPI.Resource::Base.inspect
train
# Human-readable representation including prefix and attributes; long
# string values are truncated to keep the output scannable.
def inspect
  parts = (_prefix).merge(_attributes).map do |key, value|
    shown = value.is_a?(String) ? Util.truncate(value, length: 50) : value
    "#{key}: #{shown.inspect}"
  end
  "#<#{self.class.classname} #{parts.join(', ')}>"
end
ruby
{ "resource": "" }
q17427
ChefAPI.Validator::Type.validate
train
# Adds a type error unless the attribute's value (when present) is an
# instance of one of the allowed types; nil/false values are skipped so a
# separate required-validator can own presence checks.
def validate(resource)
  value = resource._attributes[attribute]
  return unless value
  return if types.any? { |allowed| value.is_a?(allowed) }

  # NOTE(review): `type` (singular) is referenced here while the check
  # above uses `types` -- presumably an accessor elsewhere; verify.
  short_name = type.to_s.split('::').last
  resource.errors.add(attribute, "must be a kind of #{short_name}")
end
ruby
{ "resource": "" }
q17428
ChefAPI.Authentication.digest_io
train
# Computes the Base64-encoded SHA-1 digest of an IO's entire contents,
# reading in 1KB chunks, then rewinds the IO so callers can reuse it.
def digest_io(io)
  sha1 = Digest::SHA1.new
  while (chunk = io.read(1024))
    sha1.update(chunk)
  end
  io.rewind
  Base64.encode64(sha1.digest)
end
ruby
{ "resource": "" }
q17429
ChefAPI.Resource::CollectionProxy.fetch
train
# Returns the cached (or freshly loaded) resource for +id+, or nil when
# no such entry exists in the collection.
def fetch(id)
  return nil unless exists?(id)

  cached(id) { klass.from_url(get(id), prefix) }
end
ruby
{ "resource": "" }
q17430
ChefAPI.Resource::CollectionProxy.each
train
# Yields every resource in the collection, materializing each entry from
# its URL at most once via the cache; without a block, entries are still
# loaded but nothing is yielded (matching the original behavior).
def each(&block)
  collection.each do |id, url|
    resource = cached(id) { klass.from_url(url, prefix) }
    block.call(resource) if block
  end
end
ruby
{ "resource": "" }
q17431
ChefAPI.Resource::CollectionProxy.inspect
train
# Lists the collection's members without forcing a fetch: cached objects
# are rendered as-is, uncached ones as stubs built from the primary key.
def inspect
  rendered = collection.map do |id, _url|
    (cached(id) || klass.new(klass.schema.primary_key => id)).to_s
  end
  "#<#{self.class.name} [#{rendered.join(', ')}]>"
end
ruby
{ "resource": "" }
q17432
ChefAPI.Connection.add_request_headers
train
# Stamps the standard ChefAPI headers (JSON content negotiation,
# keep-alive, client identification) onto an outgoing request, logging
# each header as it is applied.
def add_request_headers(request)
  log.info "Adding request headers..."

  headers = {
    'Accept'         => 'application/json',
    'Content-Type'   => 'application/json',
    'Connection'     => 'keep-alive',
    'Keep-Alive'     => '30',
    'User-Agent'     => user_agent,
    'X-Chef-Version' => '11.4.0',
  }

  headers.each do |key, value|
    log.debug "#{key}: #{value}"
    request[key] = value
  end
end
ruby
{ "resource": "" }
q17433
ChefAPI.Schema.attribute
train
# Defines a schema attribute.
#
# @param key [Symbol, String] the attribute name
# @param options [Hash] :primary marks the primary key, :default supplies
#   the default value; every remaining pair is treated as a validation
#   (validator name => validator options).
# @return the attribute key
#
# Fixed: dropped the unused `primary_key` local (only its truthiness was
# used) and renamed the inner block parameter, which shadowed the outer
# `options` argument.
def attribute(key, options = {})
  @primary_key = key.to_sym if options.delete(:primary)
  @attributes[key] = options.delete(:default)

  # All remaining options are assumed to be validations; a falsy value
  # (e.g. required: false) disables the validator entirely.
  options.each do |validation, validation_options|
    next unless validation_options
    @validators << Validator.find(validation).new(key, validation_options)
  end

  key
end
ruby
{ "resource": "" }
q17434
VestalVersions.Version.original_number
train
# Returns the version number this record was originally created as,
# following the reverted_from chain recursively; a dangling reference
# bottoms out at 1.
def original_number
  return number if reverted_from.nil?

  source = versioned.versions.at(reverted_from)
  source.nil? ? 1 : source.original_number
end
ruby
{ "resource": "" }
q17435
VestalVersions.Creation.update_version
train
# Folds the pending changes into the most recent version record instead
# of creating a new one; falls back to create_version when no version
# exists yet.
def update_version
  latest = versions.last
  return create_version unless latest

  latest.modifications_will_change!
  latest.update_attribute(:modifications, latest.changes.append_changes(version_changes))
  reset_version_changes
  reset_version
end
ruby
{ "resource": "" }
q17436
VestalVersions.Reset.reset_to!
train
# Hard-resets the record to the given version: reverts and saves without
# creating a new version, then deletes every version after the target.
# Returns the result of the underlying save.
def reset_to!(value)
  saved = skip_version { revert_to!(value) }
  if saved
    versions.send(:delete, versions.after(value))
    reset_version
  end
  saved
end
ruby
{ "resource": "" }
q17437
VestalVersions.VersionTagging.tag_version
train
# Tags the current version (building an initial version record when none
# exists), reloads the association, and returns the created tag.
def tag_version(tag)
  version_record = versions.at(version) || versions.build(:number => 1)
  tagged = version_record.tag!(tag)
  versions.reload
  tagged
end
ruby
{ "resource": "" }
q17438
VestalVersions.Reversion.revert_to
train
# Reverts the model's attributes (in memory, unsaved) to their values at
# the given version and updates the in-memory version pointer.
def revert_to(value)
  to_number = versions.number_at(value)
  changes_between(version, to_number).each do |attribute, delta|
    write_attribute(attribute, delta.last)
  end
  reset_version(to_number)
end
ruby
{ "resource": "" }
q17439
Stackup.StackWatcher.zero
train
# Fast-forwards the watcher past all existing stack events so only events
# newer than "now" are reported. Returns nil; a ValidationError (stack
# does not exist yet) is deliberately swallowed.
def zero
  newest = stack.events.first
  @last_processed_event_id = newest.id unless newest.nil?
  nil
rescue Aws::CloudFormation::Errors::ValidationError
  # stack doesn't exist yet -- nothing to skip
end
ruby
{ "resource": "" }
q17440
Stackup.Stack.on_event
train
# Registers an event handler, accepting either a callable argument or a
# block (the argument wins). Raises ArgumentError when neither is given.
def on_event(event_handler = nil, &block)
  event_handler ||= block
  raise ArgumentError, "no event_handler provided" if event_handler.nil?

  @event_handler = event_handler
end
ruby
{ "resource": "" }
q17441
Stackup.Stack.create_or_update
train
# Creates the stack, or updates it when it already exists. Massages the
# options (template/policy objects to JSON bodies, parameters and tags
# normalized, default capabilities), removes stacks stuck in a dead
# state first, and falls back to create when update says the stack is
# missing.
def create_or_update(options)
  options = options.dup

  if (template_data = options.delete(:template))
    options[:template_body] = MultiJson.dump(template_data)
  end
  if (parameters = options[:parameters])
    options[:parameters] = Parameters.new(parameters).to_a
  end
  if (tags = options[:tags])
    options[:tags] = normalize_tags(tags)
  end
  if (policy_data = options.delete(:stack_policy))
    options[:stack_policy_body] = MultiJson.dump(policy_data)
  end
  if (policy_data = options.delete(:stack_policy_during_update))
    options[:stack_policy_during_update_body] = MultiJson.dump(policy_data)
  end
  options[:capabilities] ||= ["CAPABILITY_NAMED_IAM"]

  # A stack stuck in e.g. ROLLBACK_COMPLETE can't be updated -- remove it.
  delete if ALMOST_DEAD_STATUSES.include?(status)

  update(options)
rescue NoSuchStack
  create(options)
end
ruby
{ "resource": "" }
q17442
Stackup.Stack.modify_stack
train
# Applies a stack modification. In blocking mode the final status must
# match target_status (anything ===-able: String, Regexp, ...) or a
# StackUpdateError is raised; in async mode the call returns immediately.
def modify_stack(target_status, failure_message, &block)
  return modify_stack_asynchronously(&block) unless wait?

  final_status = modify_stack_synchronously(&block)
  raise StackUpdateError, failure_message unless target_status === final_status
  final_status
end
ruby
{ "resource": "" }
q17443
Stackup.Stack.modify_stack_synchronously
train
# Runs the given modification, then polls -- forwarding each new stack
# event to the registered handler -- until the stack reaches a terminal
# status (nil, *_COMPLETE or *_FAILED), which is returned.
def modify_stack_synchronously
  watch do |watcher|
    handling_cf_errors do
      yield
    end
    loop do
      watcher.each_new_event(&event_handler)
      current_status = status
      logger.debug("stack_status=#{current_status}")
      return current_status if current_status.nil? || current_status =~ /_(COMPLETE|FAILED)$/
      sleep(wait_poll_interval)
    end
  end
end
ruby
{ "resource": "" }
q17444
Stackup.Stack.extract_hash
train
# Generic extractor: walks cf_stack.<collection_name> and builds a Hash
# of item.<key_name> => item.<value_name>, translating CF API errors.
def extract_hash(collection_name, key_name, value_name)
  handling_cf_errors do
    result = {}
    cf_stack.public_send(collection_name).each do |item|
      result[item.public_send(key_name)] = item.public_send(value_name)
    end
    result
  end
end
ruby
{ "resource": "" }
q17445
Stackup.ChangeSet.create
train
# Creates a CloudFormation change-set for this stack and polls until it
# reaches a terminal state, returning the final change-set status.
# Options are normalized the same way Stack#create_or_update does
# (template to JSON body, parameters/tags normalized, default
# capabilities); :force deletes any existing change-set of the same name
# before creating.
def create(options = {})
  options = options.dup
  options[:stack_name] = stack.name
  options[:change_set_name] = name
  # UPDATE against an existing stack, CREATE otherwise.
  options[:change_set_type] = stack.exists? ? "UPDATE" : "CREATE"
  force = options.delete(:force)
  options[:template_body] = MultiJson.dump(options.delete(:template)) if options[:template]
  options[:parameters] = Parameters.new(options[:parameters]).to_a if options[:parameters]
  options[:tags] = normalize_tags(options[:tags]) if options[:tags]
  options[:capabilities] ||= ["CAPABILITY_NAMED_IAM"]
  delete if force
  handling_cf_errors do
    cf_client.create_change_set(options)
    # Poll the change-set until it reports a terminal status.
    loop do
      current = describe
      logger.debug("change_set_status=#{current.status}")
      case current.status
      when /COMPLETE/
        return current.status
      when "FAILED"
        logger.error(current.status_reason)
        # NOTE(review): this guards on `status` (a separate method) rather
        # than `current.status`, which is already known to be "FAILED" --
        # verify whether the raise should be unconditional.
        raise StackUpdateError, "change-set creation failed" if status == "FAILED"
      end
      sleep(wait_poll_interval)
    end
    # NOTE(review): appears unreachable -- the loop above only exits via
    # `return` or `raise`.
    status
  end
end
ruby
{ "resource": "" }
q17446
UserAccountPermits.UserRolePermit.static_rules
train
# Declares the static permission rules for this role: Users are never
# manageable; Comments, Post-like and Article-like classes are readable;
# Article-like classes are writable; and authors get full management of
# their own Articles, Posts and Comments.
def static_rules
  cannot :manage, User
  can :read, Comment
  can :read, any(/Post/)
  can :read, Article
  can :write, any(/Article/)
  author_of(Article) do |author|
    author.can :manage
  end
  author_of(Post) do |author|
    author.can :manage
  end
  author_of(Comment) do |author|
    author.can :manage
  end
  #
  # can :manage, :all
  # scope :account do |account|
  #   account.author_of(Article) do |author|
  #     author.can :manage
  #     author.cannot :delete
  #   end
  #
  #   account.writer_of(Post).can :manage
  # end
  #
  # scope :user do |user|
  #   user.writer_of(Comment).can :manage
  # end
end
ruby
{ "resource": "" }
q17447
FlickRaw.Flickr.call
train
# Issues a signed POST to the Flickr REST endpoint. Entries under :oauth
# in args are hoisted into the OAuth parameter set; everything else
# becomes API arguments. Returns the processed API response.
def call(req, args = {}, &block)
  oauth_args = args.delete(:oauth) || {}
  oauth_params = { :oauth_token => @access_token }.merge(oauth_args)
  http_response = @oauth_consumer.post_form(REST_PATH, @access_secret, oauth_params, build_args(args, req))
  process_response(req, http_response.body)
end
ruby
{ "resource": "" }
q17448
FlickRaw.Flickr.get_access_token
train
# Exchanges an authorized request token (+verifier) for an access token,
# memoizing the token/secret pair on the client for subsequent calls.
def get_access_token(token, secret, verify)
  access_token = @oauth_consumer.access_token(
    FLICKR_OAUTH_ACCESS_TOKEN, secret,
    :oauth_token => token, :oauth_verifier => verify
  )
  @access_token = access_token['oauth_token']
  @access_secret = access_token['oauth_token_secret']
  access_token
end
ruby
{ "resource": "" }
q17449
Slideshow.CaptureHelper.capture_erb
train
# Captures whatever a block writes to its ERB buffer and returns it as a
# string, removing it from the buffer so it isn't rendered twice. When
# the block has no ERB buffer, simply returns the block's value.
def capture_erb(*args, &block)
  # get the buffer from the block's binding (best effort)
  buffer = _erb_buffer(block.binding) rescue nil

  # If there is no buffer, just call the block and get the contents
  return block.call(*args) if buffer.nil?

  # If there is a buffer, execute the block, then extract its contents
  mark = buffer.length
  block.call(*args)
  captured = buffer[mark..-1]
  # replace it in the original with empty string
  buffer[mark..-1] = ""
  captured
end
ruby
{ "resource": "" }
q17450
Slideshow.DebugFilter.dump_content_to_file_debug_text_erb
train
# Debug aid: when verbose mode is on, writes +content+ to
# "<outdir>/<name>.debug.text.erb" before the ERB merge. Always returns
# the unmodified content so it can sit inline in a filter chain.
def dump_content_to_file_debug_text_erb(content)
  # NB: using attribs from mixed in class
  # - opts
  # - outdir
  return content unless config.verbose?

  outname = "#{outdir}/#{@name}.debug.text.erb"
  puts " Dumping content before erb merge to #{outname}..."
  File.open(outname, 'w') { |f| f.write(content) }
  content
end
ruby
{ "resource": "" }
q17451
PgLtree.Ltree.ltree
train
# Activates ltree behavior on the model: records which column stores the
# materialized path and, unless cascading is disabled, installs callbacks
# that keep descendant paths in sync on update/destroy.
def ltree(column = :path, options: { cascade: true })
  cattr_accessor :ltree_path_column
  self.ltree_path_column = column

  if options[:cascade]
    after_update :cascade_update
    after_destroy :cascade_destroy
  end

  extend ClassMethods
  include InstanceMethods
end
ruby
{ "resource": "" }
q17452
RubocopChallenger.Go.regenerate_rubocop_todo!
train
# Regenerates .rubocop_todo.yml in a dedicated commit and returns the
# RuboCop versions recorded in the file [before, after] regeneration.
def regenerate_rubocop_todo!
  before_version = scan_rubocop_version_in_rubocop_todo_file
  pull_request.commit! ':police_car: regenerate rubocop todo' do
    Rubocop::Command.new.auto_gen_config
  end
  [before_version, scan_rubocop_version_in_rubocop_todo_file]
end
ruby
{ "resource": "" }
q17453
RubocopChallenger.Go.rubocop_challenge!
train
# Runs a single auto-correct challenge and commits the result. When no
# auto-correctable rule remains, a "re-generate todo" PR is opened
# instead and the error is re-raised for the caller.
def rubocop_challenge!(before_version, after_version)
  Rubocop::Challenge.exec(options[:file_path], options[:mode]).tap do |rule|
    pull_request.commit! ":police_car: #{rule.title}"
  end
rescue Errors::NoAutoCorrectableRule => e
  create_another_pull_request!(before_version, after_version)
  raise e
end
ruby
{ "resource": "" }
q17454
RubocopChallenger.Go.add_to_ignore_list_if_challenge_is_incomplete
train
# When the attempted auto-correct left offenses behind, records the rule
# in the ignore list (in its own commit) and warns the operator.
def add_to_ignore_list_if_challenge_is_incomplete(rule)
  return unless auto_correct_incomplete?(rule)

  pull_request.commit! ':police_car: add the rule to the ignore list' do
    editor = Rubocop::ConfigEditor.new
    editor.add_ignore(rule)
    editor.save
  end
  color_puts DESCRIPTION_THAT_CHALLENGE_IS_INCOMPLETE, PrComet::CommandLine::YELLOW
end
ruby
{ "resource": "" }
q17455
RubocopChallenger.Go.auto_correct_incomplete?
train
# A challenge is incomplete when the rule is still present in the todo
# file after auto-correction ran.
def auto_correct_incomplete?(rule)
  Rubocop::TodoReader.new(options[:file_path]).all_rules.include?(rule)
end
ruby
{ "resource": "" }
q17456
RubocopChallenger.PullRequest.create_rubocop_challenge_pr!
train
# Opens the PR for a single rubocop challenge, using a timestamped title
# and the standard challenge template/labels.
def create_rubocop_challenge_pr!(rule, template_file_path = nil)
  body = Github::PrTemplate.new(rule, template_file_path).generate
  create_pull_request!(
    title: "#{rule.title}-#{timestamp}",
    body: body,
    labels: labels
  )
end
ruby
{ "resource": "" }
q17457
RubocopChallenger.PullRequest.create_regenerate_todo_pr!
train
# Opens the PR that refreshes .rubocop_todo.yml for a new RuboCop
# release, describing the version bump in the PR body.
def create_regenerate_todo_pr!(before_version, after_version)
  create_pull_request!(
    title: "Re-generate .rubocop_todo.yml with RuboCop v#{after_version}",
    body: generate_pull_request_body(before_version, after_version),
    labels: labels
  )
end
ruby
{ "resource": "" }
q17458
Lda.Lda.print_topics
train
# Prints the top +words_per_topic+ vocabulary words for every topic,
# best scores first. Raises when no vocabulary is loaded. Returns nil.
#
# Fixed: the original passed a one-parameter block to #sort, which is not
# a valid comparator (sort yields two elements and expects an Integer);
# #sort_by on the score is what was intended.
def print_topics(words_per_topic = 10)
  raise 'No vocabulary loaded.' unless @vocab

  beta.each_with_index do |topic, topic_num|
    # Pair each score with its vocabulary index, order ascending by
    # score, then take the indices of the top-scoring words.
    ranked = topic.zip((0...@vocab.size).to_a).sort_by { |score, _idx| score }
    indices = ranked.map { |_score, idx| idx }.reverse[0...words_per_topic]

    puts "Topic #{topic_num}"
    puts "\t#{indices.map { |i| @vocab[i] }.join("\n\t")}"
    puts ''
  end
  nil
end
ruby
{ "resource": "" }
q17459
Lda.Lda.to_s
train
def to_s outp = ['LDA Settings:'] outp << ' Initial alpha: %0.6f'.format(init_alpha) outp << ' # of topics: %d'.format(num_topics) outp << ' Max iterations: %d'.format(max_iter) outp << ' Convergence: %0.6f'.format(convergence) outp << 'EM max iterations: %d'.format(em_max_iter) outp << ' EM convergence: %0.6f'.format(em_convergence) outp << ' Estimate alpha: %d'.format(est_alpha) outp.join("\n") end
ruby
{ "resource": "" }
q17460
Casting.SuperDelegate.super_delegate
train
# Calls the "next" implementation of the currently executing delegated
# method (analogous to `super` for Casting delegates). The calling
# method's name is recovered from the call stack; the owning delegate may
# be passed explicitly as the first argument, otherwise it is looked up.
#
# Raises NoMethodError when no further delegate defines the method
# (any NameError along the lookup is converted with context attached).
def super_delegate(*args, &block)
  method_name = name_of_calling_method(caller)
  owner = args.first || method_delegate(method_name)
  super_delegate_method = unbound_method_from_next_delegate(method_name, owner)
  # Zero-arity methods must be invoked without arguments or Ruby raises.
  if super_delegate_method.arity == 0
    super_delegate_method.bind(self).call
  else
    super_delegate_method.bind(self).call(*args, &block)
  end
rescue NameError
  raise NoMethodError.new("super_delegate: no delegate method `#{method_name}' for #{self.inspect} from #{owner}")
end
ruby
{ "resource": "" }
q17461
MerchCalendar.MerchWeek.merch_month
train
# Returns the merch month (1-12) containing this week's date by scanning
# each month of the merch year for one whose bounds include the date.
def merch_month
  # TODO: This is very inefficient, but less complex than strategic guessing
  # maybe switch to a binary search or something
  merch_year = calendar.merch_year_from_date(date)
  @merch_month ||= (1..12).detect do |num|
    calendar.end_of_month(merch_year, num) >= date &&
      date >= calendar.start_of_month(merch_year, num)
  end
end
ruby
{ "resource": "" }
q17462
MerchCalendar.RetailCalendar.end_of_year
train
# Returns the last day of the retail (4-5-4) year: the Saturday closest
# to the configured year-end anchor date in the following calendar year.
def end_of_year(year)
  # Jan 31st (anchor) of the next calendar year
  candidate = Date.new(year + 1, Date::MONTHNAMES.index(LAST_MONTH_OF_THE_YEAR), LAST_DAY_OF_THE_YEAR)
  offset = (candidate.wday + 1) % 7
  # Move to the nearest Saturday: forward when it is closer, else back.
  offset > 3 ? candidate + (7 - offset) : candidate - offset
end
ruby
{ "resource": "" }
q17463
MerchCalendar.RetailCalendar.start_of_month
train
# Returns the first day of the given merch month within a 4-5-4 year:
# a 91-day quarter offset plus a within-quarter offset that depends on
# which month group (4-week / 5-week) the month belongs to.
def start_of_month(year, merch_month)
  # 91 = number of days in a single 4-5-4 set (quarter)
  quarter_start = start_of_year(year) + ((merch_month - 1) / 3).to_i * 91
  case merch_month
  when *FOUR_WEEK_MONTHS
    quarter_start + 28 # 28 = 4 weeks
  when *FIVE_WEEK_MONTHS
    quarter_start + 63 # 63 = 4 weeks + 5 weeks
  else
    quarter_start
  end
end
ruby
{ "resource": "" }
q17464
MerchCalendar.RetailCalendar.start_of_quarter
train
# Maps a quarter to the merch month that opens it (1, 4, 7 or 10) and
# returns that month's start date. Unknown quarters raise.
def start_of_quarter(year, quarter)
  first_month =
    case quarter
    when QUARTER_1 then 1
    when QUARTER_2 then 4
    when QUARTER_3 then 7
    when QUARTER_4 then 10
    else raise "invalid quarter"
    end
  start_of_month(year, first_month)
end
ruby
{ "resource": "" }
q17465
MerchCalendar.RetailCalendar.end_of_quarter
train
# Maps a quarter to its closing merch month (3, 6, 9 or 12) and returns
# that month's end date. Unknown quarters raise.
def end_of_quarter(year, quarter)
  last_month =
    case quarter
    when QUARTER_1 then 3
    when QUARTER_2 then 6
    when QUARTER_3 then 9
    when QUARTER_4 then 12
    else raise "invalid quarter"
    end
  end_of_month(year, last_month)
end
ruby
{ "resource": "" }
q17466
MerchCalendar.RetailCalendar.merch_year_from_date
train
# Returns the retail (4-5-4) year a calendar date falls in: dates before
# that calendar year's retail start belong to the previous retail year.
#
# Fixed: dropped the unused `date_end_of_year` local, which computed
# end_of_year(date.year) and then never used it.
def merch_year_from_date(date)
  date < start_of_year(date.year) ? date.year - 1 : date.year
end
ruby
{ "resource": "" }
q17467
MerchCalendar.RetailCalendar.merch_months_in
train
# Returns the distinct merch-month start dates touched by the range
# [start_date, end_date].
#
# Fixed: the original's `next if prev_date == date` skipped the cursor
# advance as well as the push, so the loop could never progress once a
# duplicate month start was seen -- an infinite loop. The cursor now
# advances on every iteration, guaranteeing termination, while duplicate
# month starts are still collapsed.
def merch_months_in(start_date, end_date)
  merch_months = []
  prev_start = nil
  cursor = start_date
  while cursor <= end_date
    month_start = MerchCalendar.start_of_month(cursor.year, merch_month: cursor.month)
    unless month_start == prev_start
      merch_months << month_start
      prev_start = month_start
    end
    # Advance roughly half a merch month so no month can be skipped;
    # taking the max keeps the cursor strictly increasing.
    cursor = [cursor, month_start].max + 14
  end
  merch_months
end
ruby
{ "resource": "" }
q17468
MerchCalendar.RetailCalendar.weeks_for_month
train
# Builds a MerchWeek for every week of the given month (julian or merch
# month param), numbered from 1, each covering a consecutive 7-day slice
# of the merch month.
def weeks_for_month(year, month_param)
  merch_month = get_merch_month_param(month_param)
  month_start = start_of_month(year, merch_month)
  week_count = (end_of_month(year, merch_month) - month_start + 1) / 7

  (1..week_count).map do |week_num|
    week_start = month_start + ((week_num - 1) * 7)
    MerchWeek.new(week_start, {
      start_of_week: week_start,
      end_of_week: week_start + 6,
      week: week_num,
      calendar: RetailCalendar.new
    })
  end
end
ruby
{ "resource": "" }
q17469
MerchCalendar.StitchFixFiscalYearCalendar.merch_year_from_date
train
# Returns the fiscal year a date belongs to: dates on or before the
# fiscal year-end stay in that year, later dates roll into the next.
def merch_year_from_date(date)
  end_of_year(date.year) >= date ? date.year : date.year + 1
end
ruby
{ "resource": "" }
q17470
MerchCalendar.StitchFixFiscalYearCalendar.merch_months_in
train
# Returns the start dates of every merch month touched by the date
# range, resolving each fiscal [year, month] pair via start_of_month.
def merch_months_in(start_date, end_date)
  merch_year_and_month_from_dates(start_date, end_date).map do |year, month|
    start_of_month(year, month)
  end
end
ruby
{ "resource": "" }
q17471
MerchCalendar.StitchFixFiscalYearCalendar.merch_year_and_month_from_dates
train
# Walks month-by-month from start_date to end_date (anchored to the 14th
# to avoid month-boundary edge cases) and converts each month into its
# fiscal [year, month] pair via date_conversion.
def merch_year_and_month_from_dates(start_date, end_date)
  pairs = []
  cursor = Date.new(start_date.year, start_date.month, 14)
  finish = Date.new(end_date.year, end_date.month, 14)
  while cursor <= finish
    pairs << date_conversion(cursor)
    cursor = cursor >> 1
  end
  pairs
end
ruby
{ "resource": "" }
q17472
MerchCalendar.Util.start_of_week
train
# Start date of the given week, translating the julian month to its
# merch month before delegating to the retail calendar.
def start_of_week(year, month, week)
  retail_calendar.start_of_week(year, julian_to_merch(month), week)
end
ruby
{ "resource": "" }
q17473
MerchCalendar.Util.end_of_week
train
# End date of the given week, translating the julian month to its merch
# month before delegating to the retail calendar.
def end_of_week(year, month, week)
  retail_calendar.end_of_week(year, julian_to_merch(month), week)
end
ruby
{ "resource": "" }
q17474
MerchCalendar.Util.start_of_month
train
# Start date of a month; the month param may be julian or merch and is
# normalized before delegating to the retail calendar.
def start_of_month(year, month_param)
  retail_calendar.start_of_month(year, get_merch_month_param(month_param))
end
ruby
{ "resource": "" }
q17475
MerchCalendar.Util.end_of_month
train
# End date of a month; the month param may be julian or merch and is
# normalized before delegating to the retail calendar.
def end_of_month(year, month_param)
  retail_calendar.end_of_month(year, get_merch_month_param(month_param))
end
ruby
{ "resource": "" }
q17476
Starter.BaseFile.add_to_base
train
# Inserts the new mount point directly after the last "mount ..." line in
# the given file contents (mutates +file+ in place via sub!).
def add_to_base(file)
  last_mount = file.scan(/(\s+mount\s.*?\n)/).last.first
  file.sub!(last_mount, last_mount + mount_point)
end
ruby
{ "resource": "" }
q17477
Ninetails.Element.valid?
train
# True when every property that knows how to validate itself reports
# valid; properties without #valid? are assumed valid. All properties are
# validated (no short-circuit) so each can populate its own errors.
def valid?
  results = properties_instances.collect do |property_type|
    prop = property_type.property
    prop.respond_to?(:valid?) ? prop.valid? : true
  end
  results.all?
end
ruby
{ "resource": "" }
q17478
Ninetails.ContentSection.store_settings
train
# Applies each key/value from settings_hash to the section object via
# its setters, then snapshots the section's attributes into settings.
def store_settings(settings_hash)
  settings_hash.each { |key, value| section.public_send("#{key}=", value) }
  self.settings = section.attributes
end
ruby
{ "resource": "" }
q17479
Ninetails.Revision.sections_are_all_valid
train
# Validation hook: copies each invalid section's base errors up onto
# this revision's base errors.
def sections_are_all_valid
  sections.each do |section|
    next if section.valid?
    errors.add :base, section.errors.messages[:base]
  end
end
ruby
{ "resource": "" }
q17480
Ninetails.Revision.url_is_unique
train
# Validation hook: for page containers with a URL, ensure no other
# container's current revision already claims the same URL.
def url_is_unique
  return unless container.is_a?(Page) && url.present?

  taken = Ninetails::Container.
          where.not(id: container.id).
          includes(:current_revision).
          where(ninetails_revisions: { url: url }).
          exists?
  errors.add :url, "is already in use" if taken
end
ruby
{ "resource": "" }
q17481
Lumberjack.Formatter.add
train
# Registers a formatter for a class. Accepts a formatter object, a block,
# or a Symbol naming a built-in formatter class (:pretty_print =>
# PrettyPrintFormatter). Returns self so calls can be chained.
def add(klass, formatter = nil, &block)
  formatter ||= block
  if formatter.is_a?(Symbol)
    class_name = "#{formatter.to_s.gsub(/(^|_)([a-z])/) { |m| $~[2].upcase }}Formatter"
    formatter = Formatter.const_get(class_name).new
  end
  @class_formatters[klass] = formatter
  self
end
ruby
{ "resource": "" }
q17482
Lumberjack.Formatter.formatter_for
train
# Finds the formatter registered for a class, walking up the superclass
# chain; falls back to the default formatter when none matches.
def formatter_for(klass) #:nodoc:
  until klass.nil?
    formatter = @class_formatters[klass]
    return formatter if formatter
    klass = klass.superclass
  end
  @_default_formatter
end
ruby
{ "resource": "" }
q17483
Lumberjack.Logger.add
train
# Writes a log entry when +severity+ (numeric level, String or Symbol
# label) meets the logger's threshold. The message comes from +message+,
# the block, or +progname+ (shifting progname into the message slot,
# mirroring ::Logger). Device write failures are reported to STDERR
# rather than raised. Returns nil.
def add(severity, message = nil, progname = nil)
  severity = Severity.label_to_level(severity) if severity.is_a?(String) || severity.is_a?(Symbol)
  return unless severity && severity >= level

  time = Time.now
  if message.nil?
    if block_given?
      message = yield
    else
      message = progname
      progname = nil
    end
  end
  message = @_formatter.format(message)
  progname ||= self.progname

  entry = LogEntry.new(time, severity, message, progname, $$, Lumberjack.unit_of_work_id)
  begin
    device.write(entry)
  rescue => e
    $stderr.puts("#{e.class.name}: #{e.message}#{' at ' + e.backtrace.first if e.backtrace}")
    $stderr.puts(entry.to_s)
  end
  nil
end
ruby
{ "resource": "" }
q17484
Lumberjack.Logger.set_thread_local_value
train
# Stores a per-logger value in the current thread's locals under +name+.
# Values are keyed by logger instance so multiple loggers can coexist;
# storing nil removes the entry (and the whole hash when it empties).
def set_thread_local_value(name, value) #:nodoc:
  store = Thread.current[name] ||= {}
  if value.nil?
    store.delete(self)
    Thread.current[name] = nil if store.empty?
  else
    store[self] = value
  end
end
ruby
{ "resource": "" }
q17485
Lumberjack.Logger.push_thread_local_value
train
# Temporarily overrides a thread-local value for the duration of the
# block, restoring the previous value afterwards even on raise. Returns
# the block's result.
def push_thread_local_value(name, value) #:nodoc:
  previous = thread_local_value(name)
  set_thread_local_value(name, value)
  begin
    yield
  ensure
    set_thread_local_value(name, previous)
  end
end
ruby
{ "resource": "" }
q17486
Lumberjack.Logger.open_device
train
# Normalizes the +device+ argument into a Device instance: pass-through
# for Devices, a Writer wrapper for IO-like objects, Null for :null, and
# an appropriate log-file device (date-rolling, size-rolling or plain)
# for anything else, treated as a path.
def open_device(device, options) #:nodoc:
  if device.is_a?(Device)
    device
  elsif device.respond_to?(:write) && device.respond_to?(:flush)
    Device::Writer.new(device, options)
  elsif device == :null
    Device::Null.new
  else
    path = device.to_s
    if options[:roll]
      Device::DateRollingLogFile.new(path, options)
    elsif options[:max_size]
      Device::SizeRollingLogFile.new(path, options)
    else
      Device::LogFile.new(path, options)
    end
  end
end
ruby
{ "resource": "" }
q17487
Lumberjack.Logger.create_flusher_thread
train
# Spawns a background thread that flushes the logger whenever
# +flush_seconds+ have elapsed since the last flush. Returns nil (and
# creates no thread) when flush_seconds is zero or negative. Flush
# errors are reported to STDERR and the loop keeps running.
def create_flusher_thread(flush_seconds) #:nodoc:
  return unless flush_seconds > 0

  logger = self
  Thread.new do
    loop do
      begin
        sleep(flush_seconds)
        logger.flush if Time.now - logger.last_flushed_at >= flush_seconds
      rescue => e
        STDERR.puts("Error flushing log: #{e.inspect}")
      end
    end
  end
end
ruby
{ "resource": "" }
q17488
Lumberjack.Template.compile
train
# Translates ":name" placeholders in a template into sprintf positional
# specs ("%N$s") based on TEMPLATE_ARGUMENT_ORDER; unknown placeholders
# are left untouched.
def compile(template) #:nodoc:
  template.gsub(/:[a-z0-9_]+/) do |placeholder|
    index = TEMPLATE_ARGUMENT_ORDER.index(placeholder)
    index ? "%#{index + 1}$s" : placeholder
  end
end
ruby
{ "resource": "" }
q17489
ForemanFogProxmox.Proxmox.vm_compute_attributes
train
# Flattens a Fog Proxmox VM into Foreman compute attributes: the raw
# attributes minus config/vmid and blank values, augmented with config,
# volume and interface attributes.
def vm_compute_attributes(vm)
  attrs = vm.attributes.reject do |key, value|
    [:config, :vmid].include?(key.to_sym) || value.to_s.empty?
  end
  attrs = set_vm_config_attributes(vm, attrs)
  attrs = set_vm_volumes_attributes(vm, attrs)
  set_vm_interfaces_attributes(vm, attrs)
end
ruby
{ "resource": "" }
q17490
UUIDTools.UUID.timestamp
train
# For version 1 (time-based) UUIDs, reconstructs the embedded timestamp:
# 100ns intervals since the Gregorian epoch (1582-10-15) converted to a
# Time. Returns nil for any other UUID version.
def timestamp
  return nil if version != 1

  intervals = ((time_hi_and_version & 0x0FFF) << 48) +
              (time_mid << 32) +
              time_low
  # 0x01B21DD213814000 = 100ns intervals between 1582-10-15 and Unix epoch
  Time.at((intervals - 0x01B21DD213814000) / 10000000.0)
end
ruby
{ "resource": "" }
q17491
UUIDTools.UUID.generate_s
train
# Renders the UUID fields as the canonical lowercase string form
# "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx".
def generate_s
  result = sprintf("%8.8x-%4.4x-%4.4x-%2.2x%2.2x-",
                   @time_low, @time_mid, @time_hi_and_version,
                   @clock_seq_hi_and_reserved, @clock_seq_low)
  (0...6).each { |i| result << sprintf("%2.2x", @nodes[i]) }
  result.downcase
end
ruby
{ "resource": "" }
q17492
Sawyer.Resource.process_value
train
# Recursively wraps values from the API: Hashes become Resources, Arrays
# are mapped element-wise, scalars pass through untouched.
def process_value(value)
  case value
  when Hash  then self.class.new(@_agent, value)
  when Array then value.map { |element| process_value(element) }
  else value
  end
end
ruby
{ "resource": "" }
q17493
Sawyer.Resource.method_missing
train
# Dynamic attribute dispatch for API resources:
#   name = v  -> defines a real accessor, registers the field, assigns
#   name?     -> boolean coercion of a known field's value
#   name      -> defines a real accessor (so later reads bypass
#                method_missing) and returns the attribute value
#   agent/... -> SPECIAL_METHODS expose the matching @_ ivar
# Unknown attribute-shaped names return nil; anything else hits super.
def method_missing(method, *args)
  attr_name, suffix = method.to_s.scan(/([a-z0-9\_]+)(\?|\=)?$/i).first
  if suffix == ATTR_SETTER
    @_metaclass.send(:attr_accessor, attr_name)
    @_fields << attr_name.to_sym
    send(method, args.first)
  elsif attr_name && @_fields.include?(attr_name.to_sym)
    value = @attrs[attr_name.to_sym]
    case suffix
    when nil
      # Define a real accessor so subsequent reads skip method_missing.
      @_metaclass.send(:attr_accessor, attr_name)
      value
    when ATTR_PREDICATE then !!value
    end
  elsif suffix.nil? && SPECIAL_METHODS.include?(attr_name)
    instance_variable_get "@_#{attr_name}"
  elsif attr_name && !@_fields.include?(attr_name.to_sym)
    nil
  else
    super
  end
end
ruby
{ "resource": "" }
q17494
Sawyer.Agent.call
train
# Performs an HTTP request. For body-less verbs (those in NO_BODY) the
# second positional is treated as options. Non-String body data is
# encoded; :query and :headers options are merged onto the request.
# Returns a Response carrying request start/end timing.
def call(method, url, data = nil, options = nil)
  if NO_BODY.include?(method)
    options ||= data
    data = nil
  end
  options ||= {}
  url = expand_url(url, options[:uri])
  started = nil

  res = @conn.send(method, url) do |req|
    req.body = data.is_a?(String) ? data : encode_body(data) if data
    if (params = options[:query])
      req.params.update(params)
    end
    if (headers = options[:headers])
      req.headers.update(headers)
    end
    started = Time.now
  end

  Response.new(self, res, :sawyer_started => started, :sawyer_ended => Time.now)
end
ruby
{ "resource": "" }
q17495
Sawyer.Response.process_data
train
# Converts decoded JSON into resources: a Hash becomes a Resource, an
# Array is processed element-wise, nil stays nil, scalars pass through.
def process_data(data)
  case data
  when Hash  then Resource.new(agent, data)
  when Array then data.map { |element| process_data(element) }
  when nil   then nil
  else data
  end
end
ruby
{ "resource": "" }
q17496
Sawyer.Response.process_rels
train
# Parses the Link response header into a Hash of rel-name (Symbol) =>
# Relation built from the href. An absent header yields an empty Hash.
def process_rels
  pairs = (@headers["Link"] || "").split(', ').map do |link|
    href, name = link.match(/<(.*?)>; rel="(\w+)"/).captures
    [name.to_sym, Relation.from_link(@agent, name, :href => href)]
  end
  Hash[*pairs.flatten]
end
ruby
{ "resource": "" }
q17497
Gibberish.AES::SJCL.check_cipher_options
train
# Validates decryption options from an SJCL payload against this
# cipher's limits, raising CipherOptionsError on the first violation:
# PBKDF2 iteration count (DoS guard -- raise the ceiling via
# opts[:max_iter]), cipher mode, key strength, tag length, and IV size.
def check_cipher_options(c_opts)
  # Prevent DOS attacks from high PBKDF iterations
  # You an increase this by passing in opts[:max_iter]
  if @opts[:max_iter] < c_opts[:iter]
    raise CipherOptionsError.new("Iteration count of #{c_opts[:iter]} exceeds the maximum of #{@opts[:max_iter]}")
  end
  unless ALLOWED_MODES.include?(c_opts[:mode])
    raise CipherOptionsError.new("Mode '#{c_opts[:mode]}' not supported")
  end
  unless ALLOWED_KS.include?(c_opts[:ks])
    raise CipherOptionsError.new("Keystrength of #{c_opts[:ks]} not supported")
  end
  unless ALLOWED_TS.include?(c_opts[:ts])
    raise CipherOptionsError.new("Tag length of #{c_opts[:ts]} not supported")
  end
  if c_opts[:iv] && Base64.decode64(c_opts[:iv]).length > 12
    raise CipherOptionsError.new("Initialization vector's greater than 12 bytes are not supported in Ruby.")
  end
end
ruby
{ "resource": "" }
q17498
Gibberish.AES::CBC.encrypt
train
# Encrypts +data+ with AES-CBC in the OpenSSL-compatible
# "Salted__" + salt + ciphertext layout. Returns raw bytes when
# opts[:binary] is truthy, Base64 otherwise.
def encrypt(data, opts = {})
  salt = generate_salt(opts[:salt])
  setup_cipher(:encrypt, salt)
  ciphertext = cipher.update(data) + cipher.final
  payload = "Salted__#{salt}#{ciphertext}" # OpenSSL compatible
  opts[:binary] ? payload : Base64.encode64(payload)
end
ruby
{ "resource": "" }
q17499
Gibberish.RSA.encrypt
train
# RSA-encrypts +data+ (coerced to String) with the public key. Returns
# raw ciphertext when opts[:binary] is truthy, Base64 otherwise.
def encrypt(data, opts = {})
  ciphertext = @key.public_encrypt(data.to_s)
  opts[:binary] ? ciphertext : Base64.encode64(ciphertext)
end
ruby
{ "resource": "" }