query stringlengths 7 9.5k | document stringlengths 10 1.07M | negatives listlengths 19 19 | metadata dict |
|---|---|---|---|
Disable or enable ETag and Last Modified conditional GET. Equivalent to use_etag(true_or_false) use_last_modified(true_or_false) | def use_conditional_get(true_or_false)
use_etag(true_or_false)
use_last_modified(true_or_false)
end | [
"def not_modified_response(env, etag); end",
"def get(path, headers = {})\n cached_response = fetch_cached_response(path)\n if cached_response\n headers = headers.merge 'If-None-Match' => cached_response['Etag']\n end\n response = super(path, headers)\n case response\n when Ne... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
returns true if a handler is set for the HTTP method in param it is an alternative to get_handler which would return a true value if there is a default handler | def has_handler?(method)
send(method) && true
end | [
"def has_handler_for?(handler_method)\n @jr_handlers ||= {}\n @jr_handlers.has_key?(handler_method)\n end",
"def get?\r\nHTTP_METHOD_LOOKUP[request_method] == :get\r\nend",
"def post?\r\nHTTP_METHOD_LOOKUP[request_method] == :post\r\nend",
"def can_handle?(req)\n true\n end",
"def h... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
sets a handler for the HTTP method in param to val | def set_handler(method, val)
send("#{method}=", val)
end | [
"def http_method(value = nil)\n if value\n @http_method = value\n else\n @http_method\n end\n end",
"def http_method(value = nil)\n if value\n config[:http_method] = value\n else\n config[:http_method]\n end\n end",
"def htt... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns an array of pairs of array with [HTTPverbs, handler] an extra item is the :default handler NB: could also be a hash, but I'm fine with it | def handlers
HTTP_METHODS.map do |sym|
[sym, send(sym)]
end
end | [
"def http_methods(_path)\n []\n end",
"def match(verb, url)\n parts = url.gsub(/^\\/+/,'').split('/')\n\n @routes.each do |route|\n if route.verb == verb\n result = matches?(route, parts)\n if result\n re... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
returns an array like handlers but restricted to the not nil values | def handlers_set
handlers.reject{|sym, handler| handler.nil?}
end | [
"def handlers\n @handlers ||= Hash.new do |hash, key|\n if key.nil?\n raise NoHandler\n else\n raise InvalidHandler, key\n end\n end\n end",
"def violation_handlers\n if defined_violation_handlers.empty?\n Array(default_violation_handler)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Returns true if there is no handler set for this dispatcher | def no_handler?
handlers_set.empty?
end | [
"def handlers?\n registered_handlers.length > 0\n end",
"def has_event_handler?\n !!collected[:event_handler]\n end",
"def has_handler?(handler_name)\n @handlers.key? handler_name\n end",
"def has_handler?(method)\n send(method) && true\n end",
"def handled?\n hand... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks that the model given as a string exists | def model_exists?
if model.present?
begin
model.constantize
rescue NameError
errors.add :model, "#{model} is not a valid model"
end
end
end | [
"def model_exists? name\n name = ruby_to_oo name\n reflection_model.where(:model => name).exists?\n end",
"def model_exists?(model_type, name)\n get_model(model_type, name) != nil\n end",
"def model_exists?(model)\n model.exists? # Throws invalid ActiveRecord::StatementInvali... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Instantiates the reporter to generate the HamlLint report | def reporter
@reporter ||= ::HamlLint::Reporter::HashReporter.new(
::HamlLint::Logger.new(StringIO.new)
)
end | [
"def initialize\n @problems = []\n @default_info = {:check => 'unknown', :linenumber => 0, :column => 0}\n\n PuppetLint.configuration.checks.each do |check|\n method = PuppetLint.configuration.check_method[check]\n self.class.send(:define_method, \"lint_check_#{check}\", &method)\n end\n\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
GET /bbs GET /bbs.json | def index
@bbs = Bb.all
end | [
"def show\n @bbs_body = BbsBody.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @bbs_body }\n end\n end",
"def show\n @bb = Bb.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { rend... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /bbs POST /bbs.json | def create
@bb = Bb.new(bb_params)
respond_to do |format|
if @bb.save
format.html { redirect_to @bb, notice: 'Bb was successfully created.' }
format.json { render :show, status: :created, location: @bb }
else
format.html { render :new }
format.json { render json: @bb... | [
"def new\n @bbs_body = BbsBody.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.json { render json: @bbs_body }\n end\n end",
"def create\n @bb = Bb.new(params[:bb])\n\n respond_to do |format|\n if @bb.save\n format.html { redirect_to @bb, notice: 'Bb was ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /bbs/1 PATCH/PUT /bbs/1.json | def update
respond_to do |format|
if @bb.update(bb_params)
format.html { redirect_to @bb, notice: 'Bb was successfully updated.' }
format.json { render :show, status: :ok, location: @bb }
else
format.html { render :edit }
format.json { render json: @bb.errors, status: :un... | [
"def update\n @bbs_body = BbsBody.find(params[:id])\n\n respond_to do |format|\n if @bbs_body.update_attributes(params[:bbs_body])\n format.html { redirect_to @bbs_body, notice: 'Bbs body was successfully updated.' }\n format.json { head :ok }\n else\n format.html { render actio... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
DELETE /bbs/1 DELETE /bbs/1.json | def destroy
@bb.destroy
respond_to do |format|
format.html { redirect_to bbs_url, notice: 'Bb was successfully destroyed.' }
format.json { head :no_content }
end
end | [
"def destroy\n @bb = Bb.find(params[:id])\n @bb.destroy\n\n respond_to do |format|\n format.html { redirect_to bbs_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @testbb = Testbb.find(params[:id])\n @testbb.destroy\n\n respond_to do |format|\n format.htm... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
We use these mock accounts to ensure that the ones which are available in development are valid, to test omniauth actions and verify that a mock account is available for every supported omniauth provider. These must be identical to the ones in /config/environments/development.rb Remember to keep them in sync with devel... | def mock_auth_accounts
OmniAuth.config.mock_auth[:facebook] =
OmniAuth::AuthHash.new(
provider: 'facebook',
uid: 'facebook-test-uid-1',
info: {
name: 'facebook user',... | [
"def mock_login\n if Rails.env.development?\n a = Account.find_or_create_by(name: \"development\")\n user = User.find_or_create_by(username: \"luis.perichon\")\n user.current_account_id = a.id\n user.save\n\n sign_in(user)\n end\n end",
"def mock_auth_hash(merchant)\n return {\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete all entities which match all attributes attributes: Hash of attributes to match Returns: Integer count of entities deleted. | def delete_all(attributes = {})
raise NotImplementedError.new
end | [
"def delete_all!\n # find all current criteria and extract operand params from them \n original_criteria = CampaignCriterion.find(:campaign_id => @campaign_id).map do |criterion|\n criterion.select { |k,v| [ :xsi_type, :id ].include?(k) }\n end\n\n # HOTFIX temporarily remove platforms, a... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
The default stitching is a board of square tiles Inherit & overwrite these as necessary | def stitching(x, y)
# Return only what's already been generated (that is, what's above and to the left).
return [:normal, [
([x - 1, y] if x > 0),
([x - 1, y - 1] if x > 0 and y > 0),
([x, y - 1] if y > 0),
([x + 1, y - 1] if y > 0 and x < @field_width - 1)
].compact]
end | [
"def flip_board\n temp_board = deep_dup_array @board\n @size.times do |x|\n @size.times do |y|\n @board[x][y] = temp_board[@size - x - 1][y]\n end\n end\n pieces_fall\n end",
"def randomize_board\n (@size + rand(@size)).times do\n flip_bit rand(@size), rand(@size)\n end\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Public Methods Process => Validate, setup all appropriate request data, and register contexts => Return self | def process
raise RequestValidationError unless validate
@roles = get_roles
@user = get_or_create_user
@resource = get_resource
@assignment = get_assignment
@space = get_space
@space_user = get_or_create_space_user
register
self
end | [
"def process\n raise RequestValidationError unless validate\n\n @roles = get_roles\n @user = get_or_create_user\n @resource = get_resource\n\n self\n end",
"def process\n warn \"Processing configuration...\"\n process_config\n warn \"Processing initializers...\"\n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Helper function to expect a class to have a set of options defined. These options are not firstclass citizens of puppet, but instead a keyvalue map. So regular rspec matchers don't help. To stay DRY, introduce this helper. | def expect_option(klass, key, val)
# test each option
it do
should contain_class(klass).with_options(
lambda do |map|
if map[key] == val
true
else
fail "#{klass} option #{key.inspect} doesn't match (-- expected, ++ actual):\n"\
"-- #{val.inspect}\n"\
... | [
"def expect_option(klass, key, val)\n # test each option\n it do\n should contain_class(klass).with_options(\n lambda do |map|\n # check\n if map[key] == wrap_expected(val)\n true\n else\n fail \"#{klass} option #{key.inspect} doesn't match (-- expected, ++ actual):\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
get list of current available books | def available_books
puts "Available Books:"
@book_status.each { |k, v| puts "#{k}" if v == "available" }
end | [
"def available_books\n \n available_books = []\n \n @books.each do |book|\n \n if book.status == :available\n \n available_books << book.title\n\n end\n\n end\n\n end",
"def available_books\n @books.each do |book|\n if book.status == \"available\"\n puts... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Derives the mac key and encryption key | def derive_keys(key)
derived_key_size = key.size / 2
mac_key = key[0...derived_key_size]
enc_key = key[derived_key_size..-1]
return mac_key, enc_key
end | [
"def mac_encryption_key\n upload_key[0,4]\n end",
"def get_crypto_key_hex\n return @crypto_key if ! @crypto_key\n @crypto_key.unpack(\"H*\")\n end",
"def derive_format_2_mac_key(passphrase)\n key = ::OpenSSL::Digest::SHA1.new\n key.update(FORMAT_2_MAC_KEY)\n key.update(passphrase... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Default empty implementation of _roda_after, usually overridden by Roda.def_roda_before. | def _roda_after(res)
end | [
"def def_roda_after\n meths = private_instance_methods.grep(/\\A_roda_after_\\d\\d/).sort\n unless meths.empty?\n plugin :error_handler unless private_method_defined?(:_roda_after)\n if meths.length == 1\n class_eval(\"alias _roda_after #{meths.first}\", __FILE__... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Handle the given exception using handle_error, using a default status of 500. Run after hooks on the rack response, but if any error occurs when doing so, log the error using rack.errors and return the response. | def _handle_error(e)
res = @_response
res.send(:initialize)
res.status = 500
res = _roda_handle_route{handle_error(e)}
begin
_roda_after(res)
rescue => e2
if errors = env['rack.errors']
errors.puts "Error in after hook pro... | [
"def render_500(exception = nil)\n JsonApiServer.logger.error(exception.try(:message))\n JsonApiServer.logger.error(exception.try(:backtrace))\n\n errors = JsonApiServer.errors(\n status: 500,\n title: I18n.t('json_api_server.render_500.title'),\n detail: I18n.t('json... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
The standard commands which never change. | def standard_commands
{
'C' => Commands::ClearImageBuffer,
'T' => Commands::Feed,
'XS' => Commands::Issue
}
end | [
"def commands\n unless defined? @commands\n @commands = []\n end\n return @commands\n end",
"def supported_commands\n commands.keys\n end",
"def commands\n @commands ||= Foreman::Thor::CoreExt::OrderedHash.new\n end",
"def commands\r\n @hel... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Take a list of commands and execute them. Returns an array of objects. In the case of standard commands will just create a new command. With a derived command it will execute the method. A derived command may itself contain a list of commands so the whole thing is flattened to produce a single array. | def commands
@commands ||= [].tap do |c|
commands_list.each do |command|
if command.instance_of? String
c << standard_commands[command].new
else
ret_command = send(command)
c << (ret_command.instance_of?(Hash) ? ret_command.values : ret... | [
"def execute_commands(commands)\n commands.each do |command|\n execute_command(command)\n end\n \n end",
"def invoke_all #:nodoc:\n self.class.all_commands.map { |_, command| invoke_command(command) }\n end",
"def call\n commands = self.commands.any? ? self.commands : self.class.co... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Converts a Cygwin Unix path to a Windows path, e.g.: /cygdrive/d/path/to/file ==> D:/path/to/file | def cygwin_path(path)
if RUBY_PLATFORM.include?('cygwin') && path.index('/') == 0
IO.popen("cygpath -m #{path.include?(':') ? '-p' : ''} #{path.shellescape}").readline.chomp.gsub(/;/, '\\;')
else
path
end
end | [
"def fix_path(path)\n return path if not OS.windows?\n `cygpath -w #{path}`.strip.gsub(\"\\\\\", \"/\")\nend",
"def windows_path(path)\n path = cygwin_windows_path(path)\n path = wsl_to_windows_path(path)\n if windows? || wsl?\n path = windows_unc_path(path)\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /system/platform_accounts POST /system/platform_accounts.json | def create
@system_platform_account = System::PlatformAccount.new(system_platform_account_params)
if @system_platform_account.save
record_activities('创建接入账号', @system_platform_account.account_name)
flash[:success] = '创建接入账号成功'
redirect_to system_platform_accounts_path
else
flash[:err... | [
"def create_platform_account(data)\n url_params = {\n format: JSON_RESPONSE_FORMAT\n }\n url = Utils.prepare_url('accounts/', @sandbox, url_params)\n data = Utils.prepare_json_data(data)\n response = @transport.make_http_request(url, 'POST', data)\n\n block_given? ? yield(resp... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /system/platform_accounts/1 PATCH/PUT /system/platform_accounts/1.json | def update
if @system_platform_account.update(system_platform_account_params)
record_activities('修改接入账号', @system_platform_account.account_name)
flash[:success] = '修改接入账号成功'
redirect_to system_platform_accounts_path
else
flash[:error] = "修改失败: #{@system_platform_account.errors.full_messa... | [
"def update\n respond_to do |format|\n if @platform_account.update(platform_account_params)\n format.html { redirect_to @platform_account, notice: 'Platform account was successfully updated.' }\n format.json { render :show, status: :ok, location: @platform_account }\n else\n format... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Processes the given email, submits attachments to the Heathen server, delivers responses as configured | def process email, mail_to, is_rts=false
documents = []
unless email.has_attachments?
logger.info "From: #{email.from} Subject: (#{email.subject}) Files: no attachments"
return
end
logger.info "From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:fil... | [
"def process email\n documents = []\n\n unless email.has_attachments?\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: no attachments\"\n return\n end\n\n logger.info \"From: #{email.from} Subject: (#{email.subject}) Files: #{email.attachments.map(&:filename).... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Forward the email to sender, with decoded documents replacing the originals | def deliver_onward email, documents, mail_to
logger.info "Sending response mail to #{mail_to}"
email.cc [] # No CCing, just send to the recipient
email.to mail_to
email.subject "#{'Fwd: ' unless email.subject.to_s.start_with? 'Fwd:'}#{email.subject}"
email.return_path email.from unless ema... | [
"def forward_email(params)\n get_request('configureEmailForward?'+get_url_parameters(params)).body\n end",
"def forward\n message = self.message.class.new(:subject => subject, :body => body)\n message.sender = receiver\n message\n end",
"def forward\n message = self.class.new(:subject => su... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Opens and reads a file, first given the filename, then tries from the project base directory | def read_file filename
f = filename
unless File.exist? f
f = Pathname.new(__FILE__).realpath.parent.parent.parent + f
end
File.read f
end | [
"def readfile(filename)\n\tfilename = File.expand_path(filename, File.dirname(__FILE__))\n\tFile.read(filename)\nrescue\n\tnil\nend",
"def find filename\n return filename if File.exists? filename\n filename = \"./haml/\"+filename\n return filename if File.exists? filename\n filename = @src_folder+\"/\"+filena... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /germinations POST /germinations.json | def create
@germination = Germination.new(germination_params)
respond_to do |format|
if @germination.save
format.html { redirect_to @germination, notice: 'Germination was successfully created.' }
format.json { render :show, status: :created, location: @germin... | [
"def update\n respond_to do |format|\n if @germination.update(germination_params)\n format.html { redirect_to @germination, notice: 'Germination was successfully updated.' }\n format.json { render :show, status: :ok, location: @germination }\n else\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
PATCH/PUT /germinations/1 PATCH/PUT /germinations/1.json | def update
respond_to do |format|
if @germination.update(germination_params)
format.html { redirect_to @germination, notice: 'Germination was successfully updated.' }
format.json { render :show, status: :ok, location: @germination }
else
fo... | [
"def update\n @golfer = Golfer.find(params[:id])\n\n respond_to do |format|\n if @golfer.update_attributes(params[:golfer])\n format.html { redirect_to @golfer, notice: 'Golfer was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Method to update the quantity of an item input: updated_and_removed (hash), the food, and also the integer/value update steps: Call for the key, pass in an argument for the value (to update) output: hash with quantities changed | def update_quantity(updated_and_removed, food, quantity)
updated_and_removed[food] = quantity
return updated_and_removed
end | [
"def update_item_quantity(hash_of_items, item, quantity)\n hash_of_items[item] = quantity\n hash_of_items\nend",
"def update_item_quantity(item, new_quantity)\n\titem_removed_hash = remove_item(\"cereal\")\n\titem_removed_hash[item] = new_quantity\n\tupdate_quantity_hash = item_removed_hash\nend",
"def update... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Top to bottom per col approach | def top_to_bottom_approach
largest_prod_of_all_cols = 0
for each_row in (0..15)
for each_col in (0..19)
curr_prod_in_col = $grid_of_numbers[each_row][each_col] * $grid_of_numbers[each_row + 1][each_col] * $grid_of_numbers[each_row + 2][each_col] * $grid_of_numbers[each_row + 3][each_col]
if curr_pro... | [
"def topToBottom(grid, width, height)\n\n strings = Array.new\n string = String.new\n\n for x in 0..width-1 do\n\n string = \"\"\n\n for y in 0..height-1 do\n letter = grid[[x, y]]\n string << letter\n end\n\n strings << string\n\n end\n\n return stri... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
destroys spelling analysis records for dirty attributes | def expire_spellcheck_analysis!
spellcheck_attributes.each do |attribute|
if send("#{attribute}_changed?")
spellcheck_analyses.for(
attribute
).destroy_all
end
end
end | [
"def update_analysis\n model.wordcounts.destroy_all\n create_analysis\n delete_orphaned_keywords\n end",
"def clean_up\n @dirty = false\n end",
"def clean_attributes\n @attribute_changes = {}\n end",
"def clean_dirty_attributes!\n @dirty_attribute_keys = []\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
schedules a spellcheck analysis if there's any attribute missing without analysis | def schedule_spellcheck_analysis!
if missing_spellcheck_analysis?
self.class.delay.perform_spellcheck!(id)
end
end | [
"def analyse!\n @words = formatted_mispelled_words\n @success = true\n rescue *rescued_exceptions_keys => e\n @success = false\n if rescued_exceptions_messages.include?(e.message)\n # aspell is not present. track analysis as failure\n else\n raise e\n end\n end",
"def spellcheck\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Epay::Subscription.create(:card_no => '...', :) | def create(params)
params.merge!(
:order_no => (Time.now.to_f * 10000).to_i.to_s, # Produce a unique order_no - it's not used anywhere, but ePay needs this
:amount => 0
)
post = Api.default_post_for_params(params).merge({
:subscription => 1,
... | [
"def create_stripe_card\n customer = Stripe::Customer.retrieve(self.user.client_id)\n begin\n token = Stripe::Token.create(\n :card => {\n :name => self.name,\n :number => self.number,\n :exp_month => self.exp_month,\n :exp_year => self.exp_year,\n :cvc... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Locate whitelisted attributes for the supplied association name | def whitelisted_attrs_for(assoc_name, attr_hash = whitelisted_attrs)
if assoc_name.to_s == attr_hash.keys.first.to_s
return attr_hash.values.first.reject { |v| v.is_a? Hash }
end
scoped_whitelisted_attrs = attr_hash.values.first
scoped_whitelisted_attrs.reject { |v|
!v.is_a? Has... | [
"def find_attributes_starting_with(name)\n @attributes.select { |key| key.to_s.start_with?(name) }\n end",
"def attr_in(attr_name, *values)\n predicate! do |batch, candidate|\n candidate.respond_to?(attr_name) &&\n values.include?(candidate.send(attr_name))\n end\n end",
"de... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
days_of_week Helpers Hash mapping day of week (Symbol) to valid(true)/invalid(false) | def days_of_week_hash
@days_of_week_hash ||= {
:sunday => (days_of_week & SUNDAY ) > 0,
:monday => (days_of_week & MONDAY ) > 0,
:tuesday => (days_of_week & TUESDAY ) > 0,
:wednesday => (days_of_week & WEDNESDAY ) > 0,
:thursday => (days_of_week & THURSDAY ) > 0,
... | [
"def day_of_week(days)\n @validations[:day_of_week] ||= {}\n @validation_types[:day_of_week] ||= IceCube::DayOfWeekValidation.new(self)\n days.each do |day, occurrences|\n unless day.is_a?(Integer)\n raise ArgumentError.new('Argument must be a valid day of week') unless IceCube::DAYS.has_key?(d... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Array beginning with Sunday of valid(true)/inactive(false) values | def days_of_week_array
dow = days_of_week_hash
@days_of_week_array ||= [
dow[:sunday],
dow[:monday],
dow[:tuesday],
dow[:wednesday],
dow[:thursday],
dow[:friday],
dow[:saturday]
]
end | [
"def get_days\n\t\tbool_array = []\n\n\t\tbool_array << self[:monday]\n\t\tbool_array << self[:tuesday]\n\t\tbool_array << self[:wednesday]\n\t\tbool_array << self[:thursday]\n\t\tbool_array << self[:friday]\n\t\tbool_array << self[:saturday]\n\t\tbool_array << self[:sunday]\n\n\t\treturn bool_array\n\tend",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Humanreadable string of applicable days of week | def days_of_week_string
dow = days_of_week_hash
@days_of_week_string ||=
(dow[:sunday] ? "Su" : "") +
(dow[:monday] ? "M" : "") +
(dow[:tuesday] ? "Tu" : "") +
(dow[:wednesday] ? "W" : "") ... | [
"def of_week_as_s() WEEKDAYS_AS_STR[of_week_as_i] end",
"def day_name; Date::DAYNAMES[wday] end",
"def day_of_week\n all_dates = self.get_all_dates\n if ! all_dates.empty?\n return Date::DAYNAMES[self.get_all_dates.first.wday] + \"s\"\n else\n return \"No dates specified\"\n end\n end",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
TODO DEPRECATED METHODS Returns a RelativeTime object representing this OperatingTime | def relativeTime
@relativeTime ||= RelativeTime.new(self, :opensAt, :length)
end | [
"def time\n Time.now.localtime + self.time_offset\n end",
"def relative_time(time)\n stamp = time.getutc.iso8601\n %(<time class=\"ltr\" dir=\"ltr\" title=\"#{stamp}\" datetime=\"#{stamp}\">#{time}</time>)\n end",
"def relative_time_element(time)\n time_element(time, 'js-relative-t... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
FIXME Deprecated, use +override+ instead | def special
override
end | [
"def override\n use(:__override__)\n end",
"def override\n use(:__override__)\n end",
"def super_method; end",
"def superclass() end",
"def override()\n puts \"child override()\"\n end",
"def original_method; end",
"def base_class; end",
"def original_method=(_); end",
"d... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Input: isSpecial = true/false FIXME Deprecated, use +override=+ instead | def special=(isSpecial)
if isSpecial == true or isSpecial == "true" or isSpecial == 1
self.override = true
elsif isSpecial == false or isSpecial == "false" or isSpecial == 0
self.override = false
end
end | [
"def special\n override\n end",
"def convert_special(special)\n handled = false\n\n RDoc::Markup::Attribute.each_name_of special.type do |name|\n method_name = \"handle_special_#{name}\"\n\n if respond_to? method_name then\n special.text = send method_name, special\n handled = tr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete VPC. ec2.delete_vpc("vpc890ce2e0") => true | def delete_vpc(vpc_id)
link = generate_request("DeleteVpc", 'VpcId' => vpc_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end | [
"def delete\n client.delete_vpc(:vpc_id => vpc_id)\n nil\n end",
"def destroy\n requires :id\n\n service.delete_vpc(id)\n true\n end",
"def destroy\n @aws_vpc.destroy\n respond_to do |format|\n format.html { redirect_to aws_vpcs_url, notice: 'Aws v... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete DHCP Options. ec2.delete_dhcp_options("doptcb0de3a2") => true | def delete_dhcp_options(dhcp_options_id)
link = generate_request("DeleteDhcpOptions", 'DhcpOptionsId' => dhcp_options_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end | [
"def destroy\n requires :id\n service.delete_dhcp_options(id)\n true\n end",
"def delete\n dhcp_attr = AttrFinder.new(@instanceparameters)\n dhcp_attr.options = @options\n dhcp_attr.validate = @validate\n opts = {}\n BmcAuthenticate.new(@options)\n request = Ora... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete customer gateway. ec2.delete_customer_gateway("cgwd5a643bc") => true | def delete_customer_gateway(customer_gateway_id)
link = generate_request("DeleteCustomerGateway", 'CustomerGatewayId' => customer_gateway_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end | [
"def destroy\n requires :customer_gateway_id\n \n service.delete_customer_gateway(customer_gateway_id)\n true\n end",
"def delete\n internet_gateway_attr = AttrFinder.new(@instanceparameters)\n internet_gateway_attr.options = @options\n internet_gateway_attr.val... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Detach VPN gateway. ec2.detach_vpn_gateway('vgwdfa144b6','vpc890ce2e0') => true | def detach_vpn_gateway(vpn_gateway_id, vpc_id)
link = generate_request("DetachVpnGateway", 'VpnGatewayId' => vpn_gateway_id,
'VpcId' => vpc_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
... | [
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete vpn gateway. ec2.delete_vpn_gateway("vgwdfa144b6") => true | def delete_vpn_gateway(vpn_gateway_id)
link = generate_request("DeleteVpnGateway", 'VpnGatewayId' => vpn_gateway_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end | [
"def delete\n client_opts = {}\n client_opts[:vpn_gateway_id] = vpn_gateway_id\n client.delete_vpn_gateway(client_opts)\n nil\n end",
"def destroy\n requires :vpn_gateway_id\n \n service.delete_vpn_gateway(vpn_gateway_id)\n true\n end",
... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Delete VPN connection. ec2.delete_vpn_connection("vpna9a643c0") => true | def delete_vpn_connection(vpn_connection_id)
link = generate_request("DeleteVpnConnection", 'VpnConnectionId' => vpn_connection_id )
request_info(link, RightHttp2xxParser.new(:logger => @logger))
rescue Exception
on_exception
end | [
"def destroy\n requires :vpn_connection_id\n\n service.delete_vpn_connection(vpn_connection_id)\n true\n end",
"def delete\n client_opts = {}\n client_opts[:vpn_connection_id] = vpn_connection_id\n client.delete_vpn_connection(client_opts)\n nil\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Build score keeper if it is enabled via settings | def batali_build_score_keeper
if(batali_least_impact?)
Chef::Log.warn "Batali 'least impact resolution' is currently enabled!"
if(node[:batali] && node[:batali][:last_resolution])
Batali::ScoreKeeper.new(
:manifest => Batali::Manifest.new(
:cookbooks => node[:batali][:last_... | [
"def set_gtm_score(opts)\n opts = check_params(opts,[:scores])\n super(opts)\n end",
"def buildScoringRoundToPointsMap\n scoringMap = {}\n allScores = NbaPlayoffScore.all\n allScores.each do |score|\n scoringMap[score.round] = score.points\n end\n return scoringMap\n end",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Read in the table JSON | def readTable(path)
return JSON.parse(File.read(path).rstrip)
end | [
"def readTableJson(path)\n return JSON.parse(File.read(path).gsub(/\\s+$/, ''))\nend",
"def load_table (filename)\n\t\t\tnew_table = File.open(filename)\n\t\t\t@file_table = JSON.load(new_table)\n\t\t\tnew_table.close\n\t\tend",
"def get_table_json\n url = @driver.current_url\n table_id = url.split('... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set the "slimmer headers" to configure the page | def set_slimmer_headers(hash)
raise InvalidHeader if (hash.keys - SLIMMER_HEADER_MAPPING.keys).any?
SLIMMER_HEADER_MAPPING.each do |hash_key, header_suffix|
value = hash[hash_key]
headers["#{HEADER_PREFIX}-#{header_suffix}"] = value.to_s if value
end
end | [
"def set_headers! session = nil\n response.headers['sid'] = session.id\n response.headers['utoken'] = session.utoken\n end",
"def show_headers=(value)\n @show_headers = value\n end",
"def set_default_headers\n super\n (@content_security_policy || roda_class.opt... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create or update L2 Bridge Endpoint Profile API will create or update L2 bridge profile with ID profileid. L2 Bridge profile is only allowed under an enforcementpoint with path /infra/sites/default/enforcementpoints/default. | def update_l2_bridge_profile_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile ...'
end
... | [
"def update_l2_bridge_profile_0_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0 ...'\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Create or update L2 Bridge Endpoint Profile API will create or update L2 bridge profile with ID profileid. L2 Bridge profile is only allowed under an enforcementpoint with path /infra/sites/default/enforcementpoints/default. | def update_l2_bridge_profile_0_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})
if @api_client.config.debugging
@api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile_0 ...'
end
... | [
"def update_l2_bridge_profile_with_http_info(site_id, enforcement_point_id, profile_id, l2_bridge_endpoint_profile, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: PolicyNetworkingConnectivitySegmentsEdgeBridgeProfilesApi.update_l2_bridge_profile ...'\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Registers +target_dir+ as the default "processed" directory.
#
# Checks that the inbox and processed directories are on the same volume
# (since we want to be able to move these big files, not copy them) and
# raises otherwise.
#
# Potential BUG (from original author): what happens if both should move
# to a different volume? Maybe there should be a method moving both at once.
def register_defaults_for_processed(target_dir)
  unless Pathstring.new(inbox).volume == Pathstring.new(target_dir).volume
    raise "Both inbox and processed directories must be on same volume."
  end

  @defaults.registerDefaults(:processed => Pathstring.new(target_dir))
end
"def receive_flip(from_filestore_name, to_area, paths, unique_names)\n# $stderr << \"receive_flip(#{from_filestore_name}, #{to_area}, #{paths.inspect}, #{unique_names})\\n\"\n tmp_area_path = area_path(:tmp)\n\n # tmp_uuid\n tmp_uuid = unique_name\n\n # first move all moveable pat... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Checks that inbox and processed directories are on the same volume | def register_defaults_for_inbox(inboxpath)
if processed then # If there is a default set for processed...
if not Pathstring.new(inboxpath).volume == Pathstring.new(processed).volume then # ...and it's not on the same volume as inboxpath
raise "Both inbox and processed directories must be on same volum... | [
"def verify_shared_folders(folders)\n folders.each do |folder|\n # Warm up PoSH communicator for new instances - any stderr results\n # in failure: https://github.com/mefellows/vagrant-dsc/issues/21\n @machine.communicate.test(\"test -d #{folder}\", sudo: true)\n\n @logger... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# GET /threats/1
# GET /threats/1.xml
def show
  @threat = Threat.find(params[:id])

  respond_to do |fmt|
    fmt.html # renders show.html.erb
    fmt.xml { render :xml => @threat }
  end
end
"def index\n @threats = Threat.all\n end",
"def index\n @hats = Hat.find(:all)\n\n respond_to do |format|\n format.html # index.html.erb\n# format.xml { render :xml => @hats }\n end\n end",
"def index\n @threds = Thred.all\n\n respond_to do |format|\n format.html # index.html... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# GET /threats/new
# GET /threats/new.xml
def new
  @threat = Threat.new

  respond_to do |fmt|
    fmt.html # renders new.html.erb
    fmt.xml { render :xml => @threat }
  end
end
"def new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @coating }\n end\n end",
"def new\n @cheat = Cheat.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @cheat }\n end\n end",
"def new\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /threats POST /threats.xml | def create
@threat = Threat.new(params[:threat])
respond_to do |format|
if @threat.save
format.html { redirect_to(@threat, :notice => 'Threat was successfully created.') }
format.xml { render :xml => @threat, :status => :created, :location => @threat }
else
format.html { re... | [
"def index\n @threats = Threat.all\n end",
"def create\n @hat = Hat.new(params[:hat])\n\n respond_to do |format|\n if @hat.save\n flash[:notice] = 'Hat was successfully created.'\n format.html { redirect_to(@hat) }\n format.xml { render :xml => @hat, :status => :created, :loca... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# DELETE /threats/1
# DELETE /threats/1.xml
def destroy
  @threat = Threat.find(params[:id])
  @threat.destroy

  respond_to do |fmt|
    fmt.html { redirect_to(threats_url) }
    fmt.xml  { head :ok }
  end
end
"def destroy\n @cheat = Cheat.find(params[:id])\n @cheat.destroy\n\n respond_to do |format|\n format.html { redirect_to(cheats_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @hat = Hat.find(params[:id])\n @hat.destroy\n\n respond_to do |format|\n format.html { ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Takes a range and yields pairs of [value, valid?]: one value just below
# the range, both endpoints (valid), and one value just above the range.
def test_range(r)
  probes = [
    [r.min - 1, false],
    [r.min,     true],
    [r.max,     true],
    [r.max + 1, false]
  ]
  probes.each { |pair| yield pair }
end
"def valid_ranges\n @ranges.select(&:valid?).map do |range|\n [@tiers, @revue, range.min_and_max, @start, @end].flatten.join(TAB)\n end\n end",
"def range(start_value, end_value, args, block)\n filter << (start_value.to_i..end_value.to_i).to_a\n end",
"def it_shou... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Add an +ident+ / +element+ pair which will be used for looking up
# unresolved identifiers.
#
# The first element registered under an identifier is stored directly;
# registering further elements under the same identifier turns the entry
# into an array accumulating every element.
def add_identifier(ident, element)
  existing = @identifier_map[ident]
  if !existing
    @identifier_map[ident] = element
  elsif existing.is_a?(Array)
    existing << element
  else
    @identifier_map[ident] = [existing, element]
  end
end
"def add_identifier(val)\n @identifiers.push( self.class.normalize_id(val) )\n end",
"def add_identifier(val)\n return if val.nil?\n @identifiers.push(self.class.normalize_id(val)) unless @identifiers.index(self.class.normalize_id(val))\n end",
"def identifier_map(id, name, id_map)\n if !i... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Tries to resolve the given +unresolved_refs+. If resolution is successful, the proxy object will be removed, otherwise there will be an error description in the problems array. In case the resolved target element's type is not valid for the given feature, the +target_type_error+ flag will be set on the unresolved refer... | def resolve(unresolved_refs, options={})
problems = options[:problems] || []
still_unresolved_refs = []
failed_resolutions = options[:failed_resolutions] || {}
unresolved_refs.each do |ur|
if @identifier_resolver
target = @identifier_resolver.call(ur.proxy.targetIdentifier)
el... | [
"def unresolve_refs(rrefs)\r\n # make sure any removed_urefs have been removed, \r\n # otherwise they will be removed later even if this method actually re-added them\r\n unresolved_refs\r\n rrefs.each do |rr|\r\n ur = rr.uref\r\n refs = ur.element.getGeneric(ur.feature_name)\r\n if refs.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Return all of the fences straight from mongo, materialised as an Array.
def get_all_fences
  cursor = @coll.find
  cursor.to_a
end
"def collections\n records = {}\n itql(\"select $object $title from <#ri> where ($object <fedora-model:label> $title and $object <fedora-model:hasModel> <info:fedora/islandora:collectionCModel>)\").each do |rec|\n records[rec.object.sub('info:fedora/', '')] = rec.title\n end\n\n return records\n e... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Create a new dataset edit tied to +commit+, persisting it inside a
# database transaction. Raises if the record is invalid (create!).
def create_edit(commit, key, value)
  ActiveRecord::Base.transaction do
    DatasetEdit.create!(commit_id: commit.id, key: key, value: value)
  end
end
"def create\n\n ## Make sure we have the required fields.\n # if get_with_default(@data, :name, \"\").empty? or \n # get_with_default(@data, :summary, \"\").empty? or\n # get_with_default(@data, :description, \"\").empty?\n # respond_with_error \"You must provide a name, summary, and descri... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Finds all commits belonging to a dataset with an edit to the given key,
# ordered most recently updated first.
def find_commits(dataset, edit_key)
  edited = dataset.commits.joins(:dataset_edits)
  edited.where(dataset_edits: { key: edit_key }).order(updated_at: :desc)
end
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def find_edit(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return nil unless commits.any?\n\n DatasetEdi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Finds the most recent edit of a key belonging to a dataset, or nil when
# no commit of the dataset has an edit for that key.
def find_edit(dataset, edit_key)
  commits = find_commits(dataset, edit_key)
  return nil if commits.empty?

  DatasetEdit
    .where(commit_id: commits.pluck(:id), key: edit_key)
    .order(updated_at: :desc)
    .first
end
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { key: edit_key })\n .order(updated_at: :desc)\n end",
"def find_commits(dataset, edit_key)\n dataset.commits\n .joins(:dataset_edits)\n .where(dataset_edits: { ke... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Removes all dataset edits matching +edit_key+. If that key is the only
# edit belonging to a commit, the whole commit is removed as well.
def destroy_edits(dataset, edit_key)
  find_commits(dataset, edit_key).each do |commit|
    if commit.dataset_edits.one?
      commit.destroy
    else
      commit.dataset_edits.find_by_key(edit_key).destroy
    end
  end
end
"def destroy_edits(dataset, edit_key)\n commits = find_commits(dataset, edit_key)\n\n return if commits.none?\n\n commits.each do |commit|\n if commit.dataset_edits.one?\n commit.destroy\n else\n commit.dataset_edits.find_by(key: edit_key).destroy\n end\n end\n end",
"def... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Allows setting the use_another_address_for_shipping attribute, ensuring
# it is stored as a Boolean when given a number string ("1" => true,
# "0" => false); non-string values are stored untouched.
def use_another_address_for_shipping=(val)
  @use_another_address_for_shipping =
    if val.is_a?(String)
      val.to_i > 0
    else
      val
    end
end
"def use_billing_address=(switch)\n self.use_shipping_address = !(switch.to_i == 1)\n end",
"def use_billing_address?\n !use_shipping_address\n end",
"def shipping_same_as_billing=(value)\n return :skipped unless !! value and billing_address\n fields = billing_address.attributes.to_options.slice C... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Set the shipping method id on the cart by creating the corresponding adjustment at the same time to ensure cart price and recap takes shipping costs into account | def shipping_method_id=(val)
@shipping_method_id = val
if shipping_method_id
adjustments.reject! { |a| a.type == "shipping-method" }
adjustment = Glysellin::Cart::Adjustment::ShippingMethod.new(self,
shipping_method_id: shipping_method_id
)
adjustmen... | [
"def shipping_method=(method)\n raise ArgumentError.new(\"method cannot be nil\") unless method.present?\n self.shipping_name = method.name\n self.shipping_cost = method.cost\n end",
"def set_shipping_method\n ship_id = params[:ship_type_id]\n # Convert to integers for comparison purposes!\n sh... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Shortcut method to get the shipping adjustment from the adjustments
# list; returns nil when no shipping-method adjustment is present.
def shipping
  adjustments.find { |adjustment| adjustment.type == "shipping-method" }
end
"def price_adjustments\n adjustments = []\n\n line_items.each do |line_item|\n adjustments.concat (line_item.adjustments - line_item.adjustments.gift_packaging)\n end\n\n adjustments\n end",
"def shipping\n total_quantity = items.inject(0) {|q, item| q + item.quantity}\n if total_qua... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Validates customer informations are correctly filled: the customer and
# billing address always, plus the shipping address when a separate one
# is requested.
def validate_customer_informations
  [:customer, :billing_address].each do |resource|
    validate_nested_resource(resource)
  end

  validate_nested_resource(:shipping_address) if use_another_address_for_shipping
end
"def validate_customer\n if !customer_id.nil? && Customer.find_by_id(self.customer_id).nil?\n errors.add(:customer_id, \"Invalid\")\n end\n end",
"def validate_customer_data(payload)\n headers = admin_headers\n\n put_wrapper('/V1/customers/validate', payload.to_json, headers)\n end"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Validates the selected country is eligible for the current cart contents to be shipped to | def validate_shippable
if !shipping || !shipping.valid
code = use_another_address_for_shipping ?
shipping_address.country : billing_address.country
country = Glysellin::Helpers::Countries::COUNTRIES_LIST[code]
errors.add(
:shipping_method_id,
I1... | [
"def validate_ship_to_address_is_acceptable\n if (!ship_to_billing? && ship_to_address && ship_to_address.country != 'US') || (ship_to_billing? && payment && payment.billing_address && payment.billing_address.country != 'US')\n self.errors.add_to_base('We currently only ship orders to the United States.... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Retrieve the order from the database if it exists, or use the cached
# version from a previous lookup.
def order
  return @order if @order

  @order = Glysellin::Order.where(id: order_id).first
end
"def find_reusable_order\n return # FIXME: currently it may return address which is being used by other active order\n expired_orders = find_expired_orders_row\n if expired_orders.size >= Config.reuse_address_orders_threshold &&\n fetch_transactions_for(expired_orders.last.address).empty?\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Assign order and order_id; if nil is explicitly passed, ensure we set
# order_id to nil too.
def order=(new_order)
  self.order_id = new_order && new_order.id
  @order = new_order
end
"def set_order\n @order = Order.find(session[:order_id])\n puts(session[:order_id])\n #if an error occurs (if the session doesn't exist) - creating a new one.\n rescue ActiveRecord::RecordNotFound\n @order = Order.create\n session[:order_id] = @order.id\n end",
"def set_order\n # @order = curr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Cleans the cart's stored order if it exists: destroys the current order
# when it has not been paid yet (:ready state) — since we're creating a
# new one — then unsets it from the cart.
def clean_order!
  return unless order

  order.destroy if order.state_name == :ready
  self.order = nil
end
"def check_order\n if order.order_lines.count == 0\n order.destroy\n end\n end",
"def clear_cart\n self.order_status_id = 4\n self[:subtotal] = 0\n self.order_items.destroy_all\n end",
"def clear_cart_and_order(destroy_order = true)\n @cart = find_cart.empty!\n if session[:order_id] ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# A random timeout in the range between the min timeout and twice that,
# when randomisation is enabled; otherwise exactly the min timeout.
def random_timeout
  base = @MIN_TIMEOUT
  return base unless @IS_TIMEOUT_RANDOM

  base + rand(base)
end
"def random_spawn_countdown minimum\n 10.randomize(:ratio, :sign).to_i + 60\n end",
"def random_timeout\n range = @options[:election_timeout] || ELECTION_TIMEOUT\n min, max = range.first, range.last\n min + rand(max - min)\n end",
"def random_duration\n min_duration + rand(max_duration - mi... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# GET /micgroposts/1
# GET /micgroposts/1.xml
def show
  @micgropost = Micgropost.find(params[:id])

  respond_to do |fmt|
    fmt.html # renders show.html.erb
    fmt.xml { render :xml => @micgropost }
  end
end
"def rss\n @events = Post.find(:all, {:conditions=> \"status=public\"},:order => \"id DESC\")\n render :layout => false\n headers[\"Content-Type\"] = \"application/xml; charset=utf-8\"\n end",
"def show\n @mircopost = Mircopost.find(params[:id])\n\n respond_to do |format|\n format.html # show... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# GET /micgroposts/new
# GET /micgroposts/new.xml
def new
  @micgropost = Micgropost.new

  respond_to do |fmt|
    fmt.html # renders new.html.erb
    fmt.xml { render :xml => @micgropost }
  end
end
"def new\n @post = Post.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post }\n end\n make_rss\n end",
"def new\n @post201 = Post201.new\n\n respond_to do |format|\n format.html # new.html.erb\n format.xml { render :xml => @post2... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
POST /micgroposts POST /micgroposts.xml | def create
@micgropost = Micgropost.new(params[:micgropost])
respond_to do |format|
if @micgropost.save
format.html { redirect_to(@micgropost, :notice => 'Micgropost was successfully created.') }
format.xml { render :xml => @micgropost, :status => :created, :location => @micgropost }
... | [
"def create\n @micripost = Micripost.new(params[:micripost])\n\n respond_to do |format|\n if @micripost.save\n format.html { redirect_to @micripost, notice: 'Micripost was successfully created.' }\n format.json { render json: @micripost, status: :created, location: @micripost }\n else\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# DELETE /micgroposts/1
# DELETE /micgroposts/1.xml
def destroy
  @micgropost = Micgropost.find(params[:id])
  @micgropost.destroy

  respond_to do |fmt|
    fmt.html { redirect_to(micgroposts_url) }
    fmt.xml  { head :ok }
  end
end
"def destroy\n @mircopost = Mircopost.find(params[:id])\n @mircopost.destroy\n\n respond_to do |format|\n format.html { redirect_to(mircoposts_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @scrap_xml = ScrapXml.find(params[:id])\n @scrap_xml.destroy\n\n respond_to ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# euler020 (6/22/12): Find the sum of the digits in the number 100!.
#
# The original nested a `def fact` inside this method, which redefined a
# global helper on every call and carried a dead `ans =` assignment; the
# factorial is now computed with a single reduce and the digits summed
# directly.
#
# Returns the digit sum of 100! (an Integer).
def euler020
  factorial_100 = (1..100).reduce(:*)
  factorial_100.to_s.chars.map(&:to_i).reduce(:+)
end
"def euler_13\n data = File.new(\"big_ass_number\").to_a\n data_line = []\n sum = 0 \n digits = []\n\n (0..data.length).each do |i|\n sum += data[i].to_i\n end\n digits = sum.to_s\n puts digits.slice(0...10)\n end",
"def euler016\n (2 ** 1000).to_s.split('').map { |x| x.to_i }.reduce(:+)\nend",
"def euler0... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This returns the API user being used for calls
# (sent as the X-Auth-User header when authenticating with Rackspace).
def api_user
  @user
end
"def user_info\n get(api_get.body.identity).body\n end",
"def current_api_user\n User.find_by(auth_headers) if auth_headers.present? \n end",
"def current_api_user\n if params[:api_key]\n User.find_by_api_key params[:api_key]\n else\n current_user\n end\n end",
"def cur... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This returns the API key being used for calls
# (sent as the X-Auth-Key header when authenticating with Rackspace).
def api_key
  @key
end
"def get_apiKey\n return 'wbf3wr553fuaevgju4ekyng9'\n end",
"def api_key; \"055a1100-a84d-4064-84d7-bce46f7b80c8\"; end",
"def api_key\n @api_key ||= Census.installed_key\n end",
"def api_key\n @config.fetch(:api_key)\n end",
"def retrieve_api_key(key_id)\n start.uri('/api/api-key')... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This authenticates with Rackspace and returns the information necessary to make subsequent authenticated calls to the API | def authenticate
raise Rackspace::NotInitialized unless self.initialized?
headers = RestClient::Request.execute(:method => :get, :url => "#{AUTH_URL}/#{self.api_version}", :headers => {"X-Auth-User" => self.api_user, "X-Auth-Key" => self.api_key}, :raw_response => true).headers
{:auth_token => headers... | [
"def authenticate\n if credentials[:auth_token] && credentials[:tennant_id]\n credentials\n else\n #puts \"RACKSPACE_AUTHENTICATE\"\n #puts \"RACKSPACE_AUTHENTICATE credentials #{credentials.inspect}\" # TODO: DISABLE THIS BEFORE PRODUCTION!\n resp = submit_request(\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This returns the available versions of the API as a unique list of
# version ids parsed from the version endpoint's JSON response.
def versions
  response = RestClient.get("#{VERSION_URL}/.json", self.default_headers)
  JSON.parse(response)["versions"].map { |version| version["id"] }.uniq
end
"def known_api_versions\n return ['2']\n end",
"def fetch_api_versions\n raw, _, st = kubectl.run(\"api-versions\", attempts: 5, use_namespace: false)\n # The \"core\" group is represented by an empty string\n versions = { \"\" => %w(v1) }\n if st.success?\n rows = raw.split(\"\... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This caches the authentication response for subsequent usage, only
# hitting authenticate on the first call.
def auth_response
  @auth_response ||= authenticate
end
"def refresh(_)\n authenticate\n end",
"def refresh\n @cached_credentials = nil\n end",
"def retry_with_auth(response)\n @already_tried_with_auth = true\n logger.info(\"Authentication Required. Retrying with auth info\")\n accessor.auth_manager.associate_auth_info(resp... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This is the auth token provided by Rackspace after a successful
# authentication, read from the cached auth response.
def auth_token
  auth_response[:auth_token]
end
"def auth_token\n Keycard::Token.rfc7235(safe(\"HTTP_AUTHORIZATION\"))\n end",
"def auth_token\n @auth_token ||= request.headers.fetch('Authorization', '').split(' ').last\n end",
"def auth_token\n generate_auth_token if @auth_token.nil?\n return @auth_token\n end",
"def auth_token=(v... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This performs a basic POST request using the supplied URL, payload and
# headers; ".json" is appended to the URL and the payload JSON-encoded.
def post(url, payload = {}, headers = {})
  http(:post, "#{url}.json", payload.to_json, headers)
end
"def post url, body, headers = {}\n http_request(url, Net::HTTP::Post, body, headers)\n end",
"def make_post_request url, body, headers = []\n make_request url, method: ::Rack::POST, body: body, headers: headers\n end",
"def post path, payload, request_headers = {}\n post_headers = h... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# This performs a basic PUT request using the supplied URL, payload and
# headers; ".json" is appended to the URL and the payload JSON-encoded.
def put(url, payload = {}, headers = {})
  http(:put, "#{url}.json", payload.to_json, headers)
end
"def _put(url=\"\", params={}, headers={}, payload)\n\t\tif !params.empty? then\n\t\t\theaders[:params] = params\n\t\tend\n\t\tresponse = RestClient.put(url, payload, headers)\n\t\thandle_response(response)\n\tend",
"def put url, body, headers = {}\n http_request(url, Net::HTTP::Put, body, headers)\n end"... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
This will perform an HTTP call with the specified method, and arguments It will also pick up if the response is that the request was unauthorized, and will attempt the same request again after reauthenticating (in case the auth token has expired) | def http(method, *args)
args.last.merge!(self.default_headers).merge!("X-Auth-Token" => self.auth_token)
response = RestClient.send(method, *args)
@retried = false
response
rescue RestClient::Unauthorized
@auth_response = nil
if @retried
raise
else
@retried ... | [
"def execute(method, path, **options)\n uri = URI(\"#{@base_url}/#{path}\")\n\n case method.to_sym\n when :get\n req = Net::HTTP::Get.new(uri)\n when :post\n req = Net::HTTP::Post.new(uri)\n when :put\n req = Net::HTTP::Put.new(uri)\n when :delete\n req = Net::HTTP::Delete.new(... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
create command for add helm repository =============================================================== | def do_add_repo(repository,mirror_enabled,mirror_url,mirror_ca_cert)
name = repository['name']
url = repository['url']
if !mirror_enabled
cmd = "helm repo add #{name} #{url} "
else
if mirror_ca_cert == ""
cmd = "helm repo add #{name} #{mirror_url} "
else
cmd = "helm repo add --ca-file... | [
"def add_repository(definition)\n definition = URI.parse(definition)\n commands = []\n\n if definition.scheme =~ /^(http|ftp|file)/\n if File.extname(definition.path) == '.ps1'\n commands << %(powershell.exe -NoProfile -ExecutionPolicy Bypass -Command 'iex ((new-object net.w... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# note    - An instance of `Gitlab::GithubImport::Representation::Note`.
# project - An instance of `Project`.
# client  - An instance of `Gitlab::GithubImport::Client`.
def initialize(note, project, client)
  @note = note
  @project = project
  @client = client
  # NOTE(review): presumably resolves GitHub note authors to GitLab
  # users — confirm against GithubImport::UserFinder.
  @user_finder = GithubImport::UserFinder.new(project, client)
end
"def modify_note(project_id, issue_id, note_id, content, access_token)\n rest_client.put(\"/projects/#{project_id}/issues/#{issue_id}/notes/#{note_id}\", { verify: false, body: { body: content, access_token: access_token } })\n end",
"def create\n @project_note = ProjectNote.new(project_note_params(params[... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
end test Test that the graph class will not let you to_s on an incomplete graph | def test_graph_to_s_incomplete
sut_graph = Graph.new
sut_graph.name="test_graph"
#sut_graph.type=:digraph
sut_graph.node_style=:ellipse
sut_graph.add_edge("TEST1" , "TEST2" , "take_me_to_test_2")
assert_raises RuntimeError do
returned_obj = sut_graph.to_s
end # end asser... | [
"def test_to_s_empty_graph\n graph = Graph.new\n\n assert(graph.to_s == '')\n end",
"def test_to_s\n graph = DirectedGraph.new\n vertex_a = Vertex.new('a')\n vertex_b = Vertex.new('b')\n vertex_c = Vertex.new('c')\n graph.add_vertex(vertex_a).add_vertex(vertex_b).add_vertex(vertex_c)\n gr... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Stubs SOAP requests to a given soap_action.
#
# Returns self so expectations can be chained.
def stubs(soap_action)
  setup(:stubs, soap_action)
  self
end
"def dispatch(soap_action)\n response = @request.soap @soap\n Response.new response\n end",
"def execute_soap_request(action, args)\n original_action_name =\n get_service_registry.get_method_signature(action)[:original_name]\n original_action_name = action if original_action_name.n... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Expects a given SOAP body Hash to be used (only when the mock method is
# :expects). Returns self for chaining.
def with(soap_body)
  if mock_method == :expects
    Savon::SOAP::XML.any_instance.expects(:body=).with(soap_body)
  end
  self
end
"def to_hash\n @hash ||= (Crack::XML.parse(body) rescue {}).find_soap_body\n end",
"def find_soap_body\n envelope = self[self.keys.first] || {}\n body_key = envelope.keys.find { |key| /.+:Body/ =~ key } rescue nil\n body_key ? envelope[body_key].map_soap_response : {}\n end",
"def find_soap_bo... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Sets up Savon to respond like there was a SOAP fault by stubbing
# soap_fault? to true on every response. Returns self for chaining.
def raises_soap_fault
  fault_check = Savon::SOAP::Response.any_instance.expects(:soap_fault?)
  fault_check.returns(true)
  self
end
"def handle_soap_fault\n if soap_fault_message\n @soap_fault = soap_fault_message\n raise Savon::SOAPFault, @soap_fault if self.class.raise_errors?\n end\n end",
"def handle_fault(request, response)\n xml = Nokogiri::XML(response.body)\n msg = xml.xpath('/soap:Envelope/soa... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
# Wrap tests that use Mocha and skip them if the gem is unavailable.
#
# test_name - label interpolated into the skip message.
#
# Attempts to require 'mocha' unless the Mocha constant is already
# defined; on LoadError prints a skip message to stderr and does NOT
# yield, otherwise yields to the wrapped tests. (The original bound the
# exception to an unused `load_error` local; the dead binding is removed.)
def uses_mocha(test_name)
  require 'mocha' unless Object.const_defined?(:Mocha)
rescue LoadError
  $stderr.puts "Skipping #{test_name} tests. `gem install mocha` and try again."
else
  yield
end
"def uses_mocha(test_name)\n require 'mocha'\n yield\nrescue LoadError\n $stderr.puts \"Skipping #{test_name} tests. `gem install mocha` and try again.\"\nend",
"def uses_mocha(test_name)\n require 'mocha' unless Object.const_defined?(:Mocha)\n yield\nrescue LoadError => load_error\n raise unless load_error... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
lexer rules lexer rule t__6! (T__6) (in CSV.g) | def t__6!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 1 )
type = T__6
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 7:8: '+'
match( 0x2b )
@state.type = type
@sta... | [
"def t__34!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 14 )\n\n type = T__34\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 23:9: ','\n match( 0x2c )\n\n \n @state.type = type\n ... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
lexer rule number! (NUMBER) (in CSV.g) | def number!
# -> uncomment the next line to manually enable rule tracing
# trace_in( __method__, 2 )
type = NUMBER
channel = ANTLR3::DEFAULT_CHANNEL
# - - - - main rule block - - - -
# at line 11:10: ( '0' .. '9' )+
# at file 11:10: ( '0' .. '9' )+
mat... | [
"def num!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 5 )\n\n type = NUM\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 37:9: ( '1' .. '9' ) ( '0' .. '9' )*\n # at line 37:9: ( '1' .. '9' )... | {
"objective": {
"paired": [],
"self": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.