_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q12100 | Generamba.CatalogDownloader.update_all_catalogs_and_return_filepaths | train | def update_all_catalogs_and_return_filepaths
  # Re-downloads the shared Generamba catalog plus every catalog listed in the
  # local Rambafile (if present) and returns an array of their local filepaths.
  does_rambafile_exist = Dir[RAMBAFILE_NAME].count > 0
  if does_rambafile_exist
    rambafile = YAML.load_file(RAMBAFILE_NAME)
    catalogs = rambafile[CATALOGS_KEY]
  end

  # Wipe previously downloaded catalogs so stale specs cannot survive.
  terminator = CatalogTerminator.new
  terminator.remove_all_catalogs

  # The shared generamba catalog is always (re)downloaded first.
  catalog_paths = [download_catalog(GENERAMBA_CATALOG_NAME, RAMBLER_CATALOG_REPO)]

  unless catalogs.nil? || catalogs.empty?
    catalogs.each do |catalog_url|
      # Derive a filesystem-safe name from the URL: strip the scheme and
      # flatten path separators into dashes.
      catalog_name = catalog_url.split('://').last.gsub('/', '-')
      catalog_paths.push(download_catalog(catalog_name, catalog_url))
    end
  end

  catalog_paths
end | ruby | {
"resource": ""
} |
q12101 | Generamba.CatalogDownloader.download_catalog | train | def download_catalog(name, url)
  # Clones (or, if already cloned, pulls) a template catalog repository into
  # ~/<GENERAMBA_HOME_DIR>/<CATALOGS_DIR>/<name> and returns its local path.
  catalogs_local_path = Pathname.new(ENV['HOME'])
                                .join(GENERAMBA_HOME_DIR)
                                .join(CATALOGS_DIR)
  current_catalog_path = catalogs_local_path.join(name)

  # BUG FIX: File.exists? was deprecated for years and removed in Ruby 3.2;
  # File.exist? is the supported spelling.
  if File.exist?(current_catalog_path)
    g = Git.open(current_catalog_path)
    g.pull
  else
    Git.clone(url, name, :path => catalogs_local_path)
  end

  current_catalog_path
end | ruby | {
"resource": ""
} |
q12102 | Generamba.TemplateInstallerFactory.installer_for_type | train | def installer_for_type(type)
  # Maps a template declaration type onto a matching installer instance.
  # Unknown types yield nil (a case with no matching branch and no else).
  case type
  when TemplateDeclarationType::LOCAL_TEMPLATE   then Generamba::LocalInstaller.new
  when TemplateDeclarationType::REMOTE_TEMPLATE  then Generamba::RemoteInstaller.new
  when TemplateDeclarationType::CATALOG_TEMPLATE then Generamba::CatalogInstaller.new
  end
end | ruby | {
"resource": ""
} |
q12103 | Generamba.CatalogInstaller.browse_catalog_for_a_template | train | def browse_catalog_for_a_template(catalog_path, template_name)
  # Looks up a template directory inside the given catalog.
  # Returns the template's full path, or nil when the catalog lacks it.
  candidate = catalog_path.join(template_name)
  Dir.exist?(candidate) ? candidate : nil
end | ruby | {
"resource": ""
} |
q12104 | Generamba.TemplateProcessor.install_templates | train | def install_templates(rambafile)
  # Installs every template declared in the Rambafile.
  # We always clear previously installed templates to avoid conflicts in different versions
  clear_installed_templates

  declarations = rambafile[TEMPLATES_KEY]
  if !declarations || declarations.count == 0
    puts 'You must specify at least one template in Rambafile under the key *templates*'.red
    return
  end

  # Mapping hashes to model objects
  declarations = rambafile[TEMPLATES_KEY].map do |template_hash|
    Generamba::TemplateDeclaration.new(template_hash)
  end

  # If there is at least one template from catalogs, we should update our local copy of the catalog
  update_catalogs_if_needed(rambafile[CATALOGS_KEY], declarations)

  declarations.each do |declaration|
    installer = @installer_factory.installer_for_type(declaration.type)
    declaration.install(installer)
  end
end | ruby | {
"resource": ""
} |
q12105 | Generamba.TemplateProcessor.clear_installed_templates | train | def clear_installed_templates
  # Deletes every previously installed template from the templates folder.
  installed = Dir.glob(Pathname.new(TEMPLATES_FOLDER))
  FileUtils.rm_rf(installed)
end | ruby | {
"resource": ""
} |
q12106 | Generamba.TemplateProcessor.update_catalogs_if_needed | train | def update_catalogs_if_needed(catalogs, templates)
  # Refreshes local catalog copies, but only when at least one template is
  # sourced from a catalog.
  needs_update = templates.any? { |template| template.type == TemplateDeclarationType::CATALOG_TEMPLATE }
  return unless needs_update

  # Drop every cached catalog, then re-download the shared one.
  terminator = CatalogTerminator.new
  terminator.remove_all_catalogs

  puts('Updating shared generamba-catalog specs...')
  @catalog_downloader.download_catalog(GENERAMBA_CATALOG_NAME, RAMBLER_CATALOG_REPO)

  return if catalogs.nil? || catalogs.empty?

  catalogs.each do |catalog_url|
    # Filesystem-safe catalog name: strip scheme, flatten path separators.
    catalog_name = catalog_url.split('://').last.gsub('/', '-')
    puts("Updating #{catalog_name} specs...")
    @catalog_downloader.download_catalog(catalog_name, catalog_url)
  end
end | ruby | {
"resource": ""
} |
q12107 | Xcodeproj.XCScheme.configure_with_targets | train | def configure_with_targets(runnable_target, test_target, launch_target: false)
  # Adds the given targets to the scheme's build action. Optionally marks the
  # runnable target as the launch target, and registers the test target for
  # testing (building it too, unless it is the runnable target itself).
  unless runnable_target.nil?
    add_build_target(runnable_target)
    set_launch_target(runnable_target) if launch_target
  end
  return if test_target.nil?
  add_build_target(test_target, false) unless test_target == runnable_target
  add_test_target(test_target)
end | ruby | {
"resource": ""
} |
q12108 | Xcodeproj.XCScheme.set_launch_target | train | def set_launch_target(build_target)
  # Wires the target into the launch, profile and test actions of the scheme.
  launch_action.buildable_product_runnable = BuildableProductRunnable.new(build_target, 0)
  profile_action.buildable_product_runnable = BuildableProductRunnable.new(build_target)
  test_action.add_macro_expansion(MacroExpansion.new(build_target))
end | ruby | {
"resource": ""
} |
q12109 | Xcodeproj.XCScheme.save_as | train | def save_as(project_path, name, shared = true)
  # Serializes the scheme into the project's shared data dir (xcshareddata)
  # or the current user's data dir, creating folders as needed.
  folder = shared ? self.class.shared_data_dir(project_path) : self.class.user_data_dir(project_path)
  folder.mkpath
  @file_path = folder + "#{name}.xcscheme"
  File.open(@file_path, 'w') { |f| f.write(to_s) }
end | ruby | {
"resource": ""
} |
q12110 | Xcodeproj.Project.initialize_from_file | train | def initialize_from_file
# Loads project state from `project.pbxproj` on disk, replacing the current
# root object and version metadata. Raises when the plist has no root object
# or declares a newer archive/object version than this library knows.
pbxproj_path = path + 'project.pbxproj'
plist = Plist.read_from_path(pbxproj_path.to_s)
# Detach the old root object before replacing it so referrer bookkeeping
# stays consistent.
root_object.remove_referrer(self) if root_object
@root_object = new_from_plist(plist['rootObject'], plist['objects'], self)
@archive_version = plist['archiveVersion']
@object_version = plist['objectVersion']
@classes = plist['classes'] || {}
# Freshly loaded state is by definition in sync with disk.
@dirty = false
unless root_object
raise "[Xcodeproj] Unable to find a root object in #{pbxproj_path}."
end
if archive_version.to_i > Constants::LAST_KNOWN_ARCHIVE_VERSION
raise '[Xcodeproj] Unknown archive version.'
end
if object_version.to_i > Constants::LAST_KNOWN_OBJECT_VERSION
raise '[Xcodeproj] Unknown object version.'
end
# Projects can have product_ref_groups that are not listed in the main_groups["Products"]
root_object.product_ref_group ||= root_object.main_group['Products'] || root_object.main_group.new_group('Products')
end | ruby | {
"resource": ""
} |
q12111 | Xcodeproj.Project.to_tree_hash | train | def to_tree_hash
  # Plist-shaped representation of the project with the root object expanded
  # as a nested tree; the flat 'objects' dictionary is intentionally empty.
  {
    'objects'        => {},
    'archiveVersion' => archive_version.to_s,
    'objectVersion'  => object_version.to_s,
    'classes'        => classes,
    'rootObject'     => root_object.to_tree_hash,
  }
end | ruby | {
"resource": ""
} |
q12112 | Xcodeproj.Project.generate_available_uuid_list | train | def generate_available_uuid_list(count = 100)
  # Tops up the pool of pre-generated UUIDs with `count` fresh candidates,
  # filtering out any value already generated or already used by the project.
  # BUG FIX: the previous `(0..count).map` produced count + 1 UUIDs.
  new_uuids = Array.new(count) { SecureRandom.hex(12).upcase }
  uniques = (new_uuids - (@generated_uuids + uuids))
  @generated_uuids += uniques
  @available_uuids += uniques
end | ruby | {
"resource": ""
} |
q12113 | Xcodeproj.Project.reference_for_path | train | def reference_for_path(absolute_path)
  # Finds the PBXFileReference whose resolved path equals `absolute_path`.
  # Raises ArgumentError when given a relative path.
  pathname = Pathname.new(absolute_path)
  raise ArgumentError, "Paths must be absolute #{absolute_path}" unless pathname.absolute?
  objects.find { |child| child.isa == 'PBXFileReference' && child.real_path == pathname }
end | ruby | {
"resource": ""
} |
q12114 | Xcodeproj.Project.embedded_targets_in_native_target | train | def embedded_targets_in_native_target(native_target)
native_targets.select do |target|
host_targets_for_embedded_target(target).map(&:uuid).include? native_target.uuid
end
end | ruby | {
"resource": ""
} |
q12115 | Xcodeproj.Project.host_targets_for_embedded_target | train | def host_targets_for_embedded_target(embedded_target)
native_targets.select do |native_target|
((embedded_target.uuid != native_target.uuid) &&
(native_target.dependencies.map(&:native_target_uuid).include? embedded_target.uuid))
end
end | ruby | {
"resource": ""
} |
q12116 | Xcodeproj.Project.new_resources_bundle | train | def new_resources_bundle(name, platform, product_group = nil)
product_group ||= products_group
ProjectHelper.new_resources_bundle(self, name, platform, product_group)
end | ruby | {
"resource": ""
} |
q12117 | Xcodeproj.Project.add_build_configuration | train | def add_build_configuration(name, type)
# Returns the project-level build configuration with the given name,
# creating it (seeded with the default settings for `type`, e.g. :debug or
# :release) when it does not exist yet.
build_configuration_list = root_object.build_configuration_list
# Deliberate assignment-in-condition: reuse an existing configuration.
if build_configuration = build_configuration_list[name]
build_configuration
else
build_configuration = new(XCBuildConfiguration)
build_configuration.name = name
common_settings = Constants::PROJECT_DEFAULT_BUILD_SETTINGS
# deep_dup so the shared constant is never mutated through the new config.
settings = ProjectHelper.deep_dup(common_settings[:all])
settings.merge!(ProjectHelper.deep_dup(common_settings[type]))
build_configuration.build_settings = settings
build_configuration_list.build_configurations << build_configuration
build_configuration
end
end | ruby | {
"resource": ""
} |
q12118 | Xcodeproj.Config.save_as | train | def save_as(pathname, prefix = nil)
if File.exist?(pathname)
return if Config.new(pathname) == self
end
pathname.open('w') { |file| file << to_s(prefix) }
end | ruby | {
"resource": ""
} |
q12119 | Xcodeproj.Config.hash_from_file_content | train | def hash_from_file_content(string)
# Parses raw xcconfig text into a key/value hash.
# Side effect: #include directives are appended to @includes rather than
# being returned.
hash = {}
string.split("\n").each do |line|
uncommented_line = strip_comment(line)
# Deliberate assignment-in-condition: non-nil means an #include line.
if include = extract_include(uncommented_line)
@includes.push normalized_xcconfig_path(include)
else
key, value = extract_key_value(uncommented_line)
next unless key
# Expand $(inherited) with the value accumulated so far for this key;
# when the key is new the token is left as-is (fetch default is the match).
value.gsub!(INHERITED_REGEXP) { |m| hash.fetch(key, m) }
hash[key] = value
end
end
hash
end | ruby | {
q12120 | Xcodeproj.Config.merge_attributes! | train | def merge_attributes!(attributes)
# Merges another set of build settings into @attributes. On a key collision
# the values are compared as shell-word lists to avoid duplicating flags:
# - if the incoming value is a subset/prefix of the existing one, keep ours;
# - if the existing value is a prefix of the incoming one, take theirs;
# - otherwise concatenate both.
@attributes.merge!(attributes) do |_, v1, v2|
v1 = v1.strip
v2 = v2.strip
v1_split = v1.shellsplit
v2_split = v2.shellsplit
if (v2_split - v1_split).empty? || v1_split.first(v2_split.size) == v2_split
v1
elsif v2_split.first(v1_split.size) == v1_split
v2
else
"#{v1} #{v2}"
end
end
end | ruby | {
q12121 | Xcodeproj.Config.extract_key_value | train | def extract_key_value(line)
match = line.match(KEY_VALUE_PATTERN)
if match
key = match[1]
value = match[2]
[key.strip, value.strip]
else
[]
end
end | ruby | {
"resource": ""
} |
q12122 | SpecHelper.ProjectHelper.compare_settings | train | def compare_settings(produced, expected, params)
it 'should match build settings' do
# Find faulty settings in different categories
missing_settings = expected.keys.reject { |k| produced.key?(k) }
unexpected_settings = produced.keys.reject { |k| expected.key?(k) }
wrong_settings = (expected.keys - missing_settings).select do |k|
produced_setting = produced[k]
produced_setting = produced_setting.join(' ') if produced_setting.respond_to? :join
produced_setting != expected[k]
end
# Build pretty description for what is going on
description = []
description << "Doesn't match build settings for \e[1m#{params}\e[0m"
if wrong_settings.count > 0
description << 'Wrong build settings:'
description += wrong_settings.map { |s| "* #{s.to_s.yellow} is #{produced[s].to_s.red}, but should be #{expected[s].to_s.green}" }
description << ''
end
if missing_settings.count > 0
description << 'Missing build settings:'
description << missing_settings.map { |s| "* #{s.to_s.red} (#{expected[s]})" }
description << ''
end
if unexpected_settings.count > 0
description << 'Unexpected additional build settings:'
description += unexpected_settings.map { |s| "* #{s.to_s.green} (#{produced[s]})" }
description << ''
end
# Expect
faulty_settings = missing_settings + unexpected_settings + wrong_settings
faulty_settings.should.satisfy(description * "\n") do
faulty_settings.length == 0
end
end
end | ruby | {
"resource": ""
} |
q12123 | SpecHelper.ProjectHelper.load_settings | train | def load_settings(path, type)
  # Loads an xcconfig fixture and strips keys that are either globally
  # excluded or covered by the project's default build settings for `type`.
  # Load fixture
  base_path = Pathname(fixture_path("CommonBuildSettings/configs/#{path}"))
  config_fixture = base_path + "#{path}_#{type}.xcconfig"
  config = Xcodeproj::Config.new(config_fixture)
  settings = config.to_hash

  # Filter exclusions
  settings = apply_exclusions(settings, EXCLUDED_KEYS)

  project_defaults_by_config = Xcodeproj::Constants::PROJECT_DEFAULT_BUILD_SETTINGS
  project_defaults = project_defaults_by_config[:all]
  # BUG FIX: the old code called `merge` and discarded the result, so
  # type-specific defaults were never applied. Reassign instead of using
  # merge!, which would mutate the shared constant.
  project_defaults = project_defaults.merge(project_defaults_by_config[type]) unless type == :base
  settings = apply_exclusions(settings, project_defaults)
  settings
end | ruby | {
"resource": ""
} |
q12124 | Xcodeproj.Workspace.save_as | train | def save_as(path)
FileUtils.mkdir_p(path)
File.open(File.join(path, 'contents.xcworkspacedata'), 'w') do |out|
out << to_s
end
end | ruby | {
"resource": ""
} |
q12125 | Xcodeproj.Workspace.load_schemes_from_project | train | def load_schemes_from_project(project_full_path)
schemes = Xcodeproj::Project.schemes project_full_path
schemes.each do |scheme_name|
@schemes[scheme_name] = project_full_path
end
end | ruby | {
"resource": ""
} |
q12126 | EPUBMaker.EPUBCommon.container | train | def container
@opf_path = opf_path
tmplfile = File.expand_path('./xml/container.xml.erb', ReVIEW::Template::TEMPLATE_DIR)
tmpl = ReVIEW::Template.load(tmplfile)
tmpl.result(binding)
end | ruby | {
"resource": ""
} |
q12127 | EPUBMaker.EPUBCommon.mytoc | train | def mytoc
# Renders the in-book table-of-contents page (HTML5 or XHTML1 layout,
# depending on the configured htmlversion). The @-ivars feed the ERB layout.
@title = CGI.escapeHTML(@producer.res.v('toctitle'))
@body = %Q(    <h1 class="toc-title">#{CGI.escapeHTML(@producer.res.v('toctitle'))}</h1>\n)
# flattoc nil => hierarchical nested list; otherwise a flat list with
# optional indentation.
if @producer.config['epubmaker']['flattoc'].nil?
@body << hierarchy_ncx('ul')
else
@body << flat_ncx('ul', @producer.config['epubmaker']['flattocindent'])
end
@language = @producer.config['language']
@stylesheets = @producer.config['stylesheet']
tmplfile = if @producer.config['htmlversion'].to_i == 5
File.expand_path('./html/layout-html5.html.erb', ReVIEW::Template::TEMPLATE_DIR)
else
File.expand_path('./html/layout-xhtml1.html.erb', ReVIEW::Template::TEMPLATE_DIR)
end
tmpl = ReVIEW::Template.load(tmplfile)
tmpl.result(binding)
end | ruby | {
"resource": ""
} |
q12128 | ReVIEW.Compiler.compile_inline | train | def compile_inline(str)
# Compiles one inline op of the form @<op>{arg} by dispatching to the
# strategy's inline_<op> method. On any failure the error is reported and
# the raw text is emitted verbatim instead.
# NOTE(review): when str does not match the pattern, `.captures` raises
# NoMethodError on nil, which is swallowed by the rescue below — presumably
# intentional fallback behavior; confirm.
op, arg = /\A@<(\w+)>\{(.*?)\}\z/.match(str).captures
unless inline_defined?(op)
raise CompileError, "no such inline op: #{op}"
end
unless @strategy.respond_to?("inline_#{op}")
raise "strategy does not support inline op: @<#{op}>"
end
@strategy.__send__("inline_#{op}", arg)
rescue => e
error e.message
@strategy.nofunc_text(str)
end | ruby | {
q12129 | EPUBMaker.Producer.load | train | def load(file)
if file.nil? || !File.exist?(file)
raise "Can't open #{file}."
end
loader = ReVIEW::YAMLLoader.new
merge_config(@config.deep_merge(loader.load_file(file)))
end | ruby | {
"resource": ""
} |
q12130 | EPUBMaker.Producer.merge_config | train | def merge_config(config)
# Deep-merges new configuration into @config, then re-derives dependent
# state: the EPUB writer version, the I18n locale, and legacy-key support.
@config.deep_merge!(config)
complement
unless @config['epubversion'].nil?
case @config['epubversion'].to_i
when 2
@epub = EPUBMaker::EPUBv2.new(self)
when 3
@epub = EPUBMaker::EPUBv3.new(self)
else
raise "Invalid EPUB version (#{@config['epubversion']}.)"
end
end
# Switch the message locale whenever the incoming config sets a language.
if config['language']
ReVIEW::I18n.locale = config['language']
end
support_legacy_maker
end | ruby | {
q12131 | EPUBMaker.Producer.mimetype | train | def mimetype(wobj)
s = @epub.mimetype
if !s.nil? && !wobj.nil?
wobj.print s
end
end | ruby | {
"resource": ""
} |
q12132 | EPUBMaker.Producer.opf | train | def opf(wobj)
s = @epub.opf
if !s.nil? && !wobj.nil?
wobj.puts s
end
end | ruby | {
"resource": ""
} |
q12133 | EPUBMaker.Producer.ncx | train | def ncx(wobj, indentarray = [])
s = @epub.ncx(indentarray)
if !s.nil? && !wobj.nil?
wobj.puts s
end
end | ruby | {
"resource": ""
} |
q12134 | EPUBMaker.Producer.container | train | def container(wobj)
s = @epub.container
if !s.nil? && !wobj.nil?
wobj.puts s
end
end | ruby | {
"resource": ""
} |
q12135 | EPUBMaker.Producer.colophon | train | def colophon(wobj)
s = @epub.colophon
if !s.nil? && !wobj.nil?
wobj.puts s
end
end | ruby | {
"resource": ""
} |
q12136 | EPUBMaker.Producer.mytoc | train | def mytoc(wobj)
s = @epub.mytoc
if !s.nil? && !wobj.nil?
wobj.puts s
end
end | ruby | {
"resource": ""
} |
q12137 | EPUBMaker.EPUBv2.ncx | train | def ncx(indentarray)
@ncx_isbn = ncx_isbn
@ncx_doctitle = ncx_doctitle
@ncx_navmap = ncx_navmap(indentarray)
tmplfile = File.expand_path('./ncx/epubv2.ncx.erb', ReVIEW::Template::TEMPLATE_DIR)
ReVIEW::Template.load(tmplfile).result(binding)
end | ruby | {
"resource": ""
} |
q12138 | EPUBMaker.EPUBv2.produce | train | def produce(epubfile, basedir, tmpdir)
produce_write_common(basedir, tmpdir)
File.open("#{tmpdir}/OEBPS/#{@producer.config['bookname']}.ncx", 'w') do |f|
@producer.ncx(f, @producer.config['epubmaker']['ncxindent'])
end
if @producer.config['mytoc']
File.open("#{tmpdir}/OEBPS/#{@producer.config['bookname']}-toc.#{@producer.config['htmlext']}", 'w') do |f|
@producer.mytoc(f)
end
end
@producer.call_hook(@producer.config['epubmaker']['hook_prepack'], tmpdir)
expoter = EPUBMaker::ZipExporter.new(tmpdir, @producer.config)
expoter.export_zip(epubfile)
end | ruby | {
"resource": ""
} |
q12139 | ReVIEW.LATEXBuilder.inline_i | train | def inline_i(str)
if @book.config.check_version('2', exception: false)
macro('textit', escape(str))
else
macro('reviewit', escape(str))
end
end | ruby | {
"resource": ""
} |
q12140 | ReVIEW.YAMLLoader.load_file | train | def load_file(yamlfile)
# Loads a YAML file and resolves its 'inherit' chain breadth-first:
# inherited files are merged *under* the current result (children win),
# and circular inheritance raises.
file_queue = [File.expand_path(yamlfile)]
loaded_files = {}
yaml = {}
loop do
# Check exit condition
return yaml if file_queue.empty?
current_file = file_queue.shift
current_yaml = YAML.load_file(current_file)
# Later (more-derived) files already in `yaml` take precedence.
yaml = current_yaml.deep_merge(yaml)
next unless yaml.key?('inherit')
buf = []
# reverse_each keeps the declared inherit order once buf is prepended.
yaml['inherit'].reverse_each do |item|
inherit_file = File.expand_path(item, File.dirname(yamlfile))
# Check loop
if loaded_files[inherit_file]
raise "Found circular YAML inheritance '#{inherit_file}' in #{yamlfile}."
end
loaded_files[inherit_file] = true
buf << inherit_file
end
# Consume the 'inherit' key so it is not re-processed or leaked to callers.
yaml.delete('inherit')
file_queue = buf + file_queue
end
end | ruby | {
q12141 | EPUBMaker.Content.complement | train | def complement
# Fills in missing content metadata: derives @id from the file path,
# derives @media (MIME type) from the file extension, and validates that
# all three are present afterwards.
if @id.nil?
# Slashes, dots, backslashes and spaces are not valid in XML ids.
@id = @file.gsub(%r{[\\/\. ]}, '-')
end
# XML ids must not start with a non-letter; prefix such ids.
if @id =~ /\A[^a-z]/i
@id = "rv-#{@id}"
end
if !@file.nil? && @media.nil?
# Everything up to the last dot stripped => the file extension.
@media = @file.sub(/.+\./, '').downcase
end

# Map extension (or shorthand) to its canonical MIME type.
case @media
when 'xhtml', 'xml', 'html'
@media = 'application/xhtml+xml'
when 'css'
@media = 'text/css'
when 'jpg', 'jpeg', 'image/jpg'
@media = 'image/jpeg'
when 'png'
@media = 'image/png'
when 'gif'
@media = 'image/gif'
when 'svg', 'image/svg'
@media = 'image/svg+xml'
when 'ttf', 'otf'
@media = 'application/vnd.ms-opentype'
when 'woff'
@media = 'application/font-woff'
end

if @id.nil? || @file.nil? || @media.nil?
raise "Type error: #{id}, #{file}, #{media}, #{title}, #{notoc}"
end
end | ruby | {
q12142 | Split.Trial.choose! | train | def choose!(context = nil)
# Picks (at most once) the alternative for this trial, in priority order:
# explicit override, disabled experiment (control), declared winner,
# previously stored user choice, then a newly drawn alternative.
# `context` is passed through to the configured callbacks.
@user.cleanup_old_experiments!
# Only run the process once
return alternative if @alternative_choosen

if override_is_alternative?
self.alternative = @options[:override]
# Overrides only count as participation for users without a stored choice.
if should_store_alternative? && !@user[@experiment.key]
self.alternative.increment_participation
end
elsif @options[:disabled] || Split.configuration.disabled?
self.alternative = @experiment.control
elsif @experiment.has_winner?
self.alternative = @experiment.winner
else
cleanup_old_versions

if exclude_user?
self.alternative = @experiment.control
else
# Reuse the user's stored alternative if present; otherwise draw one.
self.alternative = @user[@experiment.key]
if alternative.nil?
self.alternative = @experiment.next_alternative

# Increment the number of participants since we are actually choosing a new alternative
self.alternative.increment_participation

run_callback context, Split.configuration.on_trial_choose
end
end
end

# Persist the choice unless the experiment already has a winner.
@user[@experiment.key] = alternative.name if !@experiment.has_winner? && should_store_alternative?
@alternative_choosen = true
run_callback context, Split.configuration.on_trial unless @options[:disabled] || Split.configuration.disabled?
alternative
end | ruby | {
q12143 | Mysql2.Client.parse_connect_attrs | train | def parse_connect_attrs(conn_attrs)
return {} if Mysql2::Client::CONNECT_ATTRS.zero?
conn_attrs ||= {}
conn_attrs[:program_name] ||= $PROGRAM_NAME
conn_attrs.each_with_object({}) do |(key, value), hash|
hash[key.to_s] = value.to_s
end
end | ruby | {
"resource": ""
} |
q12144 | Mysql2.Error.clean_message | train | def clean_message(message)
# Normalizes a server error message to a safely-encoded string.
# NOTE(review): the branch suggests servers newer than 5.5 already send
# UTF-8 (so only ENCODE_OPTS normalization applies), while older servers
# need an explicit transcode to UTF-8 — confirm against ENCODE_OPTS.
if @server_version && @server_version > 50500
message.encode(ENCODE_OPTS)
else
message.encode(Encoding::UTF_8, ENCODE_OPTS)
end
end | ruby | {
q12145 | RspecApiDocumentation.DSL.resource | train | def resource(*args, &block)
options = args.last.is_a?(Hash) ? args.pop : {}
options[:api_doc_dsl] = :resource
options[:resource_name] = args.first.to_s
options[:document] = :all unless options.key?(:document)
args.push(options)
describe(*args, &block)
end | ruby | {
"resource": ""
} |
q12146 | RspecApiDocumentation.Configuration.define_group | train | def define_group(name, &block)
subconfig = self.class.new(self)
subconfig.filter = name
subconfig.docs_dir = self.docs_dir.join(name.to_s)
yield subconfig
groups << subconfig
end | ruby | {
"resource": ""
} |
q12147 | ActiveRecordQueryTrace.CustomLogSubscriber.lines_to_display | train | def lines_to_display(full_trace)
ActiveRecordQueryTrace.lines.zero? ? full_trace : full_trace.first(ActiveRecordQueryTrace.lines)
end | ruby | {
"resource": ""
} |
q12148 | ActiveRecordQueryTrace.CustomLogSubscriber.setup_backtrace_cleaner_path | train | def setup_backtrace_cleaner_path
# Points Rails' backtrace cleaner at the application root when it is still
# set to the filesystem root ('/'), so app frames are recognized.
# NOTE(review): reaches into BacktraceCleaner's private @root ivar — there
# is no public accessor; this may break across Rails versions.
return unless Rails.backtrace_cleaner.instance_variable_get(:@root) == '/'
Rails.backtrace_cleaner.instance_variable_set :@root, Rails.root.to_s
end | ruby | {
q12149 | Stripe.StripeObject.update_attributes | train | def update_attributes(values, opts = {}, dirty: true)
# Bulk-assigns attributes, defining accessor methods for keys that do not
# have them yet. Values are wrapped as StripeObjects where applicable, and
# every assigned key is tracked as unsaved (and optionally marked dirty).
values.each do |k, v|
# Only define accessors once per key on this object's metaclass.
add_accessors([k], values) unless metaclass.method_defined?(k.to_sym)
@values[k] = Util.convert_to_stripe_object(v, opts)
dirty_value!(@values[k]) if dirty
@unsaved_values.add(k)
end
end | ruby | {
q12150 | Stripe.StripeObject.empty_values | train | def empty_values(obj)
values = case obj
when Hash then obj
when StripeObject then obj.instance_variable_get(:@values)
else
raise ArgumentError, "#empty_values got unexpected object type: #{obj.class.name}"
end
values.each_with_object({}) do |(k, _), update|
update[k] = ""
end
end | ruby | {
"resource": ""
} |
q12151 | Stripe.ListObject.auto_paging_each | train | def auto_paging_each(&blk)
return enum_for(:auto_paging_each) unless block_given?
page = self
loop do
page.each(&blk)
page = page.next_page
break if page.empty?
end
end | ruby | {
"resource": ""
} |
q12152 | Stripe.StripeClient.format_app_info | train | def format_app_info(info)
str = info[:name]
str = "#{str}/#{info[:version]}" unless info[:version].nil?
str = "#{str} (#{info[:url]})" unless info[:url].nil?
str
end | ruby | {
"resource": ""
} |
q12153 | Stripe.StripeClient.specific_oauth_error | train | def specific_oauth_error(resp, error_code, context)
# Logs an OAuth failure and maps its error code onto the matching typed
# exception class; unknown codes fall back to the generic OAuthError.
description = resp.data[:error_description] || error_code

Util.log_error("Stripe OAuth error",
status: resp.http_status,
error_code: error_code,
error_description: description,
idempotency_key: context.idempotency_key,
request_id: context.request_id)

# Shared constructor arguments for every OAuth error subclass.
args = [error_code, description, {
http_status: resp.http_status, http_body: resp.http_body,
json_body: resp.data, http_headers: resp.http_headers,
},]

case error_code
when "invalid_client" then OAuth::InvalidClientError.new(*args)
when "invalid_grant" then OAuth::InvalidGrantError.new(*args)
when "invalid_request" then OAuth::InvalidRequestError.new(*args)
when "invalid_scope" then OAuth::InvalidScopeError.new(*args)
when "unsupported_grant_type" then OAuth::UnsupportedGrantTypeError.new(*args)
when "unsupported_response_type" then OAuth::UnsupportedResponseTypeError.new(*args)
else
# We'd prefer that all errors are typed, but we create a generic
# OAuthError in case we run into a code that we don't recognize.
OAuth::OAuthError.new(*args)
end
end | ruby | {
q12154 | AwesomePrint.ActiveRecord.awesome_active_record_instance | train | def awesome_active_record_instance(object)
# Pretty-prints an ActiveRecord instance as "<object> {attributes}".
# Falls back to #inspect without ActiveSupport, or to plain object
# formatting in :raw mode.
return object.inspect if !defined?(::ActiveSupport::OrderedHash)
return awesome_object(object) if @options[:raw]

# When attribute keys diverge from the schema columns, show them verbatim;
# otherwise show columns in schema order, preferring reader methods.
data = if object.class.column_names != object.attributes.keys
object.attributes
else
object.class.column_names.inject(::ActiveSupport::OrderedHash.new) do |hash, name|
if object.has_attribute?(name) || object.new_record?
value = object.respond_to?(name) ? object.send(name) : object.read_attribute(name)
hash[name.to_sym] = value
end
hash
end
end
"#{object} " << awesome_hash(data)
end | ruby | {
q12155 | AwesomePrint.Ripple.awesome_ripple_document_instance | train | def awesome_ripple_document_instance(object)
return object.inspect if !defined?(::ActiveSupport::OrderedHash)
return awesome_object(object) if @options[:raw]
exclude_assoc = @options[:exclude_assoc] or @options[:exclude_associations]
data = object.attributes.inject(::ActiveSupport::OrderedHash.new) do |hash, (name, value)|
hash[name.to_sym] = object.send(name)
hash
end
unless exclude_assoc
data = object.class.embedded_associations.inject(data) do |hash, assoc|
hash[assoc.name] = object.get_proxy(assoc) # Should always be array or Ripple::EmbeddedDocument for embedded associations
hash
end
end
"#{object} " << awesome_hash(data)
end | ruby | {
"resource": ""
} |
q12156 | AwesomePrint.MongoMapper.awesome_mongo_mapper_instance | train | def awesome_mongo_mapper_instance(object)
# Pretty-prints a MongoMapper document as "<label> {keys}", optionally
# including associations (inlined for embedded docs when configured).
return object.inspect if !defined?(::ActiveSupport::OrderedHash)
return awesome_object(object) if @options[:raw]

# Keys sorted by name for stable output.
data = object.keys.keys.sort_by { |k| k }.inject(::ActiveSupport::OrderedHash.new) do |hash, name|
hash[name] = object[name]
hash
end

# Add in associations
if @options[:mongo_mapper][:show_associations]
object.associations.each do |name, assoc|
# Embedded associations can be rendered inline (their actual value)
# instead of as an association descriptor.
data[name.to_s] = if @options[:mongo_mapper][:inline_embedded] and assoc.embeddable?
object.send(name)
else
assoc
end
end
end

label = object.to_s
label = "#{colorize('embedded', :assoc)} #{label}" if object.is_a?(::MongoMapper::EmbeddedDocument)

"#{label} " << awesome_hash(data)
end | ruby | {
q12157 | AwesomePrint.ActionView.ap_debug | train | def ap_debug(object, options = {})
object.ai(
options.merge(html: true)
).sub(
/^<pre([\s>])/,
'<pre class="debug_dump"\\1'
)
end | ruby | {
"resource": ""
} |
q12158 | ComfortableMexicanSofa::ActsAsTree.InstanceMethods.ancestors | train | def ancestors
node = self
nodes = []
nodes << node = node.parent while node.parent
nodes
end | ruby | {
"resource": ""
} |
q12159 | ComfortableMexicanSofa::ActsAsTree.InstanceMethods.descendants | train | def descendants
nodes = []
children.each do |c|
nodes << c
nodes << c.descendants
end
nodes.flatten
end | ruby | {
"resource": ""
} |
q12160 | ComfortableMexicanSofa::Seeds.Exporter.export! | train | def export!(classes = nil)
classes ||= SEED_CLASSES
classes.each do |klass|
klass = "ComfortableMexicanSofa::Seeds::#{klass}::Exporter"
klass.constantize.new(from, to).export!
end
end | ruby | {
"resource": ""
} |
q12161 | ComfortableMexicanSofa::Seeds.Exporter.write_file_content | train | def write_file_content(path, data)
::File.open(::File.join(path), "wb") do |f|
data.each do |item|
f.write("[#{item[:header]}]\n")
f.write("#{item[:content]}\n")
end
end
end | ruby | {
"resource": ""
} |
q12162 | ComfortableMexicanSofa::Seeds::Page.Exporter.fragments_data | train | def fragments_data(record, page_path)
# Serializes every fragment of a page into {header:, content:} pairs for
# the seed file. File-type fragments also write their attachments to disk
# under page_path and list the filenames as content.
record.fragments.collect do |frag|
header = "#{frag.tag} #{frag.identifier}"
content =
case frag.tag
when "datetime", "date"
frag.datetime
when "checkbox"
frag.boolean
when "file", "files"
frag.attachments.map do |attachment|
# Dump each attachment next to the page's seed files.
::File.open(::File.join(page_path, attachment.filename.to_s), "wb") do |f|
f.write(attachment.download)
end
attachment.filename
end.join("\n")
else
frag.content
end
{ header: header, content: content }
end
end | ruby | {
q12163 | ComfortableMexicanSofa::Seeds::Page.Importer.import_translations | train | def import_translations(path, page)
# Imports per-locale translation files (content.<locale>.html) for a page:
# creates/updates each translation from its attributes + fragments, and
# destroys translations no longer present on disk.
old_translations = page.translations.pluck(:locale)
new_translations = []

Dir["#{path}content.*.html"].each do |file_path|
locale = File.basename(file_path).match(%r{content\.(\w+)\.html})[1]
new_translations << locale

translation = page.translations.where(locale: locale).first_or_initialize
# Skip files not newer than the stored record (seed freshness check).
next unless fresh_seed?(translation, file_path)

# reading file content in, resulting in a hash
fragments_hash = parse_file_content(file_path)

# parsing attributes section
attributes_yaml = fragments_hash.delete("attributes")
attrs = YAML.safe_load(attributes_yaml)

# applying attributes
# Fall back to the page's own layout when none is declared in the seed.
layout = site.layouts.find_by(identifier: attrs.delete("layout")) || page.try(:layout)
translation.attributes = attrs.merge(
layout: layout
)

# applying fragments
old_frag_identifiers = translation.fragments.pluck(:identifier)
new_frag_identifiers, fragments_attributes =
construct_fragments_attributes(fragments_hash, translation, path)
translation.fragments_attributes = fragments_attributes

if translation.save
message = "[CMS SEEDS] Imported Translation \t #{locale}"
ComfortableMexicanSofa.logger.info(message)

# cleaning up old fragments
frags_to_remove = old_frag_identifiers - new_frag_identifiers
translation.fragments.where(identifier: frags_to_remove).destroy_all
else
message = "[CMS SEEDS] Failed to import Translation \n#{locale}"
ComfortableMexicanSofa.logger.warn(message)
end
end

# Cleaning up removed translations
translations_to_remove = old_translations - new_translations
page.translations.where(locale: translations_to_remove).destroy_all
end | ruby | {
q12164 | ComfortableMexicanSofa::Seeds::Page.Importer.construct_fragments_attributes | train | def construct_fragments_attributes(hash, record, path)
# Converts parsed seed sections ("tag identifier" => content) into fragment
# attribute hashes. Returns [identifiers, attributes] so the caller can
# later destroy fragments that disappeared from the seed file.
frag_identifiers = []
frag_attributes = hash.collect do |frag_header, frag_content|
tag, identifier = frag_header.split
frag_hash = {
identifier: identifier,
tag: tag
}
# tracking fragments that need removing later
frag_identifiers << identifier

# based on tag we need to cram content in proper place and proper format
case tag
when "date", "datetime"
frag_hash[:datetime] = frag_content
when "checkbox"
frag_hash[:boolean] = frag_content
when "file", "files"
# Attachment uploads plus the ids of old attachments to destroy.
files, file_ids_destroy = files_content(record, identifier, path, frag_content)
frag_hash[:files] = files
frag_hash[:file_ids_destroy] = file_ids_destroy
else
frag_hash[:content] = frag_content
end
frag_hash
end
[frag_identifiers, frag_attributes]
end | ruby | {
q12165 | ComfortableMexicanSofa::Seeds::Page.Importer.files_content | train | def files_content(record, identifier, path, frag_content)
# Builds ActiveStorage-style attachment hashes for each filename listed in
# frag_content (one per line), and collects the ids of the fragment's
# existing attachments so they can be replaced.
# preparing attachments
files = frag_content.split("\n").collect do |filename|
file_handler = File.open(File.join(path, filename))
{
io: file_handler,
filename: filename,
# Content type sniffed from file bytes, not the extension.
content_type: MimeMagic.by_magic(file_handler)
}
end
# ensuring that old attachments get removed
ids_destroy = []
if (frag = record.fragments.find_by(identifier: identifier))
ids_destroy = frag.attachments.pluck(:id)
end
[files, ids_destroy]
end | ruby | {
q12166 | Comfy.CmsHelper.cms_fragment_render | train | def cms_fragment_render(identifier, page = @cms_page)
node = page.fragment_nodes.detect { |n| n.identifier == identifier.to_s }
return "" unless node
node.renderable = true
render inline: page.render([node])
end | ruby | {
"resource": ""
} |
q12167 | Comfy.CmsHelper.cms_snippet_render | train | def cms_snippet_render(identifier, cms_site = @cms_site)
cms_site ||= cms_site_detect
snippet = cms_site&.snippets&.find_by_identifier(identifier)
return "" unless snippet
r = ComfortableMexicanSofa::Content::Renderer.new(snippet)
render inline: r.render(r.nodes(r.tokenize(snippet.content)))
end | ruby | {
"resource": ""
} |
q12168 | Comfy.CmsHelper.comfy_paginate | train | def comfy_paginate(collection)
return unless collection
if defined?(WillPaginate)
will_paginate collection
elsif defined?(Kaminari)
paginate collection, theme: "comfy"
end
end | ruby | {
"resource": ""
} |
q12169 | JIRA.Client.post | train | def post(path, body = '', headers = {})
headers = { 'Content-Type' => 'application/json' }.merge(headers)
request(:post, path, body, merge_default_headers(headers))
end | ruby | {
"resource": ""
} |
q12170 | JIRA.Base.respond_to? | train | def respond_to?(method_name, _include_all = false)
if attrs.key?(method_name.to_s)
true
else
super(method_name)
end
end | ruby | {
"resource": ""
} |
q12171 | JIRA.Base.method_missing | train | def method_missing(method_name, *_args)
if attrs.key?(method_name.to_s)
attrs[method_name.to_s]
else
super(method_name)
end
end | ruby | {
"resource": ""
} |
q12172 | JIRA.Base.fetch | train | def fetch(reload = false, query_params = {})
return if expanded? && !reload
response = client.get(url_with_query_params(url, query_params))
set_attrs_from_response(response)
@expanded = true
end | ruby | {
"resource": ""
} |
q12173 | JIRA.Base.set_attrs_from_response | train | def set_attrs_from_response(response)
unless response.body.nil? || (response.body.length < 2)
json = self.class.parse_json(response.body)
set_attrs(json)
end
end | ruby | {
"resource": ""
} |
q12174 | JIRA.Base.set_attrs | train | def set_attrs(hash, clobber = true, target = nil)
target ||= @attrs
if clobber
target.merge!(hash)
hash
else
hash.each do |k, v|
if v.is_a?(Hash)
set_attrs(v, clobber, target[k])
else
target[k] = v
end
end
end
end | ruby | {
"resource": ""
} |
q12175 | Que.JobMethods._run | train | def _run(args: nil, reraise_errors: false)
if args.nil? && que_target
args = que_target.que_attrs.fetch(:args)
end
run(*args)
default_resolve_action if que_target && !que_target.que_resolved
rescue => error
raise error unless que_target
que_target.que_error = error
run_error_notifier =
begin
handle_error(error)
rescue => error_2
Que.notify_error(error_2, que_target.que_attrs)
true
end
Que.notify_error(error, que_target.que_attrs) if run_error_notifier
retry_in_default_interval unless que_target.que_resolved
raise error if reraise_errors
end | ruby | {
"resource": ""
} |
q12176 | Que.JobMethods.handle_error | train | def handle_error(error)
return unless que_target
max = resolve_que_setting(:maximum_retry_count)
if max && error_count > max
expire
else
retry_in_default_interval
end
end | ruby | {
"resource": ""
} |
q12177 | Que.JobMethods.retry_in | train | def retry_in(period)
return unless que_target
if id = que_target.que_attrs[:id]
values = [period]
if e = que_target.que_error
values << "#{e.class}: #{e.message}".slice(0, 500) << e.backtrace.join("\n").slice(0, 10000)
else
values << nil << nil
end
Que.execute :set_error, values << id
end
que_target.que_resolved = true
end | ruby | {
"resource": ""
} |
q12178 | Que.JobBuffer.push | train | def push(*metajobs)
Que.internal_log(:job_buffer_push, self) do
{
maximum_size: maximum_size,
ids: metajobs.map(&:id),
current_queue: to_a,
}
end
sync do
return metajobs if _stopping?
@array.concat(metajobs).sort!
# Relying on the hash's contents being sorted, here.
priority_queues.reverse_each do |_, pq|
pq.waiting_count.times do
job = _shift_job(pq.priority)
break if job.nil? # False would mean we're stopping.
pq.push(job)
end
end
# If we passed the maximum buffer size, drop the lowest sort keys and
# return their ids to be unlocked.
overage = -_buffer_space
pop(overage) if overage > 0
end
end | ruby | {
"resource": ""
} |
q12179 | Middleman.DnsResolver.ips_for | train | def ips_for(name)
resolvers.each do |r|
ips = r.getaddresses(name)
return ips unless ips.nil? || ips.empty?
end
[]
end | ruby | {
"resource": ""
} |
q12180 | Middleman.ConfigContext.mime_type | train | def mime_type(type, value)
type = ".#{type}" unless type.to_s[0] == '.'
::Rack::Mime::MIME_TYPES[type] = value
end | ruby | {
"resource": ""
} |
q12181 | Middleman.ExtensionManager.activate | train | def activate(ext_name, options_hash = ::Middleman::EMPTY_HASH, &block)
begin
extension = ::Middleman::Extensions.load(ext_name)
rescue LoadError => e
logger.debug "== Failed Activation `#{ext_name}` : #{e.message}"
return
end
logger.debug "== Activating: #{ext_name}"
if extension.supports_multiple_instances?
@activated[ext_name] ||= {}
key = "instance_#{@activated[ext_name].keys.length}"
@activated[ext_name][key] = extension.new(@app, options_hash, &block)
elsif active?(ext_name)
raise "#{ext_name} has already been activated and cannot be re-activated."
else
@activated[ext_name] = extension.new(@app, options_hash, &block)
end
end | ruby | {
"resource": ""
} |
q12182 | Middleman.Application.apply_cli_options | train | def apply_cli_options
config[:cli_options].each do |k, v|
setting = config.setting(k.to_sym)
next unless setting
v = setting.options[:import].call(v) if setting.options[:import]
config[k.to_sym] = v
end
end | ruby | {
"resource": ""
} |
q12183 | Middleman.Application.prune_tilt_templates! | train | def prune_tilt_templates!
mapping = ::Tilt.default_mapping
mapping.lazy_map.each_key do |key|
begin
mapping[key]
rescue LoadError, NameError
end
end
mapping.lazy_map.clear
end | ruby | {
"resource": ""
} |
q12184 | Middleman::Cli.Extension.extension | train | def extension
copy_file 'extension/gitignore', File.join(name, '.gitignore') unless options[:'skip-git']
template 'extension/Rakefile', File.join(name, 'Rakefile')
template 'extension/gemspec', File.join(name, "#{name}.gemspec")
template 'extension/Gemfile', File.join(name, 'Gemfile')
template 'extension/lib/lib.rb', File.join(name, 'lib', "#{name}.rb")
template 'extension/lib/lib/extension.rb', File.join(name, 'lib', name, 'extension.rb')
template 'extension/features/support/env.rb', File.join(name, 'features', 'support', 'env.rb')
empty_directory File.join(name, 'fixtures')
end | ruby | {
"resource": ""
} |
q12185 | Middleman.Extension.add_exposed_to_context | train | def add_exposed_to_context(context)
(self.class.exposed_to_template || {}).each do |k, v|
context.define_singleton_method(k, &method(v))
end
end | ruby | {
"resource": ""
} |
q12186 | Middleman::Cli.Init.init | train | def init
require 'fileutils'
require 'tmpdir'
unless git_present?
msg = 'You need to install the git command line tool to initialize a new project. '
msg << "For help installing git, please refer to GitHub's tutorial at https://help.github.com/articles/set-up-git"
say msg, :red
exit 1
end
repo_path, repo_branch = if shortname?(options[:template])
require 'open-uri'
require 'json'
api = 'https://directory.middlemanapp.com/api'
uri = ::URI.parse("#{api}/#{options[:template]}.json")
begin
data = ::JSON.parse(uri.read)
is_local_dir = false
data['links']['github'].split('#')
rescue ::OpenURI::HTTPError
say "Template `#{options[:template]}` not found in Middleman Directory."
say 'Did you mean to use a full `user/repo` path?'
exit 1
end
else
repo_name, repo_branch = options[:template].split('#')
repo_path, is_local_dir = repository_path(repo_name)
[repo_path, repo_branch]
end
begin
dir = is_local_dir ? repo_path : clone_repository(repo_path, repo_branch)
inside(target) do
thorfile = File.join(dir, 'Thorfile')
if File.exist?(thorfile)
::Thor::Util.load_thorfile(thorfile)
invoke 'middleman:generator'
else
source_paths << dir
directory dir, '.', exclude_pattern: /\.git\/|\.gitignore$/
end
bundle_args = options[:'bundle-path'] ? " --path=#{options[:'bundle-path']}" : ''
run("bundle install#{bundle_args}") unless ENV['TEST'] || options[:'skip-bundle']
end
ensure
FileUtils.remove_entry(dir) if !is_local_dir && File.directory?(dir)
end
end | ruby | {
"resource": ""
} |
q12187 | Middleman::Cli.Init.which | train | def which(executable)
if File.file?(executable) && File.executable?(executable)
executable
elsif ENV['PATH']
path = ENV['PATH'].split(File::PATH_SEPARATOR).find do |p|
abs_path = File.join(p, executable)
File.file?(abs_path) && File.executable?(abs_path)
end
path && File.expand_path(executable, path)
end
end | ruby | {
"resource": ""
} |
q12188 | Middleman::Cli.Build.build | train | def build
root = ENV['MM_ROOT'] || Dir.pwd
raise Thor::Error, 'Error: Could not find a Middleman project config, perhaps you are in the wrong folder?' unless File.exist?(File.join(root, 'config.rb'))
require 'middleman-core'
require 'middleman-core/logger'
require 'middleman-core/builder'
require 'fileutils'
verbose = options['verbose'] ? 0 : 1
instrument = options['instrument']
builder = nil
cli_options = options
::Middleman::Logger.singleton(verbose, instrument)
::Middleman::Util.instrument 'builder.setup' do
missing_and_changed = !options['only_changed'] && options['missing_and_changed']
should_track_dependencies = options['only_changed'] || missing_and_changed || options['track_dependencies']
data_collection_depth = options['data_collection_depth']
@app = ::Middleman::Application.new do
config[:mode] = :build
config[:show_exceptions] = false
config[:cli_options] = cli_options.each_with_object({}) do |(k, v), sum|
sum[k] = v
end
config[:track_data_access] = should_track_dependencies
config[:data_collection_depth] = data_collection_depth
end
builder = Middleman::Builder.new(@app,
glob: options['glob'],
dry_run: options['dry_run'],
clean: options['clean'],
parallel: options['parallel'],
only_changed: options['only_changed'],
missing_and_changed: missing_and_changed,
track_dependencies: should_track_dependencies,
visualize_graph: options['visualize_graph'])
builder.thor = self
builder.on_build_event(&method(:on_event))
end
::Middleman::Util.instrument 'builder.run' do
if builder.run!
clean_directories! if options['clean']
puts 'Project built successfully.'
else
msg = 'There were errors during this build'
msg << ', re-run with `middleman build --verbose` to see the full exception.' unless options['verbose']
shell.say msg, :red
exit(1)
end
end
end | ruby | {
"resource": ""
} |
q12189 | Middleman::Cli.Build.on_event | train | def on_event(event_type, target, extra = nil)
case event_type
when :error
say_status :error, target, :red
shell.say extra, :red if options['verbose'] || options['bail']
raise 'Build error' if options['bail']
when :deleted
say_status :remove, target, :green
when :created
say_status :create, target, :green
when :identical
say_status :identical, target, :blue
when :skipped
say_status :skipped, target, :blue
when :updated
say_status :updated, target, :yellow
else
say_status event_type, extra, :blue
end
end | ruby | {
"resource": ""
} |
q12190 | Middleman::Cli.Build.clean_directories! | train | def clean_directories!
all_build_files = File.join(@app.config[:build_dir], '**', '*')
empty_directories = Dir[all_build_files].select do |d|
File.directory?(d)
end
empty_directories.each do |d|
remove_file d, force: true if Pathname(d).children.empty?
end
end | ruby | {
"resource": ""
} |
q12191 | Middleman.Rack.process_request | train | def process_request(env, req, res)
start_time = Time.now
request_path = URI.decode(env['PATH_INFO'].dup)
request_path.force_encoding('UTF-8') if request_path.respond_to? :force_encoding
request_path = ::Middleman::Util.full_path(request_path, @middleman)
full_request_path = File.join(env['SCRIPT_NAME'], request_path) # Path including rack mount
# Get the resource object for this path
resource = @middleman.sitemap.by_destination_path(request_path.gsub(' ', '%20'))
# Return 404 if not in sitemap
return not_found(res, full_request_path) unless resource && !resource.ignored?
# If this path is a binary file, send it immediately
return send_file(resource, env) if resource.binary? || resource.static_file?
res['Content-Type'] = resource.content_type || 'text/plain'
begin
# Write out the contents of the page
res.write resource.render({}, rack: { request: req })
# Valid content is a 200 status
res.status = 200
rescue Middleman::TemplateRenderer::TemplateNotFound => e
res.write "Error: #{e.message}"
res.status = 500
end
# End the request
logger.debug "== Finishing Request: #{resource.destination_path} (#{(Time.now - start_time).round(2)}s)"
halt res.finish
end | ruby | {
"resource": ""
} |
q12192 | Middleman.Rack.not_found | train | def not_found(res, path)
path = ::Rack::Utils.escape_html(path)
res.status = 404
res.write "<html><head></head><body><h1>File Not Found</h1><p>#{path}</p></body></html>"
res.finish
end | ruby | {
"resource": ""
} |
q12193 | Middleman.Rack.send_file | train | def send_file(resource, env)
file = ::Rack::File.new nil
path = resource.file_descriptor[:full_path]
if !file.respond_to?(:path=)
request = ::Rack::Request.new(env)
response = file.serving(request, path)
else
file.path = path
response = file.serving(env)
end
status = response[0]
response[1]['Content-Encoding'] = 'gzip' if %w[.svgz .gz].include?(resource.ext)
# Do not set Content-Type if status is 1xx, 204, 205 or 304, otherwise
# Rack will throw an error (500)
if !(100..199).cover?(status) && ![204, 205, 304].include?(status)
response[1]['Content-Type'] = resource.content_type || (resource.binary? ? 'application/octet-stream' : 'text/plain')
end
halt response
end | ruby | {
"resource": ""
} |
q12194 | Middleman::Cli.Server.server | train | def server
require 'middleman-core'
require 'middleman-core/preview_server'
unless ENV['MM_ROOT']
puts '== Could not find a Middleman project config.rb'
exit
end
params = {
debug: options['verbose'],
instrumenting: options['instrument'],
reload_paths: options['reload_paths'],
daemon: options['daemon']
}
puts '== The Middleman is loading'
::Middleman::PreviewServer.start(params, options)
end | ruby | {
"resource": ""
} |
q12195 | Middleman.TemplateContext.wrap_layout | train | def wrap_layout(layout_name, &block)
# Save current buffer for later
buf_was = save_buffer
# Find a layout for this file
layout_file = ::Middleman::TemplateRenderer.locate_layout(@app, layout_name, current_engine)
# Get the layout engine
extension = File.extname(layout_file[:relative_path])
engine = extension[1..-1].to_sym
# Store last engine for later (could be inside nested renders)
self.current_engine = engine
engine_was = current_engine
# By default, no content is captured
content = ''
# Attempt to capture HTML from block
begin
content = capture_html(&block) if block_given?
ensure
# Reset stored buffer, regardless of success
restore_buffer(buf_was)
end
@vertices <<= ::Middleman::Dependencies::FileVertex.from_source_file(@app, layout_file)
# Render the layout, with the contents of the block inside.
concat_safe_content render_file(layout_file, @locs, @opts) { content }
ensure
# Reset engine back to template's value, regardless of success
self.current_engine = engine_was
end | ruby | {
"resource": ""
} |
q12196 | Middleman.TemplateContext.render_file | train | def render_file(file, locs, opts, &block)
_render_with_all_renderers(file[:relative_path].to_s, locs, self, opts, &block)
end | ruby | {
"resource": ""
} |
q12197 | Middleman.Util.glob_directory | train | def glob_directory(path)
results = ::Dir[path]
return results unless RUBY_PLATFORM =~ /darwin/
results.map { |r| r.encode('UTF-8', 'UTF-8-MAC') }
end | ruby | {
"resource": ""
} |
q12198 | Razorpay.Request.create_instance | train | def create_instance(res)
response = res.parsed_response
# if there was an error, throw it
raise_error(response['error'], res.code) if response.nil? || response.key?('error')
# There must be a top level entity
# This is either one of payment, refund, or collection at present
begin
class_name = response['entity'].split('_').collect(&:capitalize).join
klass = Razorpay.const_get class_name
rescue NameError
# Use Entity class if we don't find any
klass = Razorpay::Entity
end
klass.new(response)
end | ruby | {
"resource": ""
} |
q12199 | AttrEncrypted.InstanceMethods.decrypt | train | def decrypt(attribute, encrypted_value)
encrypted_attributes[attribute.to_sym][:operation] = :decrypting
encrypted_attributes[attribute.to_sym][:value_present] = self.class.not_empty?(encrypted_value)
self.class.decrypt(attribute, encrypted_value, evaluated_attr_encrypted_options_for(attribute))
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.