_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q17300
Templater.Generator.actions
train
# Returns the compiled actions for this generator instance.
#
# When +type+ is given, only action descriptions of that type are used;
# otherwise every registered description is considered. A description is
# compiled against this instance only when its options match.
def actions(type=nil)
  descriptions = type ? self.class.actions[type] : self.class.actions.values.flatten
  compiled = []
  descriptions.each do |description|
    compiled << description.compile(self) if match_options?(description.options)
  end
  compiled
end
ruby
{ "resource": "" }
q17301
Templater.Generator.all_actions
train
# Returns this generator's compiled actions together with those of every
# invoked generator, flattened into a single array.
def all_actions(type=nil)
  nested = invocations.map { |invocation| invocation.all_actions(type) }
  (actions(type) + nested).flatten
end
ruby
{ "resource": "" }
q17302
WEBrick.Utils.create_self_signed_cert
train
# Generates a self-signed X.509 certificate with a fresh RSA key pair.
#
# bits    - RSA key size in bits.
# cn      - distinguished-name components for the subject (and issuer).
# comment - value stored in the nsComment extension.
#
# Progress characters are written to $stderr while primes are generated.
# Returns [certificate, rsa_key].
def create_self_signed_cert(bits, cn, comment)
  rsa = OpenSSL::PKey::RSA.new(bits){|p, n|
    case p
    when 0; $stderr.putc "."  # BN_generate_prime
    when 1; $stderr.putc "+"  # BN_generate_prime
    when 2; $stderr.putc "*"  # searching good prime,
                              # n = #of try,
                              # but also data from BN_generate_prime
    when 3; $stderr.putc "\n" # found good prime, n==0 - p, n==1 - q,
                              # but also data from BN_generate_prime
    else; $stderr.putc "*"    # BN_generate_prime
    end
  }
  cert = OpenSSL::X509::Certificate.new
  cert.version = 2
  cert.serial = 1
  name = OpenSSL::X509::Name.new(cn)
  cert.subject = name
  # self-signed: issuer is the subject itself
  cert.issuer = name
  cert.not_before = Time.now
  # valid for one year from now
  cert.not_after = Time.now + (365*24*60*60)
  cert.public_key = rsa.public_key
  ef = OpenSSL::X509::ExtensionFactory.new(nil,cert)
  ef.issuer_certificate = cert
  cert.extensions = [
    ef.create_extension("basicConstraints","CA:FALSE"),
    ef.create_extension("keyUsage", "keyEncipherment"),
    ef.create_extension("subjectKeyIdentifier", "hash"),
    ef.create_extension("extendedKeyUsage", "serverAuth"),
    ef.create_extension("nsComment", comment),
  ]
  aki = ef.create_extension("authorityKeyIdentifier",
                            "keyid:always,issuer:always")
  cert.add_extension(aki)
  # NOTE(review): SHA-1 signatures are considered weak nowadays; callers
  # may want SHA-256 — left unchanged here.
  cert.sign(rsa, OpenSSL::Digest::SHA1.new)
  return [ cert, rsa ]
end
ruby
{ "resource": "" }
q17303
WEBrick.GenericServer.listen
train
# Creates listening sockets for +address+/+port+ and appends them to
# @listeners. When SSL is enabled, the SSL context is built lazily (the
# certificate is logged once) and every socket is wrapped in an SSLServer.
def listen(address, port) # :nodoc:
  listeners = Utils::create_listeners(address, port, @logger)
  if @config[:SSLEnable]
    unless ssl_context
      @ssl_context = setup_ssl_context(@config)
      @logger.info("\n" + @config[:SSLCertificate].to_text)
    end
    listeners.collect!{|svr|
      ssvr = ::OpenSSL::SSL::SSLServer.new(svr, ssl_context)
      ssvr.start_immediately = @config[:SSLStartImmediately]
      ssvr
    }
  end
  @listeners += listeners
end
ruby
{ "resource": "" }
q17304
Extlib.Logger.initialize_log
train
# Initializes the log sink. +log+ may be an IO-like object (anything
# responding to #write), or a file path — appended to when it exists,
# otherwise created (along with its directory) and stamped with a
# "Logfile created" line. Any previously open log is closed first.
def initialize_log(log)
  close if @log # be sure that we don't leave open files laying around.
  if log.respond_to?(:write)
    @log = log
  elsif File.exist?(log)
    @log = open(log, (File::WRONLY | File::APPEND))
    @log.sync = true
  else
    FileUtils.mkdir_p(File.dirname(log)) unless File.directory?(File.dirname(log))
    @log = open(log, (File::WRONLY | File::APPEND | File::CREAT))
    @log.sync = true
    @log.write("#{Time.now.httpdate} #{delimiter} info #{delimiter} Logfile created\n")
  end
end
ruby
{ "resource": "" }
q17305
Extlib.Logger.<<
train
# Appends +string+ to the log buffer, prefixed with the delimiter and
# newline-terminated (the newline is added only when missing). Flushes
# immediately when auto-flush is enabled. Returns the buffered message.
def <<(string = nil)
  message = ""
  message.concat(delimiter)
  message.concat(string) if string
  message.concat("\n") unless message.end_with?("\n")
  @buffer << message
  flush if @auto_flush
  message
end
ruby
{ "resource": "" }
q17306
Net.Telnet.write
train
# Writes +string+ to the telnet socket, looping until every byte is
# accepted (syswrite may write only part of the buffer). Outgoing data is
# logged when a "Dump_log" option is configured.
def write(string)
  length = string.length
  while 0 < length
    # block until the socket is writable
    IO::select(nil, [@sock])
    @dumplog.log_dump('>', string[-length..-1]) if @options.has_key?("Dump_log")
    length -= @sock.syswrite(string[-length..-1])
  end
end
ruby
{ "resource": "" }
q17307
Net.Telnet.cmd
train
# Sends a command and waits for the prompt. +options+ is either a command
# string, or a Hash with "String" plus optional "Match", "Timeout" and
# "FailEOF" overrides (defaults come from @options). Received data is
# yielded to the block when one is given.
def cmd(options) # :yield: recvdata
  match    = @options["Prompt"]
  time_out = @options["Timeout"]
  fail_eof = @options["FailEOF"]
  if options.kind_of?(Hash)
    string   = options["String"]
    match    = options.fetch("Match", match)
    time_out = options.fetch("Timeout", time_out)
    fail_eof = options.fetch("FailEOF", fail_eof)
  else
    string = options
  end
  self.puts(string)
  wait_opts = {"Prompt" => match, "Timeout" => time_out, "FailEOF" => fail_eof}
  if block_given?
    waitfor(wait_opts) { |c| yield c }
  else
    waitfor(wait_opts)
  end
end
ruby
{ "resource": "" }
q17308
Net.Telnet.login
train
# Logs in to the remote host. +options+ is a username string or a Hash
# with "Name"/"Password" and optional "LoginPrompt"/"PasswordPrompt"
# regexp overrides. Waits for the login prompt, sends the username, then
# the password when one was given. Yields received data to the block and
# returns the accumulated output.
def login(options, password = nil) # :yield: recvdata
  login_prompt = /[Ll]ogin[: ]*\z/n
  password_prompt = /[Pp]ass(?:word|phrase)[: ]*\z/n
  if options.kind_of?(Hash)
    username = options["Name"]
    password = options["Password"]
    login_prompt = options["LoginPrompt"] if options["LoginPrompt"]
    password_prompt = options["PasswordPrompt"] if options["PasswordPrompt"]
  else
    username = options
  end
  if block_given?
    line = waitfor(login_prompt){|c| yield c }
    if password
      line += cmd({"String" => username, "Match" => password_prompt}){|c| yield c }
      line += cmd(password){|c| yield c }
    else
      line += cmd(username){|c| yield c }
    end
  else
    line = waitfor(login_prompt)
    if password
      line += cmd({"String" => username, "Match" => password_prompt})
      line += cmd(password)
    else
      line += cmd(username)
    end
  end
  line
end
ruby
{ "resource": "" }
q17309
WEBrick.HTTPUtils.normalize_path
train
# Normalizes an absolute path: collapses "//" runs and resolves "/." and
# "/foo/.." segments. Raises if +path+ is not absolute or would escape
# the root via "..".
def normalize_path(path)
  raise "abnormal path `#{path}'" unless path[0] == ?/
  ret = path.dup
  ret.gsub!(%r{/+}o, '/')                                    # // => /
  while ret.sub!(%r'/\.(?:/|\Z)', '/'); end                  # /. => /
  while ret.sub!(%r'/(?!\.\./)[^/]+/\.\.(?:/|\Z)', '/'); end # /foo/.. => /foo
  raise "abnormal path `#{path}'" if %r{/\.\.(/|\Z)} =~ ret
  ret
end
ruby
{ "resource": "" }
q17310
WEBrick.HTTPUtils.load_mime_types
train
# Parses an Apache-style mime.types file into a Hash mapping each file
# extension to its MIME type. Comment lines ("#"-prefixed) and lines
# without extensions are skipped.
def load_mime_types(file)
  open(file) do |io|
    io.each_with_object(Hash.new) do |line, hash|
      next if /^#/ =~ line
      line.chomp!
      mimetype, ext0 = line.split(/\s+/, 2)
      next unless ext0
      next if ext0.empty?
      ext0.split(/\s+/).each { |ext| hash[ext] = mimetype }
    end
  end
end
ruby
{ "resource": "" }
q17311
WEBrick.HTTPUtils.parse_range_header
train
# Parses an HTTP Range header ("bytes=...") into an Array of Ranges.
# Suffix ranges become negative-offset Ranges, open-ended ranges end at
# -1. Returns nil for a non-bytes specifier or a malformed range spec.
def parse_range_header(ranges_specifier)
  return unless /^bytes=(.*)/ =~ ranges_specifier
  split_header_value($1).collect do |range_spec|
    case range_spec
    when /^(\d+)-(\d+)/ then $1.to_i .. $2.to_i
    when /^(\d+)-/      then $1.to_i .. -1
    when /^-(\d+)/      then -($1.to_i) .. -1
    else return nil
    end
  end
end
ruby
{ "resource": "" }
q17312
WEBrick.HTTPUtils.parse_qvalues
train
# Parses a comma-separated header value with optional ";q=" quality
# factors (e.g. Accept) and returns the values ordered by descending
# quality. Missing q defaults to 1.
def parse_qvalues(value)
  return [] unless value
  entries = []
  value.split(/,\s*/).each do |part|
    m = %r{^([^\s,]+?)(?:;\s*q=(\d+(?:\.\d+)?))?$}.match(part)
    next unless m
    entries << [m[1], (m[2] || 1).to_f]
  end
  entries.sort_by { |_, q| -q }.map { |val, _| val }
end
ruby
{ "resource": "" }
q17313
WEBrick.HTTPUtils.parse_query
train
# Parses a form query string ("&" or ";" separated) into a Hash of
# key => FormData. Repeated keys are appended to the existing FormData.
def parse_query(str)
  query = Hash.new
  if str
    str.split(/[&;]/).each do |pair|
      next if pair.empty?
      key, val = pair.split(/=/, 2)
      key = unescape_form(key)
      data = FormData.new(unescape_form(val.to_s))
      data.name = key
      if query.has_key?(key)
        query[key].append_data(data)
      else
        query[key] = data
      end
    end
  end
  query
end
ruby
{ "resource": "" }
q17314
WEBrick.HTTPUtils.escape_path
train
# Escapes each path segment of +str+ with the unescaped-pchar character
# set, keeping the "/" separators intact.
def escape_path(str)
  str.scan(%r{/([^/]*)}).inject("") do |result, (segment)|
    result << "/" << _escape(segment, UNESCAPED_PCHAR)
  end
end
ruby
{ "resource": "" }
q17315
Rake.FileList.partition
train
# Partitions the resolved file list with the predicate, returning two new
# FileLists: [matching, non-matching].
def partition(&block) # :nodoc:
  resolve
  matched, unmatched = @items.partition(&block)
  [FileList.new.import(matched), FileList.new.import(unmatched)]
end
ruby
{ "resource": "" }
q17316
Rake.FileList.add_matching
train
# Appends every file matching the glob +pattern+, skipping names that are
# on the exclusion list.
def add_matching(pattern)
  matches = FileList.glob(pattern)
  matches.reject { |fn| excluded_from_list?(fn) }.each { |fn| self << fn }
end
ruby
{ "resource": "" }
q17317
Rake.Promise.value
train
# Returns the promised value, computing it under the mutex when it has
# not been produced yet, and re-raising any error captured by #chore.
def value
  unless complete?
    stat :sleeping_on, :item_id => object_id
    @mutex.synchronize do
      stat :has_lock_on, :item_id => object_id
      chore
      stat :releasing_lock_on, :item_id => object_id
    end
  end
  error? ? raise(@error) : @result
end
ruby
{ "resource": "" }
q17318
Rake.Promise.chore
train
# Performs the promised work exactly once: stores either the block's
# result or the raised exception (re-raised later by #value), then
# discards the block and its arguments.
def chore
  if complete?
    stat :found_completed, :item_id => object_id
    return
  end
  stat :will_execute, :item_id => object_id
  begin
    @result = @block.call(*@args)
  rescue Exception => e
    # NOTE(review): rescuing Exception is deliberate — the error is
    # stored and re-raised by #value, not swallowed.
    @error = e
  end
  stat :did_execute, :item_id => object_id
  discard
end
ruby
{ "resource": "" }
q17319
Net.HTTPHeader.[]=
train
# Sets the header field +key+ (case-insensitive) to the single value
# +val+. Passing nil deletes the field entirely.
def []=(key, val)
  field = key.downcase
  if val
    @header[field] = [val]
  else
    @header.delete field
    val
  end
end
ruby
{ "resource": "" }
q17320
Net.HTTPHeader.each_header
train
# Yields each header field name together with its values joined by ", ".
# Returns an Enumerator when called without a block.
def each_header #:yield: +key+, +value+
  return enum_for(__method__) unless block_given?
  @header.each do |key, values|
    yield key, values.join(', ')
  end
end
ruby
{ "resource": "" }
q17321
Net.HTTPResponse.read_body
train
# Reads the response body into +dest+ or yields chunks to +block+.
# The body may only be streamed once: later calls return the cached body
# and raise IOError when a destination/block is supplied again. Returns
# nil when the response has no body.
def read_body(dest = nil, &block)
  if @read
    raise IOError, "#{self.class}\#read_body called twice" if dest or block
    return @body
  end
  to = procdest(dest, block)
  stream_check
  if @body_exist
    read_body_0 to
    @body = to
  else
    @body = nil
  end
  @read = true
  @body
end
ruby
{ "resource": "" }
q17322
RestClient.AbstractResponse.follow_redirection
train
# Follows the Location header of a redirect response by re-executing the
# request. A relative Location is resolved against the original URL.
# When the original +request+ is given, its credentials and headers are
# forwarded, max_redirects is decremented (raising MaxRedirectsReached at
# zero) and cookies set by +result+ are merged into the headers.
def follow_redirection request = nil, result = nil, & block
  url = headers[:location]
  if url !~ /^http/
    url = URI.parse(args[:url]).merge(url).to_s
  end
  args[:url] = url
  if request
    if request.max_redirects == 0
      raise MaxRedirectsReached
    end
    args[:password] = request.password
    args[:user] = request.user
    args[:headers] = request.headers
    args[:max_redirects] = request.max_redirects - 1
    # pass any cookie set in the result
    if result && result['set-cookie']
      args[:headers][:cookies] = (args[:headers][:cookies] || {}).merge(parse_cookie(result['set-cookie']))
    end
  end
  Request.execute args, &block
end
ruby
{ "resource": "" }
q17323
RestClient.AbstractResponse.parse_cookie
train
# Parses a Set-Cookie header into a Hash of escaped name => escaped
# value, ignoring the "expires" and "path" attributes.
def parse_cookie cookie_content
  result = {}
  CGI::Cookie::parse(cookie_content).each do |key, cookie|
    next if ['expires', 'path'].include? key
    value = cookie.value[0]
    result[CGI::escape(key)] = value ? CGI::escape(value) : ''
  end
  result
end
ruby
{ "resource": "" }
q17324
Rake.TaskManager.[]
train
# Looks up +task_name+: explicit lookup first, then rule synthesis, then
# a file task. Fails when nothing can build the task.
def [](task_name, scopes=nil)
  task_name = task_name.to_s
  task = self.lookup(task_name, scopes) ||
         enhance_with_matching_rule(task_name) ||
         synthesize_file_task(task_name)
  task or fail "Don't know how to build task '#{task_name}'"
end
ruby
{ "resource": "" }
q17325
WEBrick.HTTPServer.run
train
# Per-connection request loop: reads, dispatches and answers HTTP
# requests on +sock+ until keep-alive ends, the client closes, or the
# server leaves the :Running state.
def run(sock)
  while true
    res = HTTPResponse.new(@config)
    req = HTTPRequest.new(@config)
    server = self
    begin
      # wait up to :RequestTimeout for readable data, polling every 0.5s
      # so the loop can abort when the server is shutting down
      timeout = @config[:RequestTimeout]
      while timeout > 0
        break if IO.select([sock], nil, nil, 0.5)
        timeout = 0 if @status != :Running
        timeout -= 0.5
      end
      raise HTTPStatus::EOFError if timeout <= 0
      raise HTTPStatus::EOFError if sock.eof?
      req.parse(sock)
      res.request_method = req.request_method
      res.request_uri = req.request_uri
      res.request_http_version = req.http_version
      res.keep_alive = req.keep_alive?
      # dispatch to the matching virtual host (or this server)
      server = lookup_server(req) || self
      if callback = server[:RequestCallback]
        callback.call(req, res)
      elsif callback = server[:RequestHandler]
        msg = ":RequestHandler is deprecated, please use :RequestCallback"
        @logger.warn(msg)
        callback.call(req, res)
      end
      server.service(req, res)
    rescue HTTPStatus::EOFError, HTTPStatus::RequestTimeout => ex
      res.set_error(ex)
    rescue HTTPStatus::Error => ex
      @logger.error(ex.message)
      res.set_error(ex)
    rescue HTTPStatus::Status => ex
      res.status = ex.code
    rescue StandardError => ex
      @logger.error(ex)
      res.set_error(ex, true)
    ensure
      # always answer and log once a request line was read
      if req.request_line
        if req.keep_alive? && res.keep_alive?
          req.fixup()
        end
        res.send_response(sock)
        server.access_log(@config, req, res)
      end
    end
    break if @http_version < "1.1"
    break unless req.keep_alive?
    break unless res.keep_alive?
  end
end
ruby
{ "resource": "" }
q17326
WEBrick.HTTPServer.service
train
# Dispatches +req+ to the servlet mounted at its path. The special "*"
# request-URI is only valid for OPTIONS; unmatched paths raise NotFound.
def service(req, res)
  if req.unparsed_uri == "*"
    if req.request_method == "OPTIONS"
      do_OPTIONS(req, res)
      raise HTTPStatus::OK
    end
    raise HTTPStatus::NotFound, "`#{req.unparsed_uri}' not found."
  end
  servlet, options, script_name, path_info = search_servlet(req.path)
  raise HTTPStatus::NotFound, "`#{req.path}' not found." unless servlet
  req.script_name = script_name
  req.path_info = path_info
  si = servlet.get_instance(self, *options)
  @logger.debug(format("%s is invoked.", si.class.name))
  si.service(req, res)
end
ruby
{ "resource": "" }
q17327
WEBrick.HTTPServer.mount
train
# Mounts +servlet+ at +dir+, remembering any construction +options+ for
# later instantiation.
def mount(dir, servlet, *options)
  @logger.debug(sprintf("%s is mounted on %s.", servlet.inspect, dir))
  @mount_tab[dir] = [servlet, options]
end
ruby
{ "resource": "" }
q17328
WEBrick.HTTPServer.search_servlet
train
# Resolves +path+ against the mount table, returning
# [servlet, options, script_name, path_info], or nil when nothing is
# mounted at the path's prefix.
def search_servlet(path)
  # @mount_tab.scan splits the path into mounted prefix and remainder —
  # presumably a longest-prefix match; see MountTable#scan.
  script_name, path_info = @mount_tab.scan(path)
  servlet, options = @mount_tab[script_name]
  if servlet
    [ servlet, options, script_name, path_info ]
  end
end
ruby
{ "resource": "" }
q17329
WEBrick.HTTPServer.virtual_host
train
# Registers +server+ as a virtual host, keeping the list ordered so the
# most specific configuration (BindAddress > Port > ServerName) matches
# first during lookup.
def virtual_host(server)
  @virtual_hosts << server
  @virtual_hosts = @virtual_hosts.sort_by do |s|
    specificity = 0
    specificity -= 4 if s[:BindAddress]
    specificity -= 2 if s[:Port]
    specificity -= 1 if s[:ServerName]
    specificity
  end
end
ruby
{ "resource": "" }
q17330
WEBrick.HTTPServer.lookup_server
train
# Returns the first virtual host whose BindAddress, Port and ServerName
# (or one of its ServerAlias patterns) all match the request; nil
# constraints match anything.
def lookup_server(req)
  @virtual_hosts.find do |host|
    next false unless host[:BindAddress].nil? || req.addr[3] == host[:BindAddress]
    next false unless host[:Port].nil? || req.port == host[:Port]
    name_ok  = host[:ServerName].nil? || req.host == host[:ServerName]
    alias_ok = !host[:ServerAlias].nil? && host[:ServerAlias].find { |h| h === req.host }
    name_ok || alias_ok
  end
end
ruby
{ "resource": "" }
q17331
WEBrick.HTTPServer.access_log
train
# Writes one access-log entry per configured [logger, format] pair.
def access_log(config, req, res)
  param = AccessLog::setup_params(config, req, res)
  @config[:AccessLog].each do |logger, fmt|
    logger << AccessLog::format(fmt + "\n", param)
  end
end
ruby
{ "resource": "" }
q17332
WEBrick.Utils.set_non_blocking
train
# Puts +io+ into non-blocking mode, preserving its existing status flags
# on platforms where F_GETFL is available.
def set_non_blocking(io)
  flags = File::NONBLOCK
  flags |= io.fcntl(Fcntl::F_GETFL) if defined?(Fcntl::F_GETFL)
  io.fcntl(Fcntl::F_SETFL, flags)
end
ruby
{ "resource": "" }
q17333
WEBrick.Utils.set_close_on_exec
train
# Marks +io+ close-on-exec where Fcntl supports it; no-op otherwise.
def set_close_on_exec(io)
  return unless defined?(Fcntl::FD_CLOEXEC)
  io.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
end
ruby
{ "resource": "" }
q17334
WEBrick.Utils.su
train
# Drops process privileges to +user+: supplementary groups, then gid,
# then uid. Warns instead on platforms without Etc.
def su(user)
  if defined?(Etc)
    pw = Etc.getpwnam(user)
    Process::initgroups(user, pw.gid)
    Process::Sys::setgid(pw.gid)
    Process::Sys::setuid(pw.uid)
  else
    warn("WEBrick::Utils::su doesn't work on this platform")
  end
end
ruby
{ "resource": "" }
q17335
WEBrick.Utils.random_string
train
# Returns a random string of +len+ characters drawn from RAND_CHARS.
def random_string(len)
  limit = RAND_CHARS.bytesize
  Array.new(len) { RAND_CHARS[rand(limit)] }.join
end
ruby
{ "resource": "" }
q17336
WEBrick.Utils.timeout
train
# Runs the block, raising +exception+ when it exceeds +seconds+.
# A nil or zero timeout disables the limit and yields immediately.
def timeout(seconds, exception=Timeout::Error)
  return yield if seconds.nil? || seconds.zero?
  # raise ThreadError, "timeout within critical session" if Thread.critical
  handler_id = TimeoutHandler.register(seconds, exception)
  begin
    yield(seconds)
  ensure
    TimeoutHandler.cancel(handler_id)
  end
end
ruby
{ "resource": "" }
q17337
Rake.FtpUploader.makedirs
train
# Creates directory +path+ on the FTP server, making each parent
# component in turn. Directories already created in this session are
# remembered in @created; mkdir errors are deliberately ignored (the
# directory may already exist remotely).
def makedirs(path)
  route = []
  File.split(path).each do |dir|
    route << dir
    current_dir = File.join(route)
    if @created[current_dir].nil?
      @created[current_dir] = true
      $stderr.puts "Creating Directory  #{current_dir}" if @verbose
      @ftp.mkdir(current_dir) rescue nil
    end
  end
end
ruby
{ "resource": "" }
q17338
Digest.Instance.file
train
# Feeds the contents of the file +name+ into the digest in 16KB chunks.
# Returns self so calls can be chained.
def file(name)
  File.open(name, "rb") do |f|
    buf = ""
    update buf while f.read(16384, buf)
  end
  self
end
ruby
{ "resource": "" }
q17339
OAuth.RequestToken.authorize_url
train
# Builds the authorization URL for this request token; nil when no token
# is present.
def authorize_url(params = nil)
  return nil unless self.token
  merged = (params || {}).merge(:oauth_token => self.token)
  build_authorize_url(consumer.authorize_url, merged)
end
ruby
{ "resource": "" }
q17340
OAuth.RequestToken.get_access_token
train
# Exchanges this request token for an access token via the consumer.
def get_access_token(options = {}, *arguments)
  url = consumer.access_token_url? ? consumer.access_token_url : consumer.access_token_path
  response = consumer.token_request(consumer.http_method, url, self, options, *arguments)
  OAuth::AccessToken.from_hash(consumer, response)
end
ruby
{ "resource": "" }
q17341
OAuth.RequestToken.build_authorize_url
train
# Merges +params+ into the query string of +base_url+ and returns the
# resulting URL string.
def build_authorize_url(base_url, params)
  uri = URI.parse(base_url.to_s)
  query = uri.query ? Hash[URI.decode_www_form(uri.query)] : {}
  # TODO doesn't handle array values correctly
  query.merge!(params) if params
  uri.query = URI.encode_www_form(query) unless query.empty?
  uri.to_s
end
ruby
{ "resource": "" }
q17342
OAuth::RequestProxy.Base.signature_base_string
train
# Joins the escaped HTTP method, normalized URI and normalized parameters
# into the OAuth signature base string.
def signature_base_string
  components = [method, normalized_uri, normalized_parameters]
  components.map { |component| escape(component) }.join("&")
end
ruby
{ "resource": "" }
q17343
OAuth::RequestProxy.Base.signed_uri
train
# Returns the URI with its parameters (OAuth ones included or excluded
# per +with_oauth+) appended. Prints a warning and returns nil when the
# request has not been signed yet.
def signed_uri(with_oauth = true)
  unless signed?
    STDERR.puts "This request has not yet been signed!"
    return
  end
  params = with_oauth ? parameters : non_oauth_parameters
  [uri, normalize(params)] * "?"
end
ruby
{ "resource": "" }
q17344
OAuth::RequestProxy.Base.oauth_header
train
# Builds the Authorization header value from the OAuth parameters,
# optionally prefixed with a realm.
def oauth_header(options = {})
  pairs = oauth_parameters.map { |key, value| "#{key}=\"#{escape(value)}\"" }
  realm = "realm=\"#{options[:realm]}\", " if options[:realm]
  "OAuth #{realm}#{pairs.join(', ')}"
end
ruby
{ "resource": "" }
q17345
OAuth.Consumer.create_signed_request
train
# Creates an HTTP request for +http_method+/+path+ and signs it with this
# consumer (and optional +token+).
def create_signed_request(http_method, path, token = nil, request_options = {}, *arguments)
  request = create_http_request(http_method, path, *arguments)
  sign!(request, token, request_options)
  request
end
ruby
{ "resource": "" }
q17346
OAuth.Consumer.token_request
train
# Performs a token request against +path+ and returns the response
# parameters parsed into a Hash (each key present both as Symbol and
# String). Follows redirects (guarding against self-redirects), raises
# OAuth::Unauthorized on 4xx and calls response.error! otherwise.
def token_request(http_method, path, token = nil, request_options = {}, *arguments)
  request_options[:token_request] ||= true
  response = request(http_method, path, token, request_options, *arguments)
  case response.code.to_i
  when (200..299)
    if block_given?
      yield response.body
    else
      # symbolize keys
      # TODO this could be considered unexpected behavior; symbols or not?
      # TODO this also drops subsequent values from multi-valued keys
      CGI.parse(response.body).inject({}) do |h, (k, v)|
        h[k.strip.to_sym] = v.first
        h[k.strip] = v.first
        h
      end
    end
  when (300..399)
    # this is a redirect
    uri = URI.parse(response['location'])
    response.error! if uri.path == path # careful of those infinite redirects
    # BUG FIX: forward the extra arguments as a splat — previously the
    # whole +arguments+ array was passed as a single positional argument.
    self.token_request(http_method, uri.path, token, request_options, *arguments)
  when (400..499)
    raise OAuth::Unauthorized, response
  else
    response.error!
  end
end
ruby
{ "resource": "" }
q17347
OAuth.Consumer.sign!
train
# Signs +request+ in place using this consumer's options merged with any
# per-request overrides.
def sign!(request, token = nil, request_options = {})
  merged_options = options.merge(request_options)
  request.oauth!(http, self, token, merged_options)
end
ruby
{ "resource": "" }
q17348
OAuth.Consumer.signature_base_string
train
# Returns the OAuth signature base string for +request+, using this
# consumer's options merged with any per-request overrides.
def signature_base_string(request, token = nil, request_options = {})
  merged_options = options.merge(request_options)
  request.signature_base_string(http, self, token, merged_options)
end
ruby
{ "resource": "" }
q17349
OAuth.Consumer.create_http_request
train
# Builds a Net::HTTP request object for +http_method+ and +path+.
# For :post/:put/:patch the first extra argument is the body (Hash →
# form-encoded, IO-like → streamed with an explicit Content-Length,
# anything else → to_s); an optional trailing Hash supplies headers.
# Raises ArgumentError for unknown methods or un-sizable streams.
def create_http_request(http_method, path, *arguments)
  http_method = http_method.to_sym
  if [:post, :put, :patch].include?(http_method)
    data = arguments.shift
  end
  # if the base site contains a path, add it now
  # only add if the site host matches the current http object's host
  # (in case we've specified a full url for token requests)
  uri = URI.parse(site)
  path = uri.path + path if uri.path && uri.path != '/' && uri.host == http.address
  headers = arguments.first.is_a?(Hash) ? arguments.shift : {}
  case http_method
  when :post
    request = Net::HTTP::Post.new(path,headers)
    request["Content-Length"] = '0' # Default to 0
  when :put
    request = Net::HTTP::Put.new(path,headers)
    request["Content-Length"] = '0' # Default to 0
  when :patch
    request = Net::HTTP::Patch.new(path,headers)
    request["Content-Length"] = '0' # Default to 0
  when :get
    request = Net::HTTP::Get.new(path,headers)
  when :delete
    request = Net::HTTP::Delete.new(path,headers)
  when :head
    request = Net::HTTP::Head.new(path,headers)
  else
    raise ArgumentError, "Don't know how to handle http_method: :#{http_method.to_s}"
  end
  if data.is_a?(Hash)
    request.body = OAuth::Helper.normalize(data)
    request.content_type = 'application/x-www-form-urlencoded'
  elsif data
    if data.respond_to?(:read)
      request.body_stream = data
      if data.respond_to?(:length)
        request["Content-Length"] = data.length.to_s
      elsif data.respond_to?(:stat) && data.stat.respond_to?(:size)
        request["Content-Length"] = data.stat.size.to_s
      else
        raise ArgumentError, "Don't know how to send a body_stream that doesn't respond to .length or .stat.size"
      end
    else
      request.body = data.to_s
      request["Content-Length"] = request.body.length.to_s
    end
  end
  request
end
ruby
{ "resource": "" }
q17350
OAuth.Helper.escape
train
# Percent-escapes +value+ per OAuth encoding rules. When the string's
# bytes are invalid for its encoding (ArgumentError), retries after
# forcing the encoding to UTF-8. Note force_encoding mutates the string
# returned by to_str — for String inputs that is the caller's object.
def escape(value)
  _escape(value.to_s.to_str)
rescue ArgumentError
  _escape(value.to_s.to_str.force_encoding(Encoding::UTF_8))
end
ruby
{ "resource": "" }
q17351
OAuth.Helper.normalize
train
# Normalizes a parameter Hash into the sorted, escaped "k=v&k2=v2" form
# used for OAuth signatures. Array values yield one pair per element
# (sorted); nested Hashes are delegated to normalize_nested_query.
def normalize(params)
  params.sort.map do |key, value|
    case value
    when Array
      # make sure the array has an element so we don't lose the key
      value << nil if value.empty?
      # multiple values were provided for a single key
      value.sort.collect { |v| [escape(key), escape(v)] * "=" }
    when Hash
      normalize_nested_query(value, key)
    else
      [escape(key), escape(value)] * "="
    end
  end * "&"
end
ruby
{ "resource": "" }
q17352
OAuth.Server.create_consumer
train
# Creates a Consumer with freshly generated credentials, pointed at this
# server's OAuth endpoints.
def create_consumer
  key, secret = generate_credentials
  Consumer.new(key, secret, {
    :site => base_url,
    :request_token_path => request_token_path,
    :authorize_path => authorize_path,
    :access_token_path => access_token_path
  })
end
ruby
{ "resource": "" }
q17353
GELF.Notifier.convert_hoptoad_keys_to_graylog2
train
# Fills in Graylog2's 'short_message' from Hoptoad-style 'error_class'
# and 'error_message' keys (which are removed) when no short message is
# already present.
def convert_hoptoad_keys_to_graylog2(hash)
  return unless hash['short_message'].to_s.empty?
  return unless hash.has_key?('error_class') && hash.has_key?('error_message')
  hash['short_message'] = hash.delete('error_class') + ': ' + hash.delete('error_message')
end
ruby
{ "resource": "" }
q17354
Gretel.Crumb.parent
train
# With no arguments, returns the parent crumb. With arguments, sets the
# parent to a new crumb built from the given key and remaining options.
def parent(*args)
  return @parent if args.empty?
  key, *rest = args
  @parent = Gretel::Crumb.new(context, key, *rest)
end
ruby
{ "resource": "" }
q17355
Gretel.Link.method_missing
train
# Exposes link options as methods: `foo` returns options[:foo], while
# `foo?` reports whether options[:foo] is present.
def method_missing(method, *args, &block)
  if method =~ /(.+)\?$/
    options[Regexp.last_match(1).to_sym].present?
  else
    options[method]
  end
end
ruby
{ "resource": "" }
q17356
Gretel.ViewHelpers.with_breadcrumb
train
# Temporarily swaps in a breadcrumb renderer for +key+ while the block
# runs, then restores the previous renderer.
def with_breadcrumb(key, *args, &block)
  previous_renderer = @_gretel_renderer
  @_gretel_renderer = Gretel::Renderer.new(self, key, *args)
  yield
  @_gretel_renderer = previous_renderer
end
ruby
{ "resource": "" }
q17357
Gretel.Renderer.render
train
# Renders the breadcrumbs: resolves the render options, builds the link
# list, and wraps everything in a LinkCollection.
def render(options)
  resolved = options_for_render(options)
  LinkCollection.new(context, links_for_render(resolved), resolved)
end
ruby
{ "resource": "" }
q17358
Gretel.Renderer.options_for_render
train
# Resolves render options: defaults, then style-specific options, then
# caller options (caller wins).
def options_for_render(options = {})
  chosen_style = options[:style] || DEFAULT_OPTIONS[:style]
  style_options = options_for_style(chosen_style)
  DEFAULT_OPTIONS.merge(style_options).merge(options)
end
ruby
{ "resource": "" }
q17359
Gretel.Renderer.links_for_render
train
# Builds the final link list for rendering: optionally prepends the root
# crumb, points the last link at the actual request path, drops a lone
# breadcrumb unless configured to show it, and marks the last link as
# current. Relies on ActiveSupport's #exclude? and #try.
def links_for_render(options = {})
  out = links.dup
  # Handle autoroot
  if options[:autoroot] && out.map(&:key).exclude?(:root) && Gretel::Crumbs.crumb_defined?(:root)
    out.unshift *Gretel::Crumb.new(context, :root).links
  end
  # Set current link to actual path
  if options[:link_current_to_request_path] && out.any? && request
    out.last.url = request.fullpath
  end
  # Handle show root alone
  if out.size == 1 && !options[:display_single_fragment]
    out.shift
  end
  # Set last link to current
  out.last.try(:current!)
  out
end
ruby
{ "resource": "" }
q17360
Gretel.Renderer.links
train
# Memoized list of links for the current breadcrumb, including all its
# parents (root first). Empty when no breadcrumb key has been set.
def links
  @links ||= if @breadcrumb_key.present?
    # Reload breadcrumbs configuration if needed
    Gretel::Crumbs.reload_if_needed
    # Get breadcrumb set by the `breadcrumb` method
    crumb = Gretel::Crumb.new(context, breadcrumb_key, *breadcrumb_args)
    # Links of first crumb
    links = crumb.links.dup
    # Get parent links
    links.unshift *parent_links_for(crumb)
    links
  else
    []
  end
end
ruby
{ "resource": "" }
q17361
Gretel.Renderer.parent_links_for
train
# Walks up the parent chain of +crumb+, collecting each ancestor's links
# ordered from the root down (the crumb's own links are not included).
def parent_links_for(crumb)
  collected = []
  node = crumb
  while node = node.parent
    collected.unshift(*node.links)
  end
  collected
end
ruby
{ "resource": "" }
q17362
Gretel.Resettable.reset!
train
# Clears all instance variables on the receiver and recursively resets
# any nested constants that themselves respond to reset!.
def reset!
  instance_variables.each { |var| remove_instance_variable var }
  constants.each do |c|
    c = const_get(c)
    c.reset! if c.respond_to?(:reset!)
  end
end
ruby
{ "resource": "" }
q17363
Pkgr.Builder.extract
train
# Extracts the application tarball into the source directory. A tarball
# of "-" is read from standard input instead of disk. Raises when tar
# exits non-zero.
def extract
  FileUtils.mkdir_p source_dir
  opts = {}
  if tarball == "-"
    # FIXME: not really happy with reading everything in memory
    opts[:input] = $stdin.read
  end
  tarball_extract = Mixlib::ShellOut.new("tar xzf #{tarball} -C #{source_dir}", opts)
  tarball_extract.logger = Pkgr.logger
  tarball_extract.run_command
  tarball_extract.error!
end
ruby
{ "resource": "" }
q17364
Pkgr.Builder.update_config
train
# Merges any .pkgr.yml from the app into the current config, propagates
# the result to the distribution (including a custom runner spec of the
# form "type-version"), and records the distribution target, source and
# build directories needed later by templates.
def update_config
  if File.exist?(config_file)
    Pkgr.debug "Loading #{distribution.slug} from #{config_file}."
    @config = Config.load_file(config_file, distribution.slug).merge(config)
    Pkgr.debug "Found .pkgr.yml file. Updated config is now: #{config.inspect}"
    # update distribution config
    distribution.config = @config
    # FIXME: make Config the authoritative source of the runner config (distribution only tells the default runner)
    if @config.runner
      type, *version = @config.runner.split("-")
      distribution.runner = Distributions::Runner.new(type, version.join("-"))
    end
  end
  config.distribution = distribution
  config.env.variables.push("TARGET=#{distribution.target}") # useful for templates that need to read files
  config.source_dir = source_dir
  config.build_dir = build_dir
end
ruby
{ "resource": "" }
q17365
Pkgr.Builder.check
train
# Validates the configuration and the target distribution, raising
# ConfigurationInvalid with all collected error messages when invalid.
def check
  unless config.valid?
    raise Errors::ConfigurationInvalid, config.errors.join("; ")
  end
  distribution.check
end
ruby
{ "resource": "" }
q17366
Pkgr.Builder.setup
train
# Installs every distribution template from within the build directory.
def setup
  Dir.chdir(build_dir) do
    distribution.templates.each { |template| template.install(config.sesame) }
  end
end
ruby
{ "resource": "" }
q17367
Pkgr.Builder.compile
train
# Runs the detected buildpack(s) over the app source: prepares the home,
# cache and env directories, runs the before hook, compiles and releases
# with each buildpack, then runs the after hook. Raises UnknownAppType
# when no buildpack detects the app.
def compile
  begin
    FileUtils.mkdir_p(app_home_dir)
  rescue Errno::EACCES => e
    # best effort — some buildpacks need the home dir, others don't
    Pkgr.logger.warn "Can't create #{app_home_dir.inspect}, which may be needed by some buildpacks."
  end
  FileUtils.mkdir_p(compile_cache_dir)
  FileUtils.mkdir_p(compile_env_dir)
  if buildpacks_for_app.size > 0
    run_hook config.before_hook
    buildpacks_for_app.each do |buildpack|
      puts "-----> #{buildpack.banner} app"
      buildpack.compile(source_dir, compile_cache_dir, compile_env_dir)
      buildpack.release(source_dir)
    end
    run_hook config.after_hook
  else
    raise Errors::UnknownAppType, "Can't find a buildpack for your app"
  end
end
ruby
{ "resource": "" }
q17368
Pkgr.Builder.write_init
train
# Generates init scripts for each Procfile process using the
# distribution's initializer templates, one per process, each rendered
# with a per-process copy of the config.
def write_init
  FileUtils.mkdir_p scaling_dir
  Dir.chdir(scaling_dir) do
    distribution.initializers_for(config.name, procfile_entries).each do |(process, file)|
      process_config = config.dup
      process_config.process_name = process.name
      process_config.process_command = process.command
      file.install(process_config.sesame)
    end
  end
end
ruby
{ "resource": "" }
q17369
Pkgr.Builder.setup_crons
train
# Resolves the configured cron files into Cron entries with absolute
# source paths and destinations under the distribution's cron directory,
# logging each mapping.
def setup_crons
  crons_dir = File.join("/", distribution.crons_dir)
  config.crons.map! do |cron_path|
    source = File.expand_path(cron_path, config.home)
    destination = File.join(crons_dir, File.basename(cron_path))
    Cron.new(source, destination)
  end
  config.crons.each do |cron|
    puts "-----> [cron] #{cron.source} => #{cron.destination}"
  end
end
ruby
{ "resource": "" }
q17370
Pkgr.Builder.package
train
# Builds the package with fpm and verifies the result, retrying the
# whole build up to +remaining_attempts+ more times when verification
# fails, then re-raising.
def package(remaining_attempts = 3)
  app_package = Mixlib::ShellOut.new(fpm_command)
  app_package.logger = Pkgr.logger
  app_package.run_command
  app_package.error!
  begin
    verify
  rescue Mixlib::ShellOut::ShellCommandFailed
    raise if remaining_attempts <= 0
    package(remaining_attempts - 1)
  end
end
ruby
{ "resource": "" }
q17371
Pkgr.Builder.buildpacks_for_app
train
# Memoized list of buildpacks that apply to the app source. In :custom
# mode every buildpack that detects the app is used; otherwise only the
# first detecting buildpack (as a one-element array, or empty).
def buildpacks_for_app
  raise "#{source_dir} does not exist" unless File.directory?(source_dir)
  @buildpacks_for_app ||= begin
    mode, buildpacks = distribution.buildpacks
    case mode
    when :custom
      buildpacks.find_all do |buildpack|
        buildpack.setup(config.edge, config.home)
        buildpack.detect(source_dir)
      end
    else
      [buildpacks.find do |buildpack|
        buildpack.setup(config.edge, config.home)
        buildpack.detect(source_dir)
      end].compact
    end
  end
end
ruby
{ "resource": "" }
q17372
DropboxApi.OptionsValidator.validate_options
train
# Raises ArgumentError on the first key in +options+ (string or symbol)
# that is not listed in +valid_option_keys+.
def validate_options(valid_option_keys, options)
  options.keys.each do |key|
    next if valid_option_keys.include?(key.to_sym)
    raise ArgumentError, "Invalid option `#{key}`"
  end
end
ruby
{ "resource": "" }
q17373
DropboxApi::Metadata.Base.to_hash
train
# Serializes the declared fields into a Hash keyed by field name string,
# omitting fields whose serialized value is nil.
def to_hash
  self.class.fields.keys.each_with_object({}) do |field_name, result|
    value = serialized_field(field_name)
    result[field_name.to_s] = value unless value.nil?
  end
end
ruby
{ "resource": "" }
q17374
GoogleIDToken.Validator.check
train
# Validates a Google ID token against audience +aud+ (and optional
# client id +cid+), retrying once after refreshing the cached Google
# certificates. Raises SignatureError when no key verifies the token and
# CertificateError when the keys cannot be refreshed.
def check(token, aud, cid = nil)
  synchronize do
    payload = check_cached_certs(token, aud, cid)
    unless payload
      # no certs worked, might've expired, refresh
      if refresh_certs
        payload = check_cached_certs(token, aud, cid)
        unless payload
          raise SignatureError, 'Token not verified as issued by Google'
        end
      else
        raise CertificateError, 'Unable to retrieve Google public keys'
      end
    end
    payload
  end
end
ruby
{ "resource": "" }
q17375
GoogleIDToken.Validator.check_cached_certs
train
# Tries each cached certificate until one verifies the token's RS256
# signature, then validates the aud, cid and iss claims. Returns the
# payload Hash, or nil when no certificate verified the token. Claim
# mismatches and expired signatures raise dedicated errors.
def check_cached_certs(token, aud, cid)
  payload = nil
  # find first public key that validates this token
  @certs.detect do |key, cert|
    begin
      public_key = cert.public_key
      decoded_token = JWT.decode(token, public_key, !!public_key, { :algorithm => 'RS256' })
      payload = decoded_token.first
      # in Feb 2013, the 'cid' claim became the 'azp' claim per changes
      # in the OIDC draft. At some future point we can go all-azp, but
      # this should keep everything running for a while
      if payload['azp']
        payload['cid'] = payload['azp']
      elsif payload['cid']
        payload['azp'] = payload['cid']
      end
      payload
    rescue JWT::ExpiredSignature
      raise ExpiredTokenError, 'Token signature is expired'
    rescue JWT::DecodeError
      nil # go on, try the next cert
    end
  end
  if payload
    if !(payload.has_key?('aud') && payload['aud'] == aud)
      raise AudienceMismatchError, 'Token audience mismatch'
    end
    if cid && payload['cid'] != cid
      raise ClientIDMismatchError, 'Token client-id mismatch'
    end
    if !GOOGLE_ISSUERS.include?(payload['iss'])
      raise InvalidIssuerError, 'Token issuer mismatch'
    end
    payload
  else
    nil
  end
end
ruby
{ "resource": "" }
q17376
Gimli.Wkhtmltopdf.output_pdf
train
# Pipes +html+ into a wkhtmltopdf subprocess that writes the PDF to
# +filename+, returning whatever the process prints to stdout.
def output_pdf(html, filename)
  args = command(filename)
  invoke = args.join(' ')
  IO.popen(invoke, "wb+") do |pdf|
    pdf.puts(html)
    pdf.close_write
    # read everything the process emits before it exits
    pdf.gets(nil)
  end
end
ruby
{ "resource": "" }
q17377
Gimli.Converter.convert!
train
# Renders each input file to HTML (with image URLs rewritten) and emits
# either one PDF per file, or — in merge mode — a single PDF with page
# breaks between the files. HTML is echoed in debug mode.
def convert!
  merged_contents = []
  @files.each do |file|
    markup = Markup::Renderer.new file, @config.remove_front_matter
    html = convert_image_urls markup.render, file.filename
    if @config.merge
      # page break before every file except the first
      html = "<div class=\"page-break\"></div>#{html}" unless merged_contents.empty?
      merged_contents << html
    else
      output_pdf(html, file)
    end
    puts html if @config.debug
  end
  unless merged_contents.empty?
    html = merged_contents.join
    output_pdf(html, nil)
  end
end
ruby
{ "resource": "" }
q17378
Gimli.Converter.convert_image_urls
train
# Rewrites relative <img> src attributes in +html+ to absolute paths
# based on the directory of +filename+; http(s) URLs are left untouched.
# Mutates and returns +html+.
def convert_image_urls(html, filename)
  base_dir = ::File.dirname(::File.expand_path(filename))
  html.scan(/<img[^>]+src="([^"]+)"/).each do |(src)|
    next if src =~ /^https?/
    html.gsub!(src, ::File.expand_path(src, base_dir))
  end
  html
end
ruby
{ "resource": "" }
q17379
Gimli.Converter.output_pdf
train
# Prepares the final HTML (head section, stylesheets, optional cover
# page) and hands it to wkhtmltopdf targeting the computed output file.
def output_pdf(html, filename)
  html = add_head html
  load_stylesheets
  generate_cover!
  append_stylesheets html
  puts @wkhtmltopdf.command(output_file(filename)).join(' ') if @config.debug
  @wkhtmltopdf.output_pdf html, output_file(filename)
end
ruby
{ "resource": "" }
q17380
Gimli.Converter.load_stylesheets
train
# Collects the stylesheets to apply: the bundled default stylesheet plus
# the user-supplied one when it exists on disk.
def load_stylesheets
  # Load standard stylesheet
  style = ::File.expand_path("../../../config/style.css", __FILE__)
  @stylesheets << style
  # File.exists? was deprecated for years and removed in Ruby 3.2;
  # File.exist? is the supported spelling.
  @stylesheets << stylesheet if ::File.exist?(stylesheet)
end
ruby
{ "resource": "" }
q17381
Gimli.Converter.output_file
train
# Computes the output PDF path. With a file, uses its name (overridden by
# the configured output filename for single-file runs); without one,
# falls back to a timestamp, the last file's name (single file or merge
# mode), or the configured output filename.
def output_file(file = nil)
  if file
    name = file.name
    name = @config.output_filename if !@config.output_filename.nil? && @files.length == 1
  else
    name = Time.now.to_s.split(' ').join('_')
    name = @files.last.name if @files.length == 1 || @config.merge
    name = @config.output_filename unless @config.output_filename.nil?
  end
  ::File.join(output_dir, "#{name}.pdf")
end
ruby
{ "resource": "" }
q17382
Gimli.Converter.generate_cover!
train
# Renders the configured cover page (if any) into @coverfile as a complete
# standalone HTML document and closes the file. No-op when no cover is set.
def generate_cover!
  return unless @config.cover
  cover_markup = Markup::Renderer.new(MarkupFile.new(@config.cover))
  html = "<div class=\"cover\">\n#{cover_markup.render}\n</div>"
  append_stylesheets(html)
  html = add_head(html)
  @coverfile.write(html)
  @coverfile.close
end
ruby
{ "resource": "" }
q17383
Google.Calendar.find_events_in_range
train
# Looks up events whose start time falls within [start_min, start_max].
#
# start_min/start_max - Time objects bounding the search window
# options             - extra query options, see #parse_options
#
# Returns whatever #event_lookup returns for the built query string.
def find_events_in_range(start_min, start_max, options = {})
  window = "timeMin=#{encode_time(start_min)}&timeMax=#{encode_time(start_max)}"
  event_lookup("?#{window}#{parse_options(options)}")
end
ruby
{ "resource": "" }
q17384
Google.Calendar.find_events_by_extended_properties
train
# Looks up events matching the given shared/private extended properties.
#
# extended_properties - hash with 'shared' / 'private' keys, see
#                       #parse_extended_properties
# options             - extra query options, see #parse_options
def find_events_by_extended_properties(extended_properties, options = {})
  event_lookup("?#{parse_extended_properties(extended_properties)}#{parse_options(options)}")
end
ruby
{ "resource": "" }
q17385
Google.Calendar.find_events_by_extended_properties_in_range
train
# Looks up events matching the given extended properties AND starting
# within [start_min, start_max].
def find_events_by_extended_properties_in_range(extended_properties, start_min, start_max, options = {})
  base_query = parse_extended_properties(extended_properties) + parse_options(options)
  window = "timeMin=#{encode_time(start_min)}&timeMax=#{encode_time(start_max)}"
  # Only join with '&' when there is something to join onto.
  separator = base_query.empty? ? '' : '&'
  event_lookup("?#{base_query}#{separator}#{window}")
end
ruby
{ "resource": "" }
q17386
Google.Calendar.find_or_create_event_by_id
train
# Finds the event with +id+, or builds a new Event when it does not exist
# (carrying the requested id, when one was given). The event is then
# yielded to the block via #setup_event.
def find_or_create_event_by_id(id, &blk)
  existing = id ? find_event_by_id(id)[0] : nil
  event =
    if existing
      existing
    elsif id
      # No such event yet; create one that keeps the caller-chosen id.
      Event.new(id: id, new_event_with_id_specified: true)
    else
      Event.new
    end
  setup_event(event, &blk)
end
ruby
{ "resource": "" }
q17387
Google.Calendar.save_event
train
# Persists +event+ to the API: POST for new events (optionally via the
# quickAdd endpoint), PUT for updates.
#
# Fix: the quickAdd free-form title was interpolated raw into the query
# string; titles containing spaces, '&' or '#' produced malformed URLs.
# It is now URL-encoded. NOTE(review): assumes callers pass unencoded
# titles — confirm no caller pre-escapes.
def save_event(event)
  require 'cgi' # stdlib; needed for URL-encoding the quickAdd text
  method = event.new_event? ? :post : :put
  body = event.use_quickadd? ? nil : event.to_json
  notifications = "sendNotifications=#{event.send_notifications?}"
  query_string =
    if event.use_quickadd?
      "/quickAdd?#{notifications}&text=#{CGI.escape(event.title.to_s)}"
    elsif event.new_event?
      "?#{notifications}"
    else # update existing event.
      "/#{event.id}?#{notifications}"
    end
  send_events_request(query_string, method, body)
end
ruby
{ "resource": "" }
q17388
Google.Calendar.parse_options
train
# Builds the shared query-string tail for event lookups. # :nodoc
#
# options - :max_results (default 25), :order_by ('startTime' or 'updated',
#           default 'startTime'), :expand_recurring_events (default true),
#           :query (optional free-text search term)
#
# Fix: `options[:expand_recurring_events] ||= true` clobbered an explicit
# false, making it impossible to disable expansion of recurring events;
# the method also mutated the caller's options hash. Defaults are now read
# without writing back, and only nil falls back to the default.
# NOTE(review): :query is still interpolated without URL-encoding —
# multi-word queries presumably need escaping; confirm against callers.
def parse_options(options) # :nodoc
  max_results = options[:max_results] || 25
  order_by = options[:order_by] || 'startTime' # other option is 'updated'
  expand = options[:expand_recurring_events]
  expand = true if expand.nil?
  query_string = "&orderBy=#{order_by}"
  query_string << "&maxResults=#{max_results}"
  query_string << "&singleEvents=#{expand}"
  query_string << "&q=#{options[:query]}" unless options[:query].nil?
  query_string
end
ruby
{ "resource": "" }
q17389
Google.Calendar.parse_extended_properties
train
# Serializes shared/private extended properties into query parameters. # :nodoc
#
# extended_properties - hash keyed by the strings 'shared' and 'private',
#                       each mapping to a {key => value} hash
#
# Returns e.g. "sharedExtendedProperty=k%3Dv&privateExtendedProperty=a%3Db"
# ('%3D' is the URL-encoded '=' inside each key=value pair).
def parse_extended_properties(extended_properties) # :nodoc
  %w[shared private].each_with_object([]) do |scope, parts|
    props = extended_properties[scope]
    next unless props
    param = scope == 'shared' ? 'sharedExtendedProperty' : 'privateExtendedProperty'
    parts << props.map { |key, value| "#{param}=#{key}%3D#{value}" }.join('&')
  end.join('&')
end
ruby
{ "resource": "" }
q17390
Google.Calendar.event_lookup
train
# Fetches events for the given query string and builds Event objects from
# the parsed feed. Also caches the calendar summary in @summary. #:nodoc:
#
# Returns an array of events; [] when none match or the calendar/query
# yields a 404 (Google::HTTPNotFound).
def event_lookup(query_string = '') #:nodoc:
  response = send_events_request(query_string, :get)
  feed = JSON.parse(response.body)
  @summary = feed['summary']
  events = Event.build_from_google_feed(feed, self) || []
  return [] if events.empty?
  events.length > 1 ? events : [events[0]]
rescue Google::HTTPNotFound
  []
end
ruby
{ "resource": "" }
q17391
Google.Freebusy.json_for_query
train
# Builds the JSON request body for a freebusy query.
#
# calendar_ids - array of calendar id strings
# start_time   - Time lower bound of the window
# end_time     - Time upper bound of the window
#
# Returns a JSON string: {"items":[{"id":...},...],"timeMin":...,"timeMax":...}
#
# Fix: Time#utc converts the receiver to UTC *in place*, silently mutating
# the caller's Time objects; getutc returns a UTC copy instead.
def json_for_query(calendar_ids, start_time, end_time)
  {
    items: calendar_ids.map { |id| { id: id } },
    timeMin: start_time.getutc.iso8601,
    timeMax: end_time.getutc.iso8601
  }.to_json
end
ruby
{ "resource": "" }
q17392
Google.Connection.send
train
# Performs an authenticated HTTP request against the API.
#
# path    - path appended to BASE_URI
# method  - HTTP verb symbol (:get, :post, ...)
# content - request body (JSON string; defaults to empty)
#
# Returns the response after #check_for_errors has vetted it (which raises
# on error statuses).
#
# NOTE(review): this overrides Object#send for instances of this class —
# presumably intentional in this gem; confirm nothing relies on dynamic
# dispatch via #send on a Connection.
def send(path, method, content = '')
  response = @client.fetch_protected_resource(
    :uri => BASE_URI + path,
    :method => method,
    :body => content,
    :headers => { 'Content-type' => 'application/json' }
  )
  check_for_errors(response)
  response
end
ruby
{ "resource": "" }
q17393
Google.Connection.parse_403_error
train
# Maps a 403 response's error message onto the matching exception class and
# raises it with the raw response body as the message. Unknown messages
# (including "Forbidden") fall back to ForbiddenError.
def parse_403_error(response)
  message = JSON.parse(response.body)["error"]["message"]
  error_class = {
    "Daily Limit Exceeded" => DailyLimitExceededError,
    "User Rate Limit Exceeded" => UserRateLimitExceededError,
    "Rate Limit Exceeded" => RateLimitExceededError,
    "Calendar usage limits exceeded." => CalendarUsageLimitExceededError
  }.fetch(message, ForbiddenError)
  raise error_class, response.body
end
ruby
{ "resource": "" }
q17394
Google.Event.all_day?
train
# Whether this event is an all-day event: its duration is a whole number of
# days AND it starts exactly at local midnight.
#
# NOTE(review): string start times are parsed as-is while Time values are
# duplicated and converted to UTC before the local-midnight comparison —
# this asymmetry is preserved from the original; confirm it is intended.
def all_day?
  moment = @start_time.is_a?(String) ? Time.parse(@start_time) : @start_time.dup.utc
  whole_days = (duration % 86_400).zero?
  whole_days && moment == Time.local(moment.year, moment.month, moment.day)
end
ruby
{ "resource": "" }
q17395
Google.Event.to_json
train
# Serializes the event to the JSON body expected by the Google Calendar API.
#
# Includes the id only when present; adds timezone info to start/end when
# #timezone_needed?; and merges in recurrence, color, attendee and
# extended-property attributes built by their respective helpers.
def to_json
  attributes = {
    "summary" => title,
    "visibility" => visibility,
    "transparency" => transparency,
    "description" => description,
    "location" => location,
    "start" => time_or_all_day(start_time),
    "end" => time_or_all_day(end_time),
    "reminders" => reminders_attributes,
    "guestsCanInviteOthers" => guests_can_invite_others,
    "guestsCanSeeOtherGuests" => guests_can_see_other_guests
  }
  attributes["id"] = id if id
  if timezone_needed?
    zone = local_timezone_attributes
    attributes['start'].merge!(zone)
    attributes['end'].merge!(zone)
  end
  [recurrence_attributes, color_attributes,
   attendees_attributes, extended_properties_attributes].each do |extra|
    attributes.merge!(extra)
  end
  JSON.generate(attributes)
end
ruby
{ "resource": "" }
q17396
Google.Event.attendees_attributes
train
# Builds the "attendees" attribute hash, keeping only the keys the API
# accepts (displayName, email, responseStatus) from each attendee hash.
# Returns {} when no attendees are set.
def attendees_attributes
  return {} unless @attendees
  allowed_keys = %w[displayName email responseStatus]
  sanitized = @attendees.map do |attendee|
    attendee.select { |key, _value| allowed_keys.include?(key) }
  end
  { "attendees" => sanitized }
end
ruby
{ "resource": "" }
q17397
Google.Event.local_timezone_attributes
train
# Builds the "timeZone" attribute from the machine's local zone.
#
# NOTE(review): maps the zone *abbreviation* (e.g. "CET") to an IANA name
# via TimezoneParser and arbitrarily takes the last match — abbreviations
# are ambiguous, so this can pick the wrong region; confirm acceptable.
def local_timezone_attributes
  abbreviation = Time.now.getlocal.zone
  iana_name = TimezoneParser::getTimezones(abbreviation).last
  { "timeZone" => iana_name }
end
ruby
{ "resource": "" }
q17398
Google.Event.recurrence_attributes
train
# Builds the "recurrence" attribute (an RRULE string) from @recurrence.
# Returns {} for non-recurring events.
#
# Fixes: (1) the :until Time was formatted with a literal 'Z' suffix
# without first converting to UTC, so local times were mislabelled as UTC;
# getutc now converts (non-destructively). (2) @recurrence was mutated and
# then "restored" via a lossy Time.parse round-trip; the method now works
# on a shallow copy and leaves @recurrence untouched.
def recurrence_attributes
  return {} unless is_recurring_event?
  recurrence = @recurrence.dup
  if (until_time = recurrence[:until])
    recurrence[:until] = until_time.getutc.strftime('%Y%m%dT%H%M%SZ')
  end
  # upcase covers keys and values alike (e.g. freq=weekly -> FREQ=WEEKLY),
  # matching the RRULE grammar.
  rrule = "RRULE:" + recurrence.collect { |k, v| "#{k}=#{v}" }.join(';').upcase
  { "recurrence" => [rrule] }
end
ruby
{ "resource": "" }
q17399
Google.CalendarList.fetch_entries
train
# Fetches the user's calendar list and builds CalendarListEntry objects
# from the feed. Returns nil on a non-200 response or an empty body.
def fetch_entries
  response = @connection.send("/users/me/calendarList", :get)
  usable = response.status == 200 && !response.body.empty?
  return nil unless usable
  CalendarListEntry.build_from_google_feed(JSON.parse(response.body), @connection)
end
ruby
{ "resource": "" }