_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q22100
Moped.Node.refresh
train
# Refreshes this node's view of itself by running ismaster against the
# admin database and reconfiguring from the reply.
def refresh
  return unless address.resolve(self)
  begin
    @refreshed_at = Time.now
    configure(command("admin", ismaster: 1))
    if !primary? && executing?(:ensure_primary)
      raise Errors::ReplicaSetReconfigured.new("#{inspect} is no longer the primary node.", {})
    elsif !messagable?
      # Neither primary nor secondary - mark it down, since it is probably
      # a recovering node within the replica set.
      down!
    end
  rescue Timeout::Error
    down!
  end
end
ruby
{ "resource": "" }
q22101
Moped.Node.remove
train
# Sends a delete message for documents matching +selector+.
def remove(database, collection, selector, concern, options = {})
  delete_op = Protocol::Delete.new(database, collection, selector, options)
  write(delete_op, concern)
end
ruby
{ "resource": "" }
q22102
Moped.Node.update
train
# Sends an update message applying +change+ to documents matching +selector+.
def update(database, collection, selector, change, concern, options = {})
  update_op = Protocol::Update.new(database, collection, selector, change, options)
  write(update_op, concern)
end
ruby
{ "resource": "" }
q22103
Moped.Node.connect
train
# Connects the given connection and records the elapsed time as this
# node's latency; clears the down marker. Always returns true.
def connect(conn)
  started_at = Time.now
  conn.connect
  # NOTE(review): wall-clock latency; a monotonic clock would be immune
  # to clock adjustments - confirm before changing.
  @latency = Time.now - started_at
  @down_at = nil
  true
end
ruby
{ "resource": "" }
q22104
Moped.Node.discover
train
# Adds each given address as a peer node (deduplicated), sharing this
# node's credentials with the new peers.
def discover(*nodes)
  nodes.flatten.compact.each do |address|
    candidate = Node.new(address, options)
    candidate.credentials.merge!(@credentials)
    peers.push(candidate) unless peers.include?(candidate)
  end
end
ruby
{ "resource": "" }
q22105
Moped.Node.flush
train
# Flushes queued operations to the connection in a single batch.
# +ops+ is an array of [operation, callback] pairs (defaults to the queue).
# Replies are zipped back to their callbacks; the value of the last
# callback (or raw reply) is returned. The queue is always cleared via
# ensure, even when writing or receiving raises.
def flush(ops = queue) operations, callbacks = ops.transpose logging(operations) do ensure_connected do |conn| conn.write(operations) replies = conn.receive_replies(operations) replies.zip(callbacks).map do |reply, callback| callback ? callback[reply] : reply end.last end end ensure ops.clear end
ruby
{ "resource": "" }
q22106
Support.ReplicaSetSimulator.start
train
# Starts every simulated node, then spawns a worker thread that proxies
# each incoming client connection to the real mongo instance. The loop
# runs until :shutdown is thrown; when no client is waiting it yields the
# scheduler with Thread.pass. abort_on_exception makes worker failures
# crash the test process instead of dying silently.
def start @nodes.each(&:start) @worker = Thread.start do Thread.abort_on_exception = true catch(:shutdown) do loop do Moped.logger.debug "replica_set: waiting for next client" server, client = @manager.next_client if server Moped.logger.debug "replica_set: proxying incoming request to mongo" server.proxy(client, @mongo) else Moped.logger.debug "replica_set: no requests; passing" Thread.pass end end end end end
ruby
{ "resource": "" }
q22107
Support.ReplicaSetSimulator.initiate
train
# Elects one random node as primary and demotes the rest to secondaries.
# Returns [primary, secondaries].
def initiate
  primary, *secondaries = @nodes.shuffle
  primary.promote
  secondaries.each(&:demote)
  [primary, secondaries]
end
ruby
{ "resource": "" }
q22108
Moped.Cluster.nodes
train
# Returns the nodes currently participating in the replica set.
def nodes
  # Split the seeds into nodes due for a refresh (down past the boundary,
  # or with stale connection info) and nodes that are ready to use.
  stale, usable = seeds.partition { |node| refreshable?(node) }
  usable.concat(refresh(stale))
  # Only hand back nodes that are actually up.
  usable.reject(&:down?)
end
ruby
{ "resource": "" }
q22109
Moped.Cluster.refresh
train
# Refreshes the given nodes (default: all seeds) and, recursively, any
# newly discovered peers, returning the list of reachable nodes.
# Connection failures are swallowed so unreachable nodes are simply left
# out of the result; +seen+ guards against refreshing a node twice.
def refresh(nodes_to_refresh = seeds) refreshed_nodes = [] seen = {} # Set up a recursive lambda function for refreshing a node and it's peers. refresh_node = ->(node) do unless node.address.resolved begin node.refresh rescue Errors::ConnectionFailure end end unless seen[node] || !node.address.resolved seen[node] = true # Add the node to the global list of known nodes. seeds.push(node) unless seeds.include?(node) begin node.refresh # This node is good, so add it to the list of nodes to return. refreshed_nodes.push(node) unless refreshed_nodes.include?(node) # Now refresh any newly discovered peer nodes - this will also # remove nodes that are not included in the peer list. refresh_peers(node, &refresh_node) rescue Errors::ConnectionFailure # We couldn't connect to the node. end end end nodes_to_refresh.each(&refresh_node) refreshed_nodes end
ruby
{ "resource": "" }
q22110
Moped.Cluster.with_primary
train
# Yields the current primary node, or raises ConnectionFailure when no
# primary can be reached (including mid-call failures).
def with_primary(&block)
  if node = nodes.find(&:primary?)
    begin
      node.ensure_primary do
        return yield(node)
      end
    rescue Errors::ConnectionFailure, Errors::ReplicaSetReconfigured
      # Fall through to the raise below.
    end
  end
  raise Errors::ConnectionFailure, "Could not connect to a primary node for replica set #{inspect}"
end
ruby
{ "resource": "" }
q22111
Moped.Cluster.with_secondary
train
# Yields the first secondary node that accepts the block, trying each
# available secondary in turn; raises when all of them fail.
def with_secondary(&block)
  candidates = available_secondary_nodes
  while node = candidates.shift
    begin
      return yield(node)
    rescue Errors::ConnectionFailure, Errors::ReplicaSetReconfigured
      next
    end
  end
  raise Errors::ConnectionFailure, "Could not connect to a secondary node for replica set #{inspect}"
end
ruby
{ "resource": "" }
q22112
Moped.Cluster.refreshable?
train
# True when the node should be refreshed: a down node past the down
# boundary, or an up node whose information has gone stale. Arbiters are
# never refreshed.
def refreshable?(node)
  return false if node.arbiter?
  if node.down?
    node.down_at < down_boundary
  else
    node.needs_refresh?(refresh_boundary)
  end
end
ruby
{ "resource": "" }
q22113
Moped.Cluster.refresh_peers
train
# Yields each resolved peer of +node+ that is not already a seed, and
# records every resolved peer in the peers list.
def refresh_peers(node, &block)
  # Fixed: the block parameter was previously also named +node+, shadowing
  # the method argument; renamed to +peer+ for clarity and safety.
  node.peers.each do |peer|
    next unless peer.address.resolved
    block.call(peer) unless seeds.include?(peer)
    peers.push(peer) unless peers.include?(peer)
  end
end
ruby
{ "resource": "" }
q22114
Moped.Cursor.each
train
# Iterates the cursor, yielding each document; fetches further batches
# until the server reports no more, killing the cursor early once a
# limited query has consumed its limit.
def each
  batch = load_docs
  batch.each { |doc| yield doc }
  while more?
    return kill if limited? && @limit <= 0
    batch = get_more
    batch.each { |doc| yield doc }
  end
end
ruby
{ "resource": "" }
q22115
Moped.Cursor.get_more
train
# Fetches the next batch for this cursor (with retry), updating the
# remaining limit and cursor id from the reply.
def get_more
  with_retry(session.cluster) do
    reply = @node.get_more(@database, @collection, @cursor_id, request_limit)
    @limit -= reply.count if limited?
    @cursor_id = reply.cursor_id
    reply.documents
  end
end
ruby
{ "resource": "" }
q22116
Moped.Cursor.load_docs
train
# Issues the initial query for this cursor, recording the node used so
# get_more is routed to the same server. Decrements @limit and stores the
# reply's cursor id; returns the first batch of documents.
# NOTE(review): @options itself is mutated (flags |= :no_cursor_timeout)
# before being cloned - presumably intentional so the flag persists across
# retries; confirm.
def load_docs @options[:flags] |= [:no_cursor_timeout] if @options[:no_timeout] options = @options.clone options[:limit] = request_limit reply, @node = read_preference.with_node(session.cluster) do |node| [ node.query(@database, @collection, @selector, query_options(options)), node ] end @limit -= reply.count if limited? @cursor_id = reply.cursor_id reply.documents end
ruby
{ "resource": "" }
q22117
Moped.Uri.options
train
# Parses the URI query string into an options hash, routing write-concern
# keys under :write and read-preference values under :read.
def options
  opts = { database: database }
  query_string = match[10]
  unless query_string.nil?
    query_string.split(/\&/).each do |pair|
      key, value = pair.split(/=/)
      if WRITE_OPTIONS.include?(key)
        opts[:write] = { key.to_sym => cast(value) }
      elsif read = READ_MAPPINGS[value.downcase]
        opts[:read] = read
      else
        opts[key.to_sym] = cast(value)
      end
    end
  end
  opts
end
ruby
{ "resource": "" }
q22118
Moped.Uri.to_hash
train
# Returns the connection configuration as a hash; credentials are only
# included when both username and password are present.
def to_hash
  config = { database: database, hosts: hosts }
  config.merge!(username: username, password: password) if username && password
  config
end
ruby
{ "resource": "" }
q22119
Moped.Database.command
train
# Runs a database command on a node chosen by the read preference.
def command(command)
  read_preference.with_node(cluster) do |node|
    node.command(name, command, query_options({}))
  end
end
ruby
{ "resource": "" }
q22120
Moped.Indexes.create
train
# Creates an index on +key+, deriving a name when none is given.
def create(key, options = {})
  spec = options.merge(ns: namespace, key: key)
  spec[:name] ||= key.to_a.join("_")
  # Index creation is forced through an acknowledged write (w: 1).
  database.session.with(write: { w: 1 }) do |sess|
    sess[:"system.indexes"].insert(spec)
  end
end
ruby
{ "resource": "" }
q22121
Moped.Indexes.drop
train
# Drops the index for +key+, or all indexes when no key is given.
# Returns true on success, false when the named index does not exist.
def drop(key = nil)
  name =
    if key
      index = self[key] or return false
      index["name"]
    else
      "*"
    end
  result = database.command(deleteIndexes: collection_name, index: name)
  result["ok"] == 1
end
ruby
{ "resource": "" }
q22122
Moped.Session.new
train
# Returns a copy of this session with its own duplicated cluster,
# optionally yielding it to the block.
def new(options = {})
  session = with(options)
  session.instance_variable_set(:@cluster, cluster.dup)
  block_given? ? yield(session) : session
end
ruby
{ "resource": "" }
q22123
Moped.Session.with
train
# Returns a duplicate of this session with the given options merged in,
# optionally yielding it to the block.
def with(options = {})
  copy = dup
  copy.options.update(options)
  block_given? ? yield(copy) : copy
end
ruby
{ "resource": "" }
q22124
Moped.Address.resolve
train
# Resolves the host to an IPv4 address string, caching it in @resolved.
# Resolution failures are retried twice (with logging); on final failure
# the node is marked down and false is returned. The Timeout wrapper is
# deliberately generous - see the inline TODO about removing it.
def resolve(node) return @resolved if @resolved start = Time.now retries = 0 begin # This timeout should be very large since Timeout::timeout plays very badly with multithreaded code # TODO: Remove this Timeout entirely Timeout::timeout(@timeout * 10) do Resolv.each_address(host) do |ip| if ip =~ Resolv::IPv4::Regex @ip ||= ip break end end raise Resolv::ResolvError unless @ip end @resolved = "#{ip}:#{port}" rescue Timeout::Error, Resolv::ResolvError, SocketError => e msg = [" MOPED:", "Could not resolve IP for: #{original}, delta is #{Time.now - start}, error class is #{e.inspect}, retries is #{retries}. Node is #{node.inspect}", "n/a"] if retries == 0 Loggable.info(*msg) else Loggable.warn(*msg) end if retries < 2 retries += 1 retry else node.down! and false end end end
ruby
{ "resource": "" }
q22125
Moped.Authenticatable.apply_credentials
train
# Reconciles the connection's authenticated databases with +logins+:
# logs out of databases no longer wanted and logs in wherever the cached
# credentials differ. Returns self.
def apply_credentials(logins)
  unless credentials == logins
    # Drop authentication for databases absent from the desired set.
    (credentials.keys - logins.keys).each { |database| logout(database) }
    logins.each do |database, (username, password)|
      unless credentials[database] == [username, password]
        login(database, username, password)
      end
    end
    @original_credentials = credentials.dup
  end
  self
end
ruby
{ "resource": "" }
q22126
Moped.Authenticatable.login
train
# Authenticates against +database+ via the MongoDB nonce handshake: fetch
# a nonce with getnonce, then send an authenticate command built from it.
# Replica-set reconfiguration and connection failures are raised as their
# own error types; anything else is an AuthenticationFailure. On success
# the credentials are cached per database.
# NOTE(review): the success check reads result["ok"] (the getnonce
# result), not the authenticate reply in +document+ - verify intended.
def login(database, username, password) getnonce = Protocol::Command.new(database, getnonce: 1) self.write([getnonce]) reply = self.receive_replies([getnonce]).first if getnonce.failure?(reply) return end result = getnonce.results(reply) authenticate = Protocol::Commands::Authenticate.new(database, username, password, result["nonce"]) self.write([ authenticate ]) document = self.read.documents.first unless result["ok"] == 1 # See if we had connectivity issues so we can retry e = Errors::PotentialReconfiguration.new(authenticate, document) if e.reconfiguring_replica_set? raise Errors::ReplicaSetReconfigured.new(e.command, e.details) elsif e.connection_failure? raise Errors::ConnectionFailure.new(e.inspect) end raise Errors::AuthenticationFailure.new(authenticate, document) end credentials[database] = [username, password] end
ruby
{ "resource": "" }
q22127
Moped.Authenticatable.logout
train
# Logs out of +database+; cached credentials are only removed when the
# server acknowledges the logout command.
def logout(database)
  command = Protocol::Command.new(database, logout: 1)
  self.write([command])
  reply = self.receive_replies([command]).first
  return if command.failure?(reply)
  credentials.delete(database)
end
ruby
{ "resource": "" }
q22128
Moped.Retryable.with_retry
train
# Runs the block, retrying on connection failures (and "not master" /
# "Not primary" reconfigurations) up to cluster.max_retries times. Each
# retry logs, sleeps retry_interval, refreshes the cluster, and recurses
# with one fewer retry. Any other PotentialReconfiguration is re-raised
# immediately; the original error is raised once retries are exhausted.
def with_retry(cluster, retries = cluster.max_retries, &block) begin block.call rescue Errors::ConnectionFailure, Errors::PotentialReconfiguration => e raise e if e.is_a?(Errors::PotentialReconfiguration) && ! (e.message.include?("not master") || e.message.include?("Not primary")) if retries > 0 Loggable.warn(" MOPED:", "Retrying connection attempt #{retries} more time(s), nodes is #{cluster.nodes.inspect}, seeds are #{cluster.seeds.inspect}, cluster is #{cluster.inspect}. Error backtrace is #{e.backtrace}.", "n/a") sleep(cluster.retry_interval) cluster.refresh with_retry(cluster, retries - 1, &block) else raise e end end end
ruby
{ "resource": "" }
q22129
Moped.Collection.drop
train
# Drops this collection on the primary; returns false (instead of
# raising) when the namespace does not exist.
def drop
  session.with(read: :primary).command(drop: name)
rescue Moped::Errors::OperationFailure => e
  raise e unless e.ns_not_found?
  false
end
ruby
{ "resource": "" }
q22130
Moped.Collection.rename
train
# Renames this collection within its database (runs against admin on the
# primary); returns false when the source namespace does not exist.
def rename(to_name)
  session.
    with(database: "admin", read: :primary).
    command(renameCollection: "#{database.name}.#{name}", to: "#{database.name}.#{to_name}")
rescue Moped::Errors::OperationFailure => e
  raise e unless e.ns_not_exists?
  false
end
ruby
{ "resource": "" }
q22131
Moped.Collection.insert
train
# Inserts one document or an array of documents on the primary, with
# retry on connection failure.
def insert(documents, flags = nil)
  with_retry(cluster) do
    # NB: Kernel#Array would splat a Hash into pairs, so keep the
    # explicit is_a? check.
    batch = documents.is_a?(Array) ? documents : [documents]
    cluster.with_primary do |node|
      node.insert(database.name, name, batch, write_concern, flags: flags || [])
    end
  end
end
ruby
{ "resource": "" }
q22132
Moped.Query.count
train
# Runs the count command for this query; when +limit+ is truthy the
# operation's skip/limit are applied to the count as well.
def count(limit = false)
  cmd = { count: collection.name, query: selector }
  cmd.merge!(skip: operation.skip, limit: operation.limit) if limit
  collection.database.command(cmd)["n"].to_i
end
ruby
{ "resource": "" }
q22133
Moped.Query.distinct
train
# Returns the distinct values of +key+ among documents matching the
# current selector.
def distinct(key)
  reply = collection.database.command(
    distinct: collection.name,
    key: key.to_s,
    query: selector
  )
  reply["values"]
end
ruby
{ "resource": "" }
q22134
Moped.Query.explain
train
# Runs the query with $explain, preserving any $hint/$orderby/$maxScan
# modifiers from the original selector, and returns the plan document.
def explain
  original = operation.selector.dup
  hint = original["$hint"]
  sort = original["$orderby"]
  max_scan = original["$maxScan"]
  plan = { "$query" => selector, "$explain" => true }
  plan["$orderby"] = sort if sort
  plan["$hint"] = hint if hint
  plan["$maxScan"] = max_scan if max_scan
  Query.new(collection, plan).limit(-(operation.limit.abs)).each { |doc| return doc }
end
ruby
{ "resource": "" }
q22135
Moped.Query.first
train
# Fetches a single document for this query (limit -1 closes the
# server-side cursor immediately) from a read-preference-selected node.
def first
  reply = read_preference.with_node(cluster) do |node|
    node.query(
      operation.database,
      operation.collection,
      operation.selector,
      query_options(
        fields: operation.fields,
        flags: operation.flags,
        skip: operation.skip,
        limit: -1
      )
    )
  end
  reply.documents.first
end
ruby
{ "resource": "" }
q22136
Moped.Query.remove
train
# Removes the first document matching this query's selector on the
# primary, with retry on connection failure.
def remove
  with_retry(cluster) do
    cluster.with_primary do |node|
      node.remove(
        operation.database,
        operation.collection,
        operation.basic_selector,
        write_concern,
        flags: [:remove_first]
      )
    end
  end
end
ruby
{ "resource": "" }
q22137
Moped.Query.update
train
# Applies +change+ to the first matching document on the primary, with
# retry on connection failure.
def update(change, flags = nil)
  with_retry(cluster) do
    cluster.with_primary do |node|
      # A $query wrapper (added by sort/hint modifiers) must be unwrapped.
      node.update(
        operation.database,
        operation.collection,
        operation.selector["$query"] || operation.selector,
        change,
        write_concern,
        flags: flags
      )
    end
  end
end
ruby
{ "resource": "" }
q22138
Moped.WriteConcern.get
train
# Builds a write concern from an options hash: w of 0 or -1 means
# fire-and-forget (Unverified), anything else is propagated.
def get(value)
  w = value[:w] || value["w"]
  (w == 0 || w == -1) ? Unverified.new : Propagate.new(value)
end
ruby
{ "resource": "" }
q22139
ADB.Instrumentation.instrument
train
# Runs `adb instrument` with the given runner and arguments.
# NOTE(review): raises ADBError carrying last_stdout whenever stdout is
# non-empty - presumably any instrument output indicates failure; confirm.
def instrument(runner, args = {}) with(the(args) << using_the(runner)) raise ADBError, last_stdout unless last_stdout.empty? end
ruby
{ "resource": "" }
q22140
ScopedSearch.QueryBuilder.build_find_params
train
# Builds an ActiveRecord find-compatible hash (:conditions, :include,
# :joins, :order) from the parsed AST and the :order option. The SQL
# generators report key conditions, parameters, includes and joins back
# through notifications, which are collected here; unknown notification
# types raise QueryNotSupported. Key conditions are ANDed around the
# WHERE clause.
def build_find_params(options) keyconditions = [] keyparameters = [] parameters = [] includes = [] joins = [] # Build SQL WHERE clause using the AST sql = @ast.to_sql(self, definition) do |notification, value| # Handle the notifications encountered during the SQL generation: # Store the parameters, includes, etc so that they can be added to # the find-hash later on. case notification when :keycondition then keyconditions << value when :keyparameter then keyparameters << value when :parameter then parameters << value when :include then includes << value when :joins then joins << value else raise ScopedSearch::QueryNotSupported, "Cannot handle #{notification.inspect}: #{value.inspect}" end end # Build SQL ORDER BY clause order = order_by(options[:order]) do |notification, value| case notification when :parameter then parameters << value when :include then includes << value when :joins then joins << value else raise ScopedSearch::QueryNotSupported, "Cannot handle #{notification.inspect}: #{value.inspect}" end end sql = (keyconditions + (sql.blank? ? [] : [sql]) ).map {|c| "(#{c})"}.join(" AND ") # Build hash for ActiveRecord::Base#find for the named scope find_attributes = {} find_attributes[:conditions] = [sql] + keyparameters + parameters unless sql.blank? find_attributes[:include] = includes.uniq unless includes.empty? find_attributes[:joins] = joins.uniq unless joins.empty? find_attributes[:order] = order unless order.nil? # p find_attributes # Uncomment for debugging return find_attributes end
ruby
{ "resource": "" }
q22141
ScopedSearch.QueryBuilder.sql_operator
train
# Maps a parser operator to its SQL form; LIKE/UNLIKE are only valid on
# textual (or virtual) fields.
def sql_operator(operator, field)
  if !field.virtual? && [:like, :unlike].include?(operator) && !field.textual?
    raise ScopedSearch::QueryNotSupported, "the operator '#{operator}' is not supported for field type '#{field.type}'"
  end
  SQL_OPERATORS[operator]
end
ruby
{ "resource": "" }
q22142
ScopedSearch.QueryBuilder.translate_value
train
# Maps a user-supplied value through the field's complete_value table;
# raises QueryNotSupported for values outside the allowed set.
def translate_value(field, value)
  mapped = field.complete_value[value.to_sym]
  if mapped.nil?
    raise ScopedSearch::QueryNotSupported, "'#{field.field}' should be one of '#{field.complete_value.keys.join(', ')}', but the query was '#{value}'"
  end
  mapped
end
ruby
{ "resource": "" }
q22143
ScopedSearch.RailsHelper.sort
train
# Renders a sortable column-header link. Toggles ASC/DESC relative to
# params[:order]; when this field is the active sort, the label gets an
# up/down arrow and an ascending/descending CSS class. The new order is
# merged into the link's URL options (ActionController::Parameters are
# converted to a plain hash first). The label defaults to the humanized
# field name ("ID" when the field is literally "id").
def sort(field, as: nil, default: "ASC", html_options: {}, url_options: params) unless as id = field.to_s.downcase == "id" as = id ? field.to_s.upcase : field.to_s.humanize end ascend = "#{field} ASC" descend = "#{field} DESC" selected_sort = [ascend, descend].find { |o| o == params[:order] } case params[:order] when ascend new_sort = descend when descend new_sort = ascend else new_sort = ["ASC", "DESC"].include?(default) ? "#{field} #{default}" : ascend end unless selected_sort.nil? css_classes = html_options[:class] ? html_options[:class].split(" ") : [] if selected_sort == ascend as = "&#9650;&nbsp;".html_safe + as css_classes << "ascending" else as = "&#9660;&nbsp;".html_safe + as css_classes << "descending" end html_options[:class] = css_classes.join(" ") end url_options = url_options.to_h if url_options.respond_to?(:permit) # convert ActionController::Parameters if given url_options = url_options.merge(:order => new_sort) as = raw(as) if defined?(RailsXss) content_tag(:a, as, html_options.merge(href: url_for(url_options))) end
ruby
{ "resource": "" }
q22144
ScopedSearch.AutoCompleteBuilder.build_autocomplete_options
train
# Builds the autocomplete suggestion list for the current query position.
def build_autocomplete_options
  # Parsing throws on bad syntax, surfacing errors in the existing query.
  is_query_valid
  node = last_node
  completion = complete_options(node)
  suggestions = []
  suggestions += complete_keyword if completion.include?(:keyword)
  suggestions += LOGICAL_INFIX_OPERATORS if completion.include?(:logical_op)
  suggestions += LOGICAL_PREFIX_OPERATORS + NULL_PREFIX_COMPLETER if completion.include?(:prefix_op)
  suggestions += complete_operator(node) if completion.include?(:infix_op)
  suggestions += complete_value if completion.include?(:value)
  build_suggestions(suggestions, completion.include?(:value))
end
ruby
{ "resource": "" }
q22145
ScopedSearch.AutoCompleteBuilder.complete_options
train
# Decides which completion categories apply at the current cursor
# position: keywords and/or prefix operators at the start of a clause or
# after a logical operator, a logical operator after a complete clause,
# values on a right-hand side, and comparison operators otherwise.
def complete_options(node) return [:keyword] + [:prefix_op] if tokens.empty? #prefix operator return [:keyword] if last_token_is(PREFIX_OPERATORS) # left hand if is_left_hand(node) if (tokens.size == 1 || last_token_is(PREFIX_OPERATORS + LOGICAL_INFIX_OPERATORS) || last_token_is(PREFIX_OPERATORS + LOGICAL_INFIX_OPERATORS, 2)) options = [:keyword] options += [:prefix_op] unless last_token_is(PREFIX_OPERATORS) else options = [:logical_op] end return options end if is_right_hand # right hand return [:value] else # comparison operator completer return [:infix_op] end end
ruby
{ "resource": "" }
q22146
ScopedSearch.AutoCompleteBuilder.complete_keyword
train
# Collects the sorted list of completable field keywords; key-value
# fields delegate to complete_key for their dynamic suffix.
def complete_keyword
  definition.fields.each_with_object([]) do |(name, field), keywords|
    next unless field.complete_enabled
    if field.key_field
      keywords.concat(complete_key(name, field, tokens.last))
    else
      keywords << "#{name} "
    end
  end.sort
end
ruby
{ "resource": "" }
q22147
ScopedSearch.AutoCompleteBuilder.complete_key
train
# Completes the dynamic part of a key-value field ("name.<key>"). While
# the current token has no dot, only the "name." prefix is suggested;
# otherwise up to 20 distinct matching key values are fetched from the
# key class and rendered as "name.value " suggestions.
def complete_key(name, field, val) return ["#{name}."] if !val || !val.is_a?(String) || !(val.include?('.')) val = val.sub(/.*\./,'') connection = definition.klass.connection quoted_table = field.key_klass.connection.quote_table_name(field.key_klass.table_name) quoted_field = field.key_klass.connection.quote_column_name(field.key_field) field_name = "#{quoted_table}.#{quoted_field}" field.key_klass .where(value_conditions(field_name, val)) .select(field_name) .limit(20) .distinct .map(&field.key_field) .compact .map { |f| "#{name}.#{f} " } end
ruby
{ "resource": "" }
q22148
ScopedSearch.AutoCompleteBuilder.complete_date_value
train
# Suggests a spread of example date/time expressions: relative phrases,
# the weekday names for the past few days, and quoted composite formats.
def complete_date_value
  suggestions = ['"30 minutes ago"', '"1 hour ago"', '"2 hours ago"', 'Today', 'Yesterday', 'Tomorrow']
  (2..5).each { |days_back| suggestions << days_back.days.ago.strftime('%A') }
  suggestions << '"6 days ago"'
  suggestions << 7.days.ago.strftime('"%b %d,%Y"')
  suggestions << '"2 weeks from now"'
  suggestions
end
ruby
{ "resource": "" }
q22149
ScopedSearch.AutoCompleteBuilder.complete_key_value
train
# Suggests up to 20 distinct existing values for a key-value field,
# scoped to the record identified by the key portion of the token (via
# the association's foreign key when key class and field class differ).
# Values containing whitespace are quoted; unknown keys raise
# QueryNotSupported.
def complete_key_value(field, token, val) key_name = token.sub(/^.*\./,"") key_klass = field.key_klass.where(field.key_field => key_name).first raise ScopedSearch::QueryNotSupported, "Field '#{key_name}' not recognized for searching!" if key_klass.nil? query = completer_scope(field) if field.key_klass != field.klass key = field.key_klass.to_s.gsub(/.*::/,'').underscore.to_sym fk = definition.reflection_by_name(field.klass, key).association_foreign_key.to_sym query = query.where(fk => key_klass.id) end query .where(value_conditions(field.quoted_field, val)) .select("DISTINCT #{field.quoted_field}") .limit(20) .map(&field.field) .compact .map { |v| v.to_s =~ /\s/ ? "\"#{v}\"" : v } end
ruby
{ "resource": "" }
q22150
ScopedSearch.AutoCompleteBuilder.complete_operator
train
# Returns the operators valid for the node's field, each padded with a
# trailing space for the completion UI.
def complete_operator(node)
  definition.operator_by_field_name(node.value).map do |op|
    op.end_with?(' ') ? op : "#{op} "
  end
end
ruby
{ "resource": "" }
q22151
ScopedSearch.ClassMethods.scoped_search
train
# Registers one or more search field definitions; an :on array expands
# into one definition per field. Returns the definition object.
def scoped_search(*definitions)
  self.scoped_search_definition ||= ScopedSearch::Definition.new(self)
  # A subclass gets its own definition so it can extend the parent's.
  unless self.scoped_search_definition.klass == self
    self.scoped_search_definition = ScopedSearch::Definition.new(self)
  end
  definitions.each do |definition|
    on = definition[:on]
    if on.kind_of?(Array)
      on.each { |field| self.scoped_search_definition.define(definition.merge(:on => field)) }
    else
      self.scoped_search_definition.define(definition)
    end
  end
  self.scoped_search_definition
end
ruby
{ "resource": "" }
q22152
ScopedSearch.Definition.operator_by_field_name
train
# Returns the list of operators valid for the named field, preferring an
# explicit operator list, then falling back per field type.
# Raises QueryNotSupported for types with no operator mapping.
def operator_by_field_name(name)
  field = field_by_name(name)
  return [] if field.nil?
  return field.operators if field.operators
  return ['=', '!=', '>', '<', '<=', '>=', '~', '!~', '^', '!^'] if field.virtual?
  return ['=', '!='] if field.set? || field.uuid?
  return ['=', '>', '<', '<=', '>=', '!=', '^', '!^'] if field.numerical?
  return ['=', '!=', '~', '!~', '^', '!^'] if field.textual?
  return ['=', '>', '<'] if field.temporal?
  # Fixed a stray ")'" typo that previously appeared in this message.
  raise ScopedSearch::QueryNotSupported, "Unsupported type '#{field.type.inspect}' for field '#{name}'. This can be a result of a search definition problem."
end
ruby
{ "resource": "" }
q22153
ScopedSearch.Definition.default_fields_for
train
# Selects the default (non-set) fields whose column type is compatible
# with the given value and operator.
def default_fields_for(value, operator = nil)
  types = [:virtual]
  types += [:string, :text] if [nil, :like, :unlike, :ne, :eq].include?(operator)
  types += [:double, :float, :decimal] if value =~ NUMERICAL_REGXP
  types += [:integer] if value =~ INTEGER_REGXP
  types += [:uuid] if value =~ UUID_REGXP
  types += [:datetime, :date, :timestamp] if parse_temporal(value)
  default_fields.select { |field| !field.set? && types.include?(field.type) }
end
ruby
{ "resource": "" }
q22154
ScopedSearch.Definition.parse_temporal
train
# Parses a natural-language temporal expression ("today", "3 days ago",
# "2 weeks from now", or anything DateTime.parse accepts) into a Date or
# DateTime. Returns nil when the value cannot be interpreted.
#
# SECURITY: previously this built strings like "3.days.ago" from
# regex-captured user input and ran them through Kernel#eval. The same
# values are now computed with explicit ActiveSupport duration calls.
def parse_temporal(value)
  return Date.current if value =~ /\btoday\b/i
  return 1.day.ago.to_date if value =~ /\byesterday\b/i
  return 1.day.from_now.to_date if value =~ /\btomorrow\b/i
  # Hour/minute offsets yield a DateTime; day/week/month/year offsets a Date.
  if value =~ /\A\s*(\d+)\s+(hours?|minutes?)\s+ago\s*\z/i
    return Regexp.last_match(1).to_i.public_send(Regexp.last_match(2).downcase).ago.to_datetime
  end
  if value =~ /\A\s*(\d+)\s+(days?|weeks?|months?|years?)\s+ago\s*\z/i
    return Regexp.last_match(1).to_i.public_send(Regexp.last_match(2).downcase).ago.to_date
  end
  if value =~ /\A\s*(\d+)\s+(hours?|minutes?)\s+from\s+now\s*\z/i
    return Regexp.last_match(1).to_i.public_send(Regexp.last_match(2).downcase).from_now.to_datetime
  end
  if value =~ /\A\s*(\d+)\s+(days?|weeks?|months?|years?)\s+from\s+now\s*\z/i
    return Regexp.last_match(1).to_i.public_send(Regexp.last_match(2).downcase).from_now.to_date
  end
  DateTime.parse(value, true) rescue nil
end
ruby
{ "resource": "" }
q22155
ScopedSearch.Definition.reflection_by_name
train
# Looks up an association reflection, tolerating symbol or string keys
# (Rails versions differ). Returns nil when the name is nil or unknown.
def reflection_by_name(klass, name)
  return if name.nil?
  klass.reflections[name.to_sym] || klass.reflections[name.to_s]
end
ruby
{ "resource": "" }
q22156
ScopedSearch.Definition.register_named_scope!
train
# Defines the class-level search_for scope on @klass. The scope is built
# with define_singleton_method so that `self` inside resolves to whatever
# subclass it is invoked on, and applies the query-builder output
# (:conditions, :include, :joins, :order) to a relation.
def register_named_scope! # :nodoc @klass.define_singleton_method(:search_for) do |query = '', options = {}| # klass may be different to @klass if the scope is called on a subclass klass = self definition = klass.scoped_search_definition search_scope = klass.all find_options = ScopedSearch::QueryBuilder.build_query(definition, query || '', options) search_scope = search_scope.where(find_options[:conditions]) if find_options[:conditions] search_scope = search_scope.includes(find_options[:include]) if find_options[:include] search_scope = search_scope.joins(find_options[:joins]) if find_options[:joins] search_scope = search_scope.reorder(find_options[:order]) if find_options[:order] search_scope = search_scope.references(find_options[:include]) if find_options[:include] search_scope end end
ruby
{ "resource": "" }
q22157
OData.Service.method_missing
train
# Dynamic dispatch for the service: collection names build queries,
# AddTo<Collection> queues an Add operation, and known function imports
# are executed; everything else falls through to super.
def method_missing(name, *args)
  if @collections.include?(name.to_s)
    @query = build_collection_query_object(name, @additional_params, *args)
    @query
  elsif name.to_s =~ /^AddTo(.*)/
    collection = $1
    if @collections.include?(collection)
      @save_operations << Operation.new("Add", $1, args[0])
    else
      super
    end
  elsif @function_imports.include?(name.to_s)
    execute_import_function(name.to_s, args)
  else
    super
  end
end
ruby
{ "resource": "" }
q22158
OData.Service.delete_object
train
# Queues a Delete operation for a tracked entity; untracked entities
# (no __metadata) cannot be deleted.
def delete_object(obj)
  unless obj.respond_to?(:__metadata) && !obj.send(:__metadata).nil?
    raise OData::NotSupportedError.new "You cannot delete a non-tracked entity"
  end
  @save_operations << Operation.new("Delete", obj.class.to_s, obj)
end
ruby
{ "resource": "" }
q22159
OData.Service.respond_to?
train
# Mirrors method_missing: collections, AddTo<Collection> for known
# collections, and function imports are all respondable.
def respond_to?(method)
  method_name = method.to_s
  if @collections.include?(method_name)
    true
  elsif method_name =~ /^AddTo(.*)/
    @collections.include?($1) ? true : super
  elsif @function_imports.include?(method_name)
    true
  else
    super
  end
end
ruby
{ "resource": "" }
q22160
OData.Service.load_property
train
# Lazily loads a navigation property onto a tracked entity, assigning a
# single object or a collection depending on the property's cardinality.
def load_property(obj, nav_prop)
  raise NotSupportedError, "You cannot load a property on an entity that isn't tracked" if obj.send(:__metadata).nil?
  raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless obj.respond_to?(nav_prop.to_sym)
  raise ArgumentError, "'#{nav_prop}' is not a valid navigation property" unless @class_metadata[obj.class.to_s][nav_prop].nav_prop
  response = OData::Resource.new(build_load_property_uri(obj, nav_prop), @rest_options).get
  loaded = build_classes_from_result(response.body)
  obj.send "#{nav_prop}=", (singular?(nav_prop) ? loaded.first : loaded)
end
ruby
{ "resource": "" }
q22161
OData.Service.add_link
train
# Queues an AddLink operation connecting +child+ to +parent+ through the
# given navigation property; both entities must be tracked.
def add_link(parent, nav_prop, child)
  raise NotSupportedError, "You cannot add a link on an entity that isn't tracked (#{parent.class})" if parent.send(:__metadata).nil?
  raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless parent.respond_to?(nav_prop.to_sym)
  raise ArgumentError, "'#{nav_prop}' is not a valid navigation property for #{parent.class}" unless @class_metadata[parent.class.to_s][nav_prop].nav_prop
  raise NotSupportedError, "You cannot add a link on a child entity that isn't tracked (#{child.class})" if child.send(:__metadata).nil?
  @save_operations << Operation.new("AddLink", nav_prop, parent, child)
end
ruby
{ "resource": "" }
q22162
OData.Service.build_collection_query_object
train
# Builds the root query for a collection, encoding a single numeric-ish
# argument as an id path and any other arguments as a parenthesized key.
def build_collection_query_object(name, additional_parameters, *args)
  root = "/#{name}"
  if args.size == 1
    # NOTE(review): /\d+/ matches a digit anywhere in the string; confirm
    # whether keys merely containing digits should be routed as ids.
    if args.first.to_s =~ /\d+/
      id_metadata = find_id_metadata(name.to_s)
      root << build_id_path(args.first, id_metadata)
    else
      root << "(#{args.first})"
    end
  elsif args.size > 1
    root << "(#{args.join(',')})"
  end
  QueryBuilder.new(root, additional_parameters)
end
ruby
{ "resource": "" }
q22163
OData.Service.build_collections_and_classes
train
# Parses the EDMX document and materializes Ruby classes: complex types
# first (entities may reference them), then non-abstract entity types
# with their navigation properties, then the entity-set collection map,
# and finally the function imports.
def build_collections_and_classes @classes = Hash.new @class_metadata = Hash.new # This is used to store property information about a class # Build complex types first, these will be used for entities complex_types = @edmx.xpath("//edm:ComplexType", @ds_namespaces) || [] complex_types.each do |c| name = qualify_class_name(c['Name']) props = c.xpath(".//edm:Property", @ds_namespaces) methods = props.collect { |p| p['Name'] } # Standard Properties @classes[name] = ClassBuilder.new(name, methods, [], self, @namespace).build unless @classes.keys.include?(name) end entity_types = @edmx.xpath("//edm:EntityType", @ds_namespaces) entity_types.each do |e| next if e['Abstract'] == "true" klass_name = qualify_class_name(e['Name']) methods = collect_properties(klass_name, e, @edmx) nav_props = collect_navigation_properties(klass_name, e, @edmx) @classes[klass_name] = ClassBuilder.new(klass_name, methods, nav_props, self, @namespace).build unless @classes.keys.include?(klass_name) end # Fill in the collections instance variable collections = @edmx.xpath("//edm:EntityContainer/edm:EntitySet", @ds_namespaces) collections.each do |c| entity_type = c["EntityType"] @collections[c["Name"]] = { :edmx_type => entity_type, :type => convert_to_local_type(entity_type) } end build_function_imports end
ruby
{ "resource": "" }
q22164
OData.Service.build_function_imports
train
# Fills @function_imports from the EDMX FunctionImport declarations,
# capturing HTTP method, return types and parameters.
def build_function_imports
  functions = @edmx.xpath("//edm:EntityContainer/edm:FunctionImport", @ds_namespaces)
  functions.each do |f|
    http_method_attribute = f.xpath("@m:HttpMethod", @ds_namespaces).first
    # HttpMethod is no longer required http://www.odata.org/2011/10/actions-in-odata/
    is_side_effecting_attribute = f.xpath("@edm:IsSideEffecting", @ds_namespaces).first
    http_method = 'POST' # default to POST
    if http_method_attribute
      http_method = http_method_attribute.content
    elsif is_side_effecting_attribute
      # BUG FIX: .content is a String ("true"/"false"); any non-nil String
      # is truthy, so the GET branch was previously unreachable.
      http_method = is_side_effecting_attribute.content == 'true' ? 'POST' : 'GET'
    end
    return_type = f["ReturnType"]
    inner_return_type = nil
    unless return_type.nil?
      return_type = (return_type =~ /^Collection/) ? Array : convert_to_local_type(return_type)
      inner_return_type = convert_to_local_type($~[1]) if f["ReturnType"] =~ /\((.*)\)/
    end
    params = f.xpath("edm:Parameter", @ds_namespaces)
    parameters = nil
    if params.length > 0
      parameters = {}
      params.each { |p| parameters[p["Name"]] = p["Type"] }
    end
    @function_imports[f["Name"]] = {
      :http_method => http_method,
      :return_type => return_type,
      :inner_return_type => inner_return_type,
      :parameters => parameters
    }
  end
end
ruby
{ "resource": "" }
q22165
OData.Service.convert_to_local_type
train
# Maps an EDMX type name to a Ruby type: Edm.* primitives go through the
# type table, anything else resolves to a generated local class.
def convert_to_local_type(edmx_type)
  return edm_to_ruby_type(edmx_type) if edmx_type =~ /^Edm/
  qualify_class_name(edmx_type.split('.').last).camelize.constantize
end
ruby
{ "resource": "" }
q22166
OData.Service.build_property_metadata
train
# Builds a name => PropertyMetadata hash from property elements, marking
# key properties and attaching association data to navigation properties.
def build_property_metadata(props, keys = [])
  props.each_with_object({}) do |element, metadata|
    prop_meta = PropertyMetadata.new(element)
    prop_meta.is_key = keys.include?(prop_meta.name)
    prop_meta.association = Association.new(element, @edmx) if prop_meta.nav_prop
    metadata[prop_meta.name] = prop_meta
  end
end
ruby
{ "resource": "" }
q22167
OData.Service.handle_collection_result
train
# Converts a collection response into entity objects, following partial
# (continuation) results while eager loading is enabled.
def handle_collection_result(result)
  classes = build_classes_from_result(result)
  classes.concat(handle_partial) while partial? && @options[:eager_partial]
  classes
end
ruby
{ "resource": "" }
q22168
OData.Service.handle_exception
train
# Translates a low-level HTTP exception into a ServiceError carrying the
# response status and the OData error message (when the body contains one);
# exceptions without a response are re-raised untouched.
def handle_exception(e)
  raise e unless defined?(e.response) && e.response != nil

  code = e.response[:status]
  error = Nokogiri::XML(e.response[:body])

  message_node = error.xpath("m:error/m:message", @ds_namespaces).first
  message = message_node ? message_node.content : "Server returned error but no message."

  raise ServiceError.new(code), message
end
ruby
{ "resource": "" }
q22169
OData.Service.collect_navigation_properties
train
# Registers the navigation properties found under +element+ in the class
# metadata for +klass_name+ and returns the navigation property names.
# (+doc+ is unused here but kept for interface compatibility.)
def collect_navigation_properties(klass_name, element, doc)
  nav_props = element.xpath(".//edm:NavigationProperty", @ds_namespaces)
  @class_metadata[klass_name].merge!(build_property_metadata(nav_props))
  nav_props.map { |p| p['Name'] }
end
ruby
{ "resource": "" }
q22170
OData.Service.build_classes_from_result
train
# Parses a service response body into entity instances. $links responses
# are handed off to parse_link_results; otherwise each top-level atom
# entry becomes an instance. Also records any "next" paging link.
def build_classes_from_result(result)
  doc = Nokogiri::XML(result)

  # $links requests return a bare list of URIs instead of a feed
  return parse_link_results(doc) if doc.at_xpath("/ds:links", @ds_namespaces)

  entries = doc.xpath("//atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)

  extract_partial(doc)

  entries.map { |entry| entry_to_class(entry) }
end
ruby
{ "resource": "" }
q22171
OData.Service.entry_to_class
train
# Materializes a single atom:entry into an entity instance: resolves the
# class, fills __metadata, regular properties, feed-customized properties
# (title/summary mappings) and any inline (expanded) associations.
def entry_to_class(entry)
  # Retrieve the class name from the fully qualified name (the last string after the last dot)
  klass_name = entry.xpath("./atom:category/@term", @ds_namespaces).to_s.split('.')[-1]

  # Is the category missing? See if there is a title that we can use to build the class
  if klass_name.nil?
    title = entry.xpath("./atom:title", @ds_namespaces).first
    return nil if title.nil?
    klass_name = title.content.to_s
  end

  return nil if klass_name.nil?

  properties = entry.xpath("./atom:content/m:properties/*", @ds_namespaces)

  klass = @classes[qualify_class_name(klass_name)].new

  # Fill metadata
  meta_id = entry.xpath("./atom:id", @ds_namespaces)[0].content
  klass.send :__metadata=, { :uri => meta_id }

  # Fill properties
  properties.each do |prop|
    klass.send "#{prop.name}=", parse_value_xml(prop)
  end

  # Fill properties represented outside of the properties collection
  # (FC_KeepInContent="false" maps them onto atom title/summary)
  @class_metadata[qualify_class_name(klass_name)].select { |k, v| v.fc_keep_in_content == false }.each do |k, meta|
    if meta.fc_target_path == "SyndicationTitle"
      title = entry.xpath("./atom:title", @ds_namespaces).first
      klass.send "#{meta.name}=", title.content
    elsif meta.fc_target_path == "SyndicationSummary"
      summary = entry.xpath("./atom:summary", @ds_namespaces).first
      klass.send "#{meta.name}=", summary.content
    end
  end

  # Inline (expanded) navigation properties
  entry.xpath("./atom:link[m:inline]", @ds_namespaces).each do |link|
    # TODO: Use the metadata's associations to determine the multiplicity instead of this "hack"
    property_name = link.attributes['title'].to_s
    if singular?(property_name)
      inline_entry = link.xpath("./m:inline/atom:entry", @ds_namespaces).first
      inline_klass = build_inline_class(klass, inline_entry, property_name)
      klass.send "#{property_name}=", inline_klass
    else
      inline_classes = []
      link.xpath("./m:inline/atom:feed/atom:entry", @ds_namespaces).each do |inline_entry|
        # Build the child class and add it to the temp collection
        inline_classes << entry_to_class(inline_entry)
      end
      # Assign the array of classes to the property
      property_name = link.xpath("@title", @ds_namespaces)
      klass.send "#{property_name}=", inline_classes
    end
  end

  klass
end
ruby
{ "resource": "" }
q22172
OData.Service.extract_partial
train
# Records whether the feed is partial (has a rel="next" paging link) and,
# if so, stores the next-page URI with any additional query params merged in.
def extract_partial(doc)
  next_links = doc.xpath('//atom:link[@rel="next"]', @ds_namespaces)
  @has_partial = next_links.any?
  return unless @has_partial

  uri = Addressable::URI.parse(next_links[0]['href'])
  uri.query_values = uri.query_values.merge @additional_params unless @additional_params.empty?
  @next_uri = uri.to_s
end
ruby
{ "resource": "" }
q22173
OData.Service.parse_link_results
train
# Parses a $links response document into an array of URI objects, one per
# ds:uri element.
def parse_link_results(doc)
  doc.xpath("/ds:links/ds:uri", @ds_namespaces).map do |uri_el|
    URI.parse(uri_el.content)
  end
end
ruby
{ "resource": "" }
q22174
OData.Service.link_child_to_parent
train
# Wires both sides of a parent/child association for an AddLink-style
# operation: appends the child to the parent's collection, then sets the
# matching (reverse) navigation property on the child.
def link_child_to_parent(operation)
  child_collection = operation.klass.send("#{operation.klass_name}") || []
  child_collection << operation.child_klass
  operation.klass.send("#{operation.klass_name}=", child_collection)

  # Attach the parent to the child
  parent_meta = @class_metadata[operation.klass.class.to_s][operation.klass_name]
  child_meta = @class_metadata[operation.child_klass.class.to_s]

  # Find the navigation property on the child that shares the parent's relationship
  child_properties = Helpers.normalize_to_hash(
    child_meta.select { |k, prop|
      prop.nav_prop &&
      prop.association.relationship == parent_meta.association.relationship })

  child_property_to_set = child_properties.keys.first # There should be only one match

  # TODO: Handle many to many scenarios where the child property is an enumerable
  operation.child_klass.send("#{child_property_to_set}=", operation.klass)
end
ruby
{ "resource": "" }
q22175
OData.Service.fill_complex_type_properties
train
# Copies every child element of a complex-type XML node onto the matching
# writer of the complex-type instance +klass+.
def fill_complex_type_properties(complex_type_xml, klass)
  complex_type_xml.xpath(".//*").each do |prop|
    klass.send("#{prop.name}=", parse_value_xml(prop))
  end
end
ruby
{ "resource": "" }
q22176
OData.Service.parse_date
train
# Parses an OData date string into a Time, assuming UTC when no explicit
# timezone (Z or a +hh:mm/-hh:mm offset) is present.
def parse_date(sdate)
  # Assume this is UTC if no timezone is specified
  sdate = sdate + "Z" unless sdate.match(/Z|([+|-]\d{2}:\d{2})$/)

  # Older Rubies (e.g. 1.8.7 on 32-bit platforms) cannot represent all
  # dates as Time — see
  # http://makandra.com/notes/1017-maximum-representable-value-for-a-ruby-time-object
  # — so fall back to DateTime when Time.parse overflows.
  begin
    Time.parse(sdate)
  rescue ArgumentError
    DateTime.parse(sdate)
  end
end
ruby
{ "resource": "" }
q22177
OData.Service.parse_value_xml
train
# Converts a property XML node into a Ruby value: Edm primitives (or
# untyped nodes) go through parse_value; anything else is materialized as
# a complex-type instance.
def parse_value_xml(property_xml)
  property_type = Helpers.get_namespaced_attribute(property_xml, 'type', 'm')
  property_null = Helpers.get_namespaced_attribute(property_xml, 'null', 'm')

  if property_type.nil? || property_type.match(/^Edm/)
    parse_value(property_xml.content, property_type, property_null)
  else
    complex_type_to_class(property_xml)
  end
end
ruby
{ "resource": "" }
q22178
OData.Service.parse_primative_type
train
# Coerces a raw string value to the requested primitive return type:
# Integer -> to_i, Float -> BigDecimal (via to_d), Time -> parse_date,
# anything else -> String.
#
# BUG FIX: the original compared against Fixnum, which was unified into
# Integer in Ruby 2.4 and removed entirely in 3.2 (NameError at runtime).
# `<= Integer` accepts Integer itself and any legacy Fixnum subclass
# without referencing the dead constant.
def parse_primative_type(value, return_type)
  return value.to_i if return_type.is_a?(Class) && return_type <= Integer
  return value.to_d if return_type == Float
  return parse_date(value.to_s) if return_type == Time
  value.to_s
end
ruby
{ "resource": "" }
q22179
OData.Service.execute_import_function
train
# Invokes a service FunctionImport by name: validates the argument count,
# builds the request URI, issues the call with the declared HTTP method and
# parses the response into one of the four possible result shapes.
def execute_import_function(name, *args)
  func = @function_imports[name]

  # Check the args making sure that more weren't passed in than the function needs
  param_count = func[:parameters].nil? ? 0 : func[:parameters].count
  arg_count = args.nil? ? 0 : args[0].count
  if arg_count > param_count
    raise ArgumentError, "wrong number of arguments (#{arg_count} for #{param_count})"
  end

  # Pair up declared parameter names with the supplied values
  params = {}
  func[:parameters].keys.each_with_index { |key, i| params[key] = args[0][i] } unless func[:parameters].nil?

  function_uri = build_function_import_uri(name, params)
  result = OData::Resource.new(function_uri, @rest_options).send(func[:http_method].downcase, {})

  # A 204 (No Content) response simply signals success
  return true if result.status == 204

  # Otherwise parse one of four result shapes...
  if func[:return_type] == Array
    # ...a collection of entities...
    return build_classes_from_result(result.body) if @classes.include?(func[:inner_return_type].to_s)

    # ...or a collection of native types...
    elements = Nokogiri::XML(result.body).xpath("//ds:element", @ds_namespaces)
    results = []
    elements.each do |e|
      results << parse_primative_type(e.content, func[:inner_return_type])
    end
    return results
  end

  # ...a single entity...
  if @classes.include?(func[:return_type].to_s)
    entry = Nokogiri::XML(result.body).xpath("atom:entry[not(ancestor::atom:entry)]", @ds_namespaces)
    return entry_to_class(entry)
  end

  # ...or a single native type
  unless func[:return_type].nil?
    e = Nokogiri::XML(result.body).xpath("/*").first
    return parse_primative_type(e.content, func[:return_type])
  end

  # Nothing could be parsed, so just return if we got a 200 or not
  result.status == 200
end
ruby
{ "resource": "" }
q22180
Pickle.Session.model_with_associations
train
# Pickle helper: looks up a previously created model and reloads it with
# its associations included; returns nil when no such model was created.
def model_with_associations(name)
  found = created_model(name)
  found ? OData::PickleAdapter.get_model(found.class, found.id, true) : nil
end
ruby
{ "resource": "" }
q22181
OData.ClassBuilder.build
train
# Builds (or returns) the Ruby class for @klass_name, creating any missing
# intermediate namespace modules first. The generated class extends
# ActiveSupport::JSON, mixes in OData, and receives the service-specific
# initializer, accessors, navigation properties and class-level helpers.
def build
  # Already built? Reuse it.
  return @klass unless @klass.nil?

  # Without a class name there is nothing to build
  return nil if @klass_name.nil?

  # Reuse an existing constant of the same name when one is already a Class
  already_defined = eval("defined?(#{@klass_name}) == 'constant' and #{@klass_name}.class == Class")
  if already_defined
    @klass = @klass_name.constantize
    return @klass
  end

  if @namespace
    # Create each missing namespace module from the outside in
    namespaces = @namespace.split(/\.|::/)
    namespaces.each_with_index do |ns, index|
      if index == 0
        next if Object.const_defined? ns
        Object.const_set(ns, Module.new)
      else
        current_ns = namespaces[0..index-1].join '::'
        next if eval "#{current_ns}.const_defined? '#{ns}'"
        eval "#{current_ns}.const_set('#{ns}', Module.new)"
      end
    end
    klass_constant = @klass_name.split('::').last
    eval "#{namespaces.join '::'}.const_set('#{klass_constant}', Class.new.extend(ActiveSupport::JSON))"
  else
    Object.const_set(@klass_name, Class.new.extend(ActiveSupport::JSON))
  end

  @klass = @klass_name.constantize
  @klass.class_eval do
    include OData
  end

  add_initializer(@klass)
  add_methods(@klass)
  add_nav_props(@klass)
  add_class_methods(@klass)

  @klass
end
ruby
{ "resource": "" }
q22182
OData.QueryBuilder.links
train
# Restricts the query to the $links of the given navigation property.
# Incompatible with `count` and `select`. Returns self for chaining.
def links(navigation_property)
  if @count
    raise OData::NotSupportedError.new("You cannot call both the `links` method and the `count` method in the same query.")
  end
  unless @select.empty?
    raise OData::NotSupportedError.new("You cannot call both the `links` method and the `select` method in the same query.")
  end

  @links_navigation_property = navigation_property
  self
end
ruby
{ "resource": "" }
q22183
OData.QueryBuilder.count
train
# Turns the query into a $count request. Incompatible with `links` and
# `select`. Returns self for chaining.
def count
  if @links_navigation_property
    raise OData::NotSupportedError.new("You cannot call both the `links` method and the `count` method in the same query.")
  end
  unless @select.empty?
    raise OData::NotSupportedError.new("You cannot call both the `select` method and the `count` method in the same query.")
  end

  @count = true
  self
end
ruby
{ "resource": "" }
q22184
OData.QueryBuilder.select
train
# Adds fields to the $select projection; a nested path such as "Orders/ID"
# also queues its parent path ("Orders") for $expand. Incompatible with
# `links` and `count`. Returns self for chaining.
def select(*fields)
  if @links_navigation_property
    raise OData::NotSupportedError.new("You cannot call both the `links` method and the `select` method in the same query.")
  end
  if @count
    raise OData::NotSupportedError.new("You cannot call both the `count` method and the `select` method in the same query.")
  end

  @select |= fields

  # Selecting a nested field implies expanding its parent path(s)
  fields.grep(%r{/}).each do |field|
    segments = field.split '/'
    @expands |= [segments[0...-1].join('/')]
  end

  self
end
ruby
{ "resource": "" }
q22185
Strava::Api::V3.Common.sanitize_request_parameters
train
# Normalizes request parameter values: arrays of scalars become
# comma-separated strings, DateTime becomes Time, and Time becomes a unix
# timestamp integer. Returns a new hash; the input is not mutated.
def sanitize_request_parameters(parameters)
  parameters.each_with_object({}) do |(key, value), sanitized|
    # Arrays of non-enumerable values turn into comma-separated lists.
    # (Strings count as scalars even on 1.8.7, where they are Enumerable.)
    if value.is_a?(Array) && value.none? { |entry| entry.is_a?(Enumerable) && !entry.is_a?(String) }
      value = value.join(",")
    end
    value = value.to_time if value.is_a? DateTime
    value = value.to_i if value.is_a? Time
    sanitized[key] = value
  end
end
ruby
{ "resource": "" }
q22186
Strava::Api::V3.ActivityExtras.list_activity_photos
train
# Fetches the photos attached to an activity, always requesting all photo
# sources (native + Instagram).
#
# BUG FIX: previously this mutated the caller's +args+ hash in place;
# using a merged copy keeps the caller's hash untouched.
def list_activity_photos(id, args = {}, options = {}, &block)
  args = args.merge('photo_sources' => 'true')
  # Fetches the connections for given object.
  api_call("activities/#{id}/photos", args, 'get', options, &block)
end
ruby
{ "resource": "" }
q22187
Strava::Api::V3.Stream.retrieve_activity_streams
train
# Retrieves stream data for an activity; +types+ is a comma-separated list
# of stream types (e.g. "time,latlng").
def retrieve_activity_streams(id, types, args = {}, options = {}, &block)
  path = "activities/#{id}/streams/#{types}"
  api_call(path, args, 'get', options, &block)
end
ruby
{ "resource": "" }
q22188
Strava::Api::V3.Stream.retrieve_effort_streams
train
# Retrieves stream data for a segment effort; +types+ is a comma-separated
# list of stream types.
def retrieve_effort_streams(id, types, args = {}, options = {}, &block)
  path = "segment_efforts/#{id}/streams/#{types}"
  api_call(path, args, 'get', options, &block)
end
ruby
{ "resource": "" }
q22189
Strava::Api::V3.Stream.retrieve_segment_streams
train
# Retrieves stream data for a segment; +types+ is a comma-separated list
# of stream types.
def retrieve_segment_streams(id, types, args = {}, options = {}, &block)
  path = "segments/#{id}/streams/#{types}"
  api_call(path, args, 'get', options, &block)
end
ruby
{ "resource": "" }
q22190
CFPropertyList.CFDate.set_value
train
# Sets the wrapped timestamp. +value+ is interpreted as seconds since the
# unix epoch (TIMESTAMP_UNIX) or, for any other format, as seconds since
# the Apple epoch (2001-01-01, offset by DATE_DIFF_APPLE_UNIX).
def set_value(value, format = CFDate::TIMESTAMP_UNIX)
  @value =
    if format == CFDate::TIMESTAMP_UNIX
      Time.at(value)
    else
      Time.at(value + CFDate::DATE_DIFF_APPLE_UNIX)
    end
end
ruby
{ "resource": "" }
q22191
CFPropertyList.CFDate.get_value
train
# Returns the wrapped time as an Integer unix timestamp (TIMESTAMP_UNIX)
# or, for any other format, as a Float of seconds since the Apple epoch.
def get_value(format = CFDate::TIMESTAMP_UNIX)
  return @value.to_i if format == CFDate::TIMESTAMP_UNIX
  @value.to_f - CFDate::DATE_DIFF_APPLE_UNIX
end
ruby
{ "resource": "" }
q22192
CFPropertyList.CFArray.to_xml
train
# Serializes the array to an XML <array> node, appending each element's
# own XML representation as a child.
def to_xml(parser)
  node = parser.new_node('array')
  @value.each do |element|
    node = parser.append_node(node, element.to_xml(parser))
  end
  node
end
ruby
{ "resource": "" }
q22193
CFPropertyList.CFDictionary.to_xml
train
# Serializes the dictionary to an XML <dict> node as alternating <key>
# elements and value children.
def to_xml(parser)
  node = parser.new_node('dict')
  @value.each_pair do |key, value|
    key_node = parser.append_node(parser.new_node('key'), parser.new_text(key.to_s))
    node = parser.append_node(node, key_node)
    node = parser.append_node(node, value.to_xml(parser))
  end
  node
end
ruby
{ "resource": "" }
q22194
CFPropertyList.Binary.load
train
# Reads a binary plist from opts[:file] or opts[:data], decodes the
# trailer and the offset table, then parses and returns the top object.
def load(opts)
  @unique_table = {}
  @count_objects = 0
  @object_refs = 0

  @written_object_count = 0
  @object_table = []
  @object_ref_size = 0

  @offsets = []

  fd = nil
  if opts.has_key?(:file)
    fd = File.open(opts[:file], "rb")
    file = opts[:file]
  else
    fd = StringIO.new(opts[:data], "rb")
    file = "<string>"
  end

  # first, we read the trailer: 32 byte from the end
  fd.seek(-32, IO::SEEK_END)
  buff = fd.read(32)

  offset_size, object_ref_size, number_of_objects, top_object, table_offset = buff.unpack "x6CCx4Nx4Nx4N"

  # after that, get the offset table
  fd.seek(table_offset, IO::SEEK_SET)
  coded_offset_table = fd.read(number_of_objects * offset_size)
  raise CFFormatError.new("#{file}: Format error!") unless coded_offset_table.bytesize == number_of_objects * offset_size

  @count_objects = number_of_objects

  # decode offset table; there is no direct unpack directive for 3-byte
  # integers, so those are assembled from individual bytes
  if offset_size != 3
    formats = ["", "C*", "n*", "", "N*"]
    @offsets = coded_offset_table.unpack(formats[offset_size])
  else
    @offsets = coded_offset_table.unpack("C*").each_slice(3).map { |x, y, z| (x << 16) | (y << 8) | z }
  end

  @object_ref_size = object_ref_size
  val = read_binary_object_at(file, fd, top_object)

  fd.close

  val
end
ruby
{ "resource": "" }
q22195
CFPropertyList.Binary.to_str
train
# Serializes the CF object tree at opts[:root] into a binary plist string:
# "bplist00" header, object table, offset table, then the 32-byte trailer.
def to_str(opts = {})
  @unique_table = {}
  @count_objects = 0
  @object_refs = 0

  @written_object_count = 0
  @object_table = []

  @offsets = []

  binary_str = "bplist00"

  @object_refs = count_object_refs(opts[:root])

  opts[:root].to_binary(self)

  # Compute the byte offset at which each serialized object starts
  next_offset = 8
  offsets = @object_table.map do |object|
    offset = next_offset
    next_offset += object.bytesize
    offset
  end
  binary_str << @object_table.join

  table_offset = next_offset
  offset_size = Binary.bytes_needed(table_offset)

  if offset_size < 8
    # Fast path: encode the entire offset array at once.
    binary_str << offsets.pack((%w(C n N N)[offset_size - 1]) + '*')
  else
    # Slow path: host may be little or big endian, must pack each offset
    # separately.
    offsets.each do |offset|
      binary_str << "#{Binary.pack_it_with_size(offset_size, offset)}"
    end
  end

  # Trailer: offset size, ref size, object count, top object id, table offset
  binary_str << [offset_size, object_ref_size(@object_refs)].pack("x6CC")
  binary_str << [@object_table.size].pack("x4N")
  binary_str << [0].pack("x4N")
  binary_str << [table_offset].pack("x4N")

  binary_str
end
ruby
{ "resource": "" }
q22196
CFPropertyList.Binary.read_binary_int
train
def read_binary_int(fname,fd,length) if length > 4 raise CFFormatError.new("Integer greater than 16 bytes: #{length}") end nbytes = 1 << length buff = fd.read(nbytes) CFInteger.new( case length when 0 then buff.unpack("C")[0] when 1 then buff.unpack("n")[0] when 2 then buff.unpack("N")[0] # 8 byte integers are always signed when 3 then buff.unpack("q>")[0] # 16 byte integers are used to represent unsigned 8 byte integers # where the unsigned value is stored in the lower 8 bytes and the # upper 8 bytes are unused. when 4 then buff.unpack("Q>Q>")[1] end ) end
ruby
{ "resource": "" }
q22197
CFPropertyList.Binary.read_binary_real
train
# Reads a binary plist real whose payload is 2**length bytes and wraps it
# in a CFReal. Only 4-byte (single) and 8-byte (double) floats are valid;
# the stored bytes are big-endian, hence the reverse before unpacking.
def read_binary_real(fname, fd, length)
  raise CFFormatError.new("Real greater than 8 bytes: #{length}") if length > 3

  nbytes = 1 << length

  buff = fd.read(nbytes)

  CFReal.new(
    case length
    when 0, 1
      # 1 and 2 byte floats do not exist in the format; must be an error
      raise CFFormatError.new("got #{length+1} byte float, must be an error!")
    when 2 then buff.reverse.unpack("e")[0]
    when 3 then buff.reverse.unpack("E")[0]
    else fail "unexpected length: #{length}"
    end
  )
end
ruby
{ "resource": "" }
q22198
CFPropertyList.Binary.read_binary_date
train
# Reads a binary plist date: a 4- or 8-byte big-endian float of seconds
# since the Apple epoch, wrapped in a CFDate with TIMESTAMP_APPLE format.
def read_binary_date(fname, fd, length)
  raise CFFormatError.new("Date greater than 8 bytes: #{length}") if length > 3

  nbytes = 1 << length

  buff = fd.read(nbytes)

  CFDate.new(
    case length
    when 0, 1
      # 1 and 2 byte dates do not exist in the format
      raise CFFormatError.new("#{length+1} byte CFDate, error")
    when 2 then buff.reverse.unpack("e")[0]
    when 3 then buff.reverse.unpack("E")[0]
    end,
    CFDate::TIMESTAMP_APPLE
  )
end
ruby
{ "resource": "" }
q22199
CFPropertyList.Binary.read_binary_data
train
# Reads +length+ raw bytes from the stream and wraps them in a CFData
# marked as raw (unencoded) data.
def read_binary_data(fname, fd, length)
  raw = read_fd(fd, length)
  CFData.new(raw, CFData::DATA_RAW)
end
ruby
{ "resource": "" }