Dataset columns:
_id — string (2 to 6 characters)
title — string (0 to 58 characters)
partition — string (3 distinct values)
text — string (52 characters to 373k)
language — string (1 distinct value)
meta_information — dict
q21400
train
function(el, options) { var eHandler = function(e) { e.mfpEl = this; mfp._openClick(e, el, options); }; if(!options) { options = {}; } var eName = 'click.magnificPopup'; options.mainEl = el; if(options.items) { options.isObj = true; el.off(eName).on(eName, eHandler); } else { options.isObj = false; if(options.delegate) { el.off(eName).on(eName, options.delegate , eHandler); } else { options.items = el; el.off(eName).on(eName, eHandler); } } }
javascript
{ "resource": "" }
q21401
train
function(status, text) { if(mfp.preloader) { if(_prevStatus !== status) { mfp.container.removeClass('mfp-s-'+_prevStatus); } if(!text && status === 'loading') { text = mfp.st.tLoading; } var data = { status: status, text: text }; // allows to modify status _mfpTrigger('UpdateStatus', data); status = data.status; text = data.text; mfp.preloader.html(text); mfp.preloader.find('a').on('click', function(e) { e.stopImmediatePropagation(); }); mfp.container.addClass('mfp-s-'+status); _prevStatus = status; } }
javascript
{ "resource": "" }
q21402
deserializeValue
train
function deserializeValue(value) { var num try { return value ? value == "true" || ( value == "false" ? false : value == "null" ? null : !isNaN(num = Number(value)) ? num : /^[\[\{]/.test(value) ? $.parseJSON(value) : value ) : value } catch(e) { return value } }
javascript
{ "resource": "" }
q21403
fix
train
function fix(event) { if (!('defaultPrevented' in event)) { event.defaultPrevented = false var prevent = event.preventDefault event.preventDefault = function() { this.defaultPrevented = true prevent.call(this) } } }
javascript
{ "resource": "" }
q21404
train
function(item) { var counter = 0, img = item.img[0], mfpSetInterval = function(delay) { if(_imgInterval) { clearInterval(_imgInterval); } // decelerating interval that checks for size of an image _imgInterval = setInterval(function() { if(img.naturalWidth > 0) { mfp._onImageHasSize(item); return; } if(counter > 200) { clearInterval(_imgInterval); } counter++; if(counter === 3) { mfpSetInterval(10); } else if(counter === 40) { mfpSetInterval(50); } else if(counter === 100) { mfpSetInterval(500); } }, delay); }; mfpSetInterval(1); }
javascript
{ "resource": "" }
q21405
createCountUp
train
function createCountUp() { establishOptionsFromInputs(); demo = new CountUp('myTargetElement', endVal, options); if (!demo.error) { errorSection.style.display = 'none'; if (input('useOnComplete').checked) { demo.start(methodToCallOnComplete); } else { demo.start(); } updateCodeVisualizer(); } else { errorSection.style.display = 'block'; document.getElementById('error').innerHTML = demo.error; console.error(demo.error); } }
javascript
{ "resource": "" }
q21406
train
function(self) { if (self.resumeToken) { self.options.resumeAfter = self.resumeToken; } var changeStreamCursor = buildChangeStreamAggregationCommand(self); /** * Fired for each new matching change in the specified namespace. Attaching a `change` * event listener to a Change Stream will switch the stream into flowing mode. Data will * then be passed as soon as it is available. * * @event ChangeStream#change * @type {object} */ if (self.listenerCount('change') > 0) { changeStreamCursor.on('data', function(change) { processNewChange({ changeStream: self, change, eventEmitter: true }); }); } /** * Change stream close event * * @event ChangeStream#close * @type {null} */ changeStreamCursor.on('close', function() { self.emit('close'); }); /** * Change stream end event * * @event ChangeStream#end * @type {null} */ changeStreamCursor.on('end', function() { self.emit('end'); }); /** * Fired when the stream encounters an error. * * @event ChangeStream#error * @type {Error} */ changeStreamCursor.on('error', function(error) { processNewChange({ changeStream: self, error, eventEmitter: true }); }); if (self.pipeDestinations) { const cursorStream = changeStreamCursor.stream(self.streamOptions); for (let pipeDestination in self.pipeDestinations) { cursorStream.pipe(pipeDestination); } } return changeStreamCursor; }
javascript
{ "resource": "" }
q21407
GridFSBucket
train
function GridFSBucket(db, options) { Emitter.apply(this); this.setMaxListeners(0); if (options && typeof options === 'object') { options = shallowClone(options); var keys = Object.keys(DEFAULT_GRIDFS_BUCKET_OPTIONS); for (var i = 0; i < keys.length; ++i) { if (!options[keys[i]]) { options[keys[i]] = DEFAULT_GRIDFS_BUCKET_OPTIONS[keys[i]]; } } } else { options = DEFAULT_GRIDFS_BUCKET_OPTIONS; } this.s = { db: db, options: options, _chunksCollection: db.collection(options.bucketName + '.chunks'), _filesCollection: db.collection(options.bucketName + '.files'), checkedIndexes: false, calledOpenUploadStream: false, promiseLibrary: db.s.promiseLibrary || Promise }; }
javascript
{ "resource": "" }
q21408
collections
train
function collections(db, options, callback) { let Collection = loadCollection(); options = Object.assign({}, options, { nameOnly: true }); // Let's get the collection names db.listCollections({}, options).toArray((err, documents) => { if (err != null) return handleCallback(callback, err, null); // Filter collections removing any illegal ones documents = documents.filter(doc => { return doc.name.indexOf('$') === -1; }); // Return the collection objects handleCallback( callback, null, documents.map(d => { return new Collection( db, db.s.topology, db.s.databaseName, d.name, db.s.pkFactory, db.s.options ); }) ); }); }
javascript
{ "resource": "" }
q21409
createIndex
train
function createIndex(db, name, fieldOrSpec, options, callback) { // Get the write concern options let finalOptions = Object.assign({}, { readPreference: ReadPreference.PRIMARY }, options); finalOptions = applyWriteConcern(finalOptions, { db }, options); // Ensure we have a callback if (finalOptions.writeConcern && typeof callback !== 'function') { throw MongoError.create({ message: 'Cannot use a writeConcern without a provided callback', driver: true }); } // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) return callback(new MongoError('topology was destroyed')); // Attempt to run using createIndexes command createIndexUsingCreateIndexes(db, name, fieldOrSpec, finalOptions, (err, result) => { if (err == null) return handleCallback(callback, err, result); /** * The following errors mean that the server recognized `createIndex` as a command so we don't need to fallback to an insert: * 67 = 'CannotCreateIndex' (malformed index options) * 85 = 'IndexOptionsConflict' (index already exists with different options) * 86 = 'IndexKeySpecsConflict' (index already exists with the same name) * 11000 = 'DuplicateKey' (couldn't build unique index because of dupes) * 11600 = 'InterruptedAtShutdown' (interrupted at shutdown) * 197 = 'InvalidIndexSpecificationOption' (`_id` with `background: true`) */ if ( err.code === 67 || err.code === 11000 || err.code === 85 || err.code === 86 || err.code === 11600 || err.code === 197 ) { return handleCallback(callback, err, result); } // Create command const doc = createCreateIndexCommand(db, name, fieldOrSpec, options); // Set no key checking finalOptions.checkKeys = false; // Insert document db.s.topology.insert( `${db.s.databaseName}.${CONSTANTS.SYSTEM_INDEX_COLLECTION}`, doc, finalOptions, (err, result) => { if (callback == null) return; if (err) return handleCallback(callback, err); if (result == null) return handleCallback(callback, null, null); if (result.result.writeErrors) return handleCallback(callback, MongoError.create(result.result.writeErrors[0]), null); handleCallback(callback, null, doc.name); } ); }); }
javascript
{ "resource": "" }
q21410
createListener
train
function createListener(db, e, object) { function listener(err) { if (object.listeners(e).length > 0) { object.emit(e, err, db); // Emit on all associated db's if available for (let i = 0; i < db.s.children.length; i++) { db.s.children[i].emit(e, err, db.s.children[i]); } } } return listener; }
javascript
{ "resource": "" }
q21411
dropCollection
train
function dropCollection(db, name, options, callback) { executeCommand(db, name, options, (err, result) => { // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) { return callback(new MongoError('topology was destroyed')); } if (err) return handleCallback(callback, err); if (result.ok) return handleCallback(callback, null, true); handleCallback(callback, null, false); }); }
javascript
{ "resource": "" }
q21412
dropDatabase
train
function dropDatabase(db, cmd, options, callback) { executeCommand(db, cmd, options, (err, result) => { // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) { return callback(new MongoError('topology was destroyed')); } if (callback == null) return; if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result.ok ? true : false); }); }
javascript
{ "resource": "" }
q21413
ensureIndex
train
function ensureIndex(db, name, fieldOrSpec, options, callback) { // Get the write concern options const finalOptions = applyWriteConcern({}, { db }, options); // Create command const selector = createCreateIndexCommand(db, name, fieldOrSpec, options); const index_name = selector.name; // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) return callback(new MongoError('topology was destroyed')); // Merge primary readPreference finalOptions.readPreference = ReadPreference.PRIMARY; // Check if the index already exists indexInformation(db, name, finalOptions, (err, indexInformation) => { if (err != null && err.code !== 26) return handleCallback(callback, err, null); // If the index does not exist, create it if (indexInformation == null || !indexInformation[index_name]) { createIndex(db, name, fieldOrSpec, options, callback); } else { if (typeof callback === 'function') return handleCallback(callback, null, index_name); } }); }
javascript
{ "resource": "" }
q21414
evaluate
train
function evaluate(db, code, parameters, options, callback) { let finalCode = code; let finalParameters = []; // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) return callback(new MongoError('topology was destroyed')); // If not a code object translate to one if (!(finalCode && finalCode._bsontype === 'Code')) finalCode = new Code(finalCode); // Ensure the parameters are correct if (parameters != null && !Array.isArray(parameters) && typeof parameters !== 'function') { finalParameters = [parameters]; } else if (parameters != null && Array.isArray(parameters) && typeof parameters !== 'function') { finalParameters = parameters; } // Create execution selector let cmd = { $eval: finalCode, args: finalParameters }; // Check if the nolock parameter is passed in if (options['nolock']) { cmd['nolock'] = options['nolock']; } // Set primary read preference options.readPreference = new ReadPreference(ReadPreference.PRIMARY); // Execute the command executeCommand(db, cmd, options, (err, result) => { if (err) return handleCallback(callback, err, null); if (result && result.ok === 1) return handleCallback(callback, null, result.retval); if (result) return handleCallback( callback, MongoError.create({ message: `eval failed: ${result.errmsg}`, driver: true }), null ); handleCallback(callback, err, result); }); }
javascript
{ "resource": "" }
q21415
executeDbAdminCommand
train
function executeDbAdminCommand(db, command, options, callback) { db.s.topology.command('admin.$cmd', command, options, (err, result) => { // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) { return callback(new MongoError('topology was destroyed')); } if (err) return handleCallback(callback, err); handleCallback(callback, null, result.result); }); }
javascript
{ "resource": "" }
q21416
indexInformation
train
function indexInformation(db, name, options, callback) { // If we specified full information const full = options['full'] == null ? false : options['full']; // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) return callback(new MongoError('topology was destroyed')); // Process all the results from the index command and collection function processResults(indexes) { // Contains all the information let info = {}; // Process all the indexes for (let i = 0; i < indexes.length; i++) { const index = indexes[i]; // Let's unpack the object info[index.name] = []; for (let name in index.key) { info[index.name].push([name, index.key[name]]); } } return info; } // Get the list of indexes of the specified collection db .collection(name) .listIndexes(options) .toArray((err, indexes) => { if (err) return callback(toError(err)); if (!Array.isArray(indexes)) return handleCallback(callback, null, []); if (full) return handleCallback(callback, null, indexes); handleCallback(callback, null, processResults(indexes)); }); }
javascript
{ "resource": "" }
q21417
processResults
train
function processResults(indexes) { // Contains all the information let info = {}; // Process all the indexes for (let i = 0; i < indexes.length; i++) { const index = indexes[i]; // Let's unpack the object info[index.name] = []; for (let name in index.key) { info[index.name].push([name, index.key[name]]); } } return info; }
javascript
{ "resource": "" }
q21418
listCollectionsTransforms
train
function listCollectionsTransforms(databaseName) { const matching = `${databaseName}.`; return { doc: doc => { const index = doc.name.indexOf(matching); // Remove database name if available if (doc.name && index === 0) { doc.name = doc.name.substr(index + matching.length); } return doc; } }; }
javascript
{ "resource": "" }
q21419
profilingInfo
train
function profilingInfo(db, options, callback) { try { db .collection('system.profile') .find({}, options) .toArray(callback); } catch (err) { return callback(err, null); } }
javascript
{ "resource": "" }
q21420
profilingLevel
train
function profilingLevel(db, options, callback) { executeCommand(db, { profile: -1 }, options, (err, doc) => { if (err == null && doc.ok === 1) { const was = doc.was; if (was === 0) return callback(null, 'off'); if (was === 1) return callback(null, 'slow_only'); if (was === 2) return callback(null, 'all'); return callback(new Error('Error: illegal profiling level value ' + was), null); } else { err != null ? callback(err, null) : callback(new Error('Error with profile command'), null); } }); }
javascript
{ "resource": "" }
q21421
removeUser
train
function removeUser(db, username, options, callback) { let Db = loadDb(); // Attempt to execute command executeAuthRemoveUserCommand(db, username, options, (err, result) => { if (err && err.code === -5000) { const finalOptions = applyWriteConcern(Object.assign({}, options), { db }, options); // If another db is set, use it; otherwise fall back to the current db const userDb = options.dbName ? new Db(options.dbName, db.s.topology, db.s.options) : db; // Fetch a user collection const collection = userDb.collection(CONSTANTS.SYSTEM_USER_COLLECTION); // Locate the user findOne(collection, { user: username }, finalOptions, (err, user) => { if (user == null) return handleCallback(callback, err, false); remove(collection, { user: username }, finalOptions, err => { handleCallback(callback, err, true); }); }); return; } if (err) return handleCallback(callback, err); handleCallback(callback, err, result); }); }
javascript
{ "resource": "" }
q21422
setProfilingLevel
train
function setProfilingLevel(db, level, options, callback) { const command = {}; let profile = 0; if (level === 'off') { profile = 0; } else if (level === 'slow_only') { profile = 1; } else if (level === 'all') { profile = 2; } else { return callback(new Error('Error: illegal profiling level value ' + level)); } // Set up the profile number command['profile'] = profile; executeCommand(db, command, options, (err, doc) => { if (err == null && doc.ok === 1) return callback(null, level); return err != null ? callback(err, null) : callback(new Error('Error with profile command'), null); }); }
javascript
{ "resource": "" }
q21423
validateDatabaseName
train
function validateDatabaseName(databaseName) { if (typeof databaseName !== 'string') throw MongoError.create({ message: 'database name must be a string', driver: true }); if (databaseName.length === 0) throw MongoError.create({ message: 'database name cannot be the empty string', driver: true }); if (databaseName === '$external') return; const invalidChars = [' ', '.', '$', '/', '\\']; for (let i = 0; i < invalidChars.length; i++) { if (databaseName.indexOf(invalidChars[i]) !== -1) throw MongoError.create({ message: "database names cannot contain the character '" + invalidChars[i] + "'", driver: true }); } }
javascript
{ "resource": "" }
q21424
createCreateIndexCommand
train
function createCreateIndexCommand(db, name, fieldOrSpec, options) { const indexParameters = parseIndexOptions(fieldOrSpec); const fieldHash = indexParameters.fieldHash; // Generate the index name const indexName = typeof options.name === 'string' ? options.name : indexParameters.name; const selector = { ns: db.databaseName + '.' + name, key: fieldHash, name: indexName }; // Ensure we have a correct finalUnique const finalUnique = options == null || 'object' === typeof options ? false : options; // Set up options options = options == null || typeof options === 'boolean' ? {} : options; // Add all the options const keysToOmit = Object.keys(selector); for (let optionName in options) { if (keysToOmit.indexOf(optionName) === -1) { selector[optionName] = options[optionName]; } } if (selector['unique'] == null) selector['unique'] = finalUnique; // Remove any write concern operations const removeKeys = ['w', 'wtimeout', 'j', 'fsync', 'readPreference', 'session']; for (let i = 0; i < removeKeys.length; i++) { delete selector[removeKeys[i]]; } // Return the command creation selector return selector; }
javascript
{ "resource": "" }
q21425
createIndexUsingCreateIndexes
train
function createIndexUsingCreateIndexes(db, name, fieldOrSpec, options, callback) { // Build the index const indexParameters = parseIndexOptions(fieldOrSpec); // Generate the index name const indexName = typeof options.name === 'string' ? options.name : indexParameters.name; // Set up the index const indexes = [{ name: indexName, key: indexParameters.fieldHash }]; // merge all the options const keysToOmit = Object.keys(indexes[0]).concat([ 'writeConcern', 'w', 'wtimeout', 'j', 'fsync', 'readPreference', 'session' ]); for (let optionName in options) { if (keysToOmit.indexOf(optionName) === -1) { indexes[0][optionName] = options[optionName]; } } // Get capabilities const capabilities = db.s.topology.capabilities(); // Did the user pass in a collation, check if our write server supports it if (indexes[0].collation && capabilities && !capabilities.commandsTakeCollation) { // Create a new error const error = new MongoError('server/primary/mongos does not support collation'); error.code = 67; // Return the error return callback(error); } // Create command, apply write concern to command const cmd = applyWriteConcern({ createIndexes: name, indexes }, { db }, options); // ReadPreference primary options.readPreference = ReadPreference.PRIMARY; // Build the command executeCommand(db, cmd, options, (err, result) => { if (err) return handleCallback(callback, err, null); if (result.ok === 0) return handleCallback(callback, toError(result), null); // Return the indexName for backward compatibility handleCallback(callback, null, indexName); }); }
javascript
{ "resource": "" }
q21426
executeAuthCreateUserCommand
train
function executeAuthCreateUserCommand(db, username, password, options, callback) { // Special case where there is no password ($external users) if (typeof username === 'string' && password != null && typeof password === 'object') { options = password; password = null; } // Unpack all options if (typeof options === 'function') { callback = options; options = {}; } // Error out if we digestPassword set if (options.digestPassword != null) { return callback( toError( "The digestPassword option is not supported via add_user. Please use db.command('createUser', ...) instead for this option." ) ); } // Get additional values const customData = options.customData != null ? options.customData : {}; let roles = Array.isArray(options.roles) ? options.roles : []; const maxTimeMS = typeof options.maxTimeMS === 'number' ? options.maxTimeMS : null; // If not roles defined print deprecated message if (roles.length === 0) { console.log('Creating a user without roles is deprecated in MongoDB >= 2.6'); } // Get the error options const commandOptions = { writeCommand: true }; if (options['dbName']) commandOptions.dbName = options['dbName']; // Add maxTimeMS to options if set if (maxTimeMS != null) commandOptions.maxTimeMS = maxTimeMS; // Check the db name and add roles if needed if ( (db.databaseName.toLowerCase() === 'admin' || options.dbName === 'admin') && !Array.isArray(options.roles) ) { roles = ['root']; } else if (!Array.isArray(options.roles)) { roles = ['dbOwner']; } const digestPassword = db.s.topology.lastIsMaster().maxWireVersion >= 7; // Build the command to execute let command = { createUser: username, customData: customData, roles: roles, digestPassword }; // Apply write concern to command command = applyWriteConcern(command, { db }, options); let userPassword = password; if (!digestPassword) { // Use node md5 generator const md5 = crypto.createHash('md5'); // Generate keys used for authentication md5.update(username + ':mongo:' + password); userPassword = md5.digest('hex'); } // No password if (typeof password === 'string') { command.pwd = userPassword; } // Force write using primary commandOptions.readPreference = ReadPreference.primary; // Execute the command executeCommand(db, command, commandOptions, (err, result) => { if (err && err.ok === 0 && err.code === undefined) return handleCallback(callback, { code: -5000 }, null); if (err) return handleCallback(callback, err, null); handleCallback( callback, !result.ok ? toError(result) : null, result.ok ? [{ user: username, pwd: '' }] : null ); }); }
javascript
{ "resource": "" }
q21427
executeAuthRemoveUserCommand
train
function executeAuthRemoveUserCommand(db, username, options, callback) { if (typeof options === 'function') (callback = options), (options = {}); options = options || {}; // Did the user destroy the topology if (db.serverConfig && db.serverConfig.isDestroyed()) return callback(new MongoError('topology was destroyed')); // Get the error options const commandOptions = { writeCommand: true }; if (options['dbName']) commandOptions.dbName = options['dbName']; // Get additional values const maxTimeMS = typeof options.maxTimeMS === 'number' ? options.maxTimeMS : null; // Add maxTimeMS to options if set if (maxTimeMS != null) commandOptions.maxTimeMS = maxTimeMS; // Build the command to execute let command = { dropUser: username }; // Apply write concern to command command = applyWriteConcern(command, { db }, options); // Force write using primary commandOptions.readPreference = ReadPreference.primary; // Execute the command executeCommand(db, command, commandOptions, (err, result) => { if (err && !err.ok && err.code === undefined) return handleCallback(callback, { code: -5000 }); if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result.ok ? true : false); }); }
javascript
{ "resource": "" }
q21428
checkForAtomicOperators
train
function checkForAtomicOperators(update) { const keys = Object.keys(update); // same errors as the server would give for update doc lacking atomic operators if (keys.length === 0) { return toError('The update operation document must contain at least one atomic operator.'); } if (keys[0][0] !== '$') { return toError('the update operation document must contain atomic operators.'); } }
javascript
{ "resource": "" }
q21429
count
train
function count(coll, query, options, callback) { if (typeof options === 'function') (callback = options), (options = {}); options = Object.assign({}, options); options.collectionName = coll.s.name; options.readPreference = resolveReadPreference(options, { db: coll.s.db, collection: coll }); let cmd; try { cmd = buildCountCommand(coll, query, options); } catch (err) { return callback(err); } executeCommand(coll.s.db, cmd, options, (err, result) => { if (err) return handleCallback(callback, err); handleCallback(callback, null, result.n); }); }
javascript
{ "resource": "" }
q21430
buildCountCommand
train
function buildCountCommand(collectionOrCursor, query, options) { const skip = options.skip; const limit = options.limit; let hint = options.hint; const maxTimeMS = options.maxTimeMS; query = query || {}; // Final query const cmd = { count: options.collectionName, query: query }; // check if collectionOrCursor is a cursor by using cursor.s.numberOfRetries if (collectionOrCursor.s.numberOfRetries) { if (collectionOrCursor.s.options.hint) { hint = collectionOrCursor.s.options.hint; } else if (collectionOrCursor.s.cmd.hint) { hint = collectionOrCursor.s.cmd.hint; } decorateWithCollation(cmd, collectionOrCursor, collectionOrCursor.s.cmd); } else { decorateWithCollation(cmd, collectionOrCursor, options); } // Add limit, skip and maxTimeMS if defined if (typeof skip === 'number') cmd.skip = skip; if (typeof limit === 'number') cmd.limit = limit; if (typeof maxTimeMS === 'number') cmd.maxTimeMS = maxTimeMS; if (hint) cmd.hint = hint; // Do we have a readConcern specified decorateWithReadConcern(cmd, collectionOrCursor); return cmd; }
javascript
{ "resource": "" }
q21431
createIndex
train
function createIndex(coll, fieldOrSpec, options, callback) { createIndexDb(coll.s.db, coll.s.name, fieldOrSpec, options, callback); }
javascript
{ "resource": "" }
q21432
deleteMany
train
function deleteMany(coll, filter, options, callback) { options.single = false; removeDocuments(coll, filter, options, (err, r) => deleteCallback(err, r, callback)); }
javascript
{ "resource": "" }
q21433
deleteOne
train
function deleteOne(coll, filter, options, callback) { options.single = true; removeDocuments(coll, filter, options, (err, r) => deleteCallback(err, r, callback)); }
javascript
{ "resource": "" }
q21434
distinct
train
function distinct(coll, key, query, options, callback) { // maxTimeMS option const maxTimeMS = options.maxTimeMS; // Distinct command const cmd = { distinct: coll.s.name, key: key, query: query }; options = Object.assign({}, options); // Ensure we have the right read preference inheritance options.readPreference = resolveReadPreference(options, { db: coll.s.db, collection: coll }); // Add maxTimeMS if defined if (typeof maxTimeMS === 'number') cmd.maxTimeMS = maxTimeMS; // Do we have a readConcern specified decorateWithReadConcern(cmd, coll, options); // Have we specified collation try { decorateWithCollation(cmd, coll, options); } catch (err) { return callback(err, null); } // Execute the command executeCommand(coll.s.db, cmd, options, (err, result) => { if (err) return handleCallback(callback, err); handleCallback(callback, null, result.values); }); }
javascript
{ "resource": "" }
q21435
dropIndex
train
function dropIndex(coll, indexName, options, callback) { // Delete index command const cmd = { dropIndexes: coll.s.name, index: indexName }; // Decorate command with writeConcern if supported applyWriteConcern(cmd, { db: coll.s.db, collection: coll }, options); // Execute command executeCommand(coll.s.db, cmd, options, (err, result) => { if (typeof callback !== 'function') return; if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result); }); }
javascript
{ "resource": "" }
q21436
dropIndexes
train
function dropIndexes(coll, options, callback) { dropIndex(coll, '*', options, err => { if (err) return handleCallback(callback, err, false); handleCallback(callback, null, true); }); }
javascript
{ "resource": "" }
q21437
ensureIndex
train
function ensureIndex(coll, fieldOrSpec, options, callback) { ensureIndexDb(coll.s.db, coll.s.name, fieldOrSpec, options, callback); }
javascript
{ "resource": "" }
q21438
findAndModify
train
function findAndModify(coll, query, sort, doc, options, callback) { // Create findAndModify command object const queryObject = { findAndModify: coll.s.name, query: query }; sort = formattedOrderClause(sort); if (sort) { queryObject.sort = sort; } queryObject.new = options.new ? true : false; queryObject.remove = options.remove ? true : false; queryObject.upsert = options.upsert ? true : false; const projection = options.projection || options.fields; if (projection) { queryObject.fields = projection; } if (options.arrayFilters) { queryObject.arrayFilters = options.arrayFilters; delete options.arrayFilters; } if (doc && !options.remove) { queryObject.update = doc; } if (options.maxTimeMS) queryObject.maxTimeMS = options.maxTimeMS; // Either use override on the function, or go back to default on either the collection // level or db options.serializeFunctions = options.serializeFunctions || coll.s.serializeFunctions; // No check on the documents options.checkKeys = false; // Final options for retryable writes and write concern let finalOptions = Object.assign({}, options); finalOptions = applyRetryableWrites(finalOptions, coll.s.db); finalOptions = applyWriteConcern(finalOptions, { db: coll.s.db, collection: coll }, options); // Decorate the findAndModify command with the write Concern if (finalOptions.writeConcern) { queryObject.writeConcern = finalOptions.writeConcern; } // Have we specified bypassDocumentValidation if (finalOptions.bypassDocumentValidation === true) { queryObject.bypassDocumentValidation = finalOptions.bypassDocumentValidation; } finalOptions.readPreference = ReadPreference.primary; // Have we specified collation try { decorateWithCollation(queryObject, coll, finalOptions); } catch (err) { return callback(err, null); } // Execute the command executeCommand(coll.s.db, queryObject, finalOptions, (err, result) => { if (err) return handleCallback(callback, err, null); return handleCallback(callback, null, result); }); }
javascript
{ "resource": "" }
q21439
findAndRemove
train
function findAndRemove(coll, query, sort, options, callback) { // Add the remove option options.remove = true; // Execute the callback findAndModify(coll, query, sort, null, options, callback); }
javascript
{ "resource": "" }
q21440
findOne
train
function findOne(coll, query, options, callback) { const cursor = coll .find(query, options) .limit(-1) .batchSize(1); // Return the item cursor.next((err, item) => { if (err != null) return handleCallback(callback, toError(err), null); handleCallback(callback, null, item); }); }
javascript
{ "resource": "" }
q21441
findOneAndDelete
train
function findOneAndDelete(coll, filter, options, callback) { // Final options const finalOptions = Object.assign({}, options); finalOptions.fields = options.projection; finalOptions.remove = true; // Execute find and Modify findAndModify(coll, filter, options.sort, null, finalOptions, callback); }
javascript
{ "resource": "" }
q21442
findOneAndReplace
train
function findOneAndReplace(coll, filter, replacement, options, callback) { // Final options const finalOptions = Object.assign({}, options); finalOptions.fields = options.projection; finalOptions.update = true; finalOptions.new = options.returnOriginal !== void 0 ? !options.returnOriginal : false; finalOptions.upsert = options.upsert !== void 0 ? !!options.upsert : false; // Execute findAndModify findAndModify(coll, filter, options.sort, replacement, finalOptions, callback); }
javascript
{ "resource": "" }
q21443
geoHaystackSearch
train
function geoHaystackSearch(coll, x, y, options, callback) { // Build command object let commandObject = { geoSearch: coll.s.name, near: [x, y] }; // Remove read preference from hash if it exists commandObject = decorateCommand(commandObject, options, ['readPreference', 'session']); options = Object.assign({}, options); // Ensure we have the right read preference inheritance options.readPreference = resolveReadPreference(options, { db: coll.s.db, collection: coll }); // Do we have a readConcern specified decorateWithReadConcern(commandObject, coll, options); // Execute the command executeCommand(coll.s.db, commandObject, options, (err, res) => { if (err) return handleCallback(callback, err); if (res.err || res.errmsg) handleCallback(callback, toError(res)); // should we only be returning res.results here? Not sure if the user // should see the other return information handleCallback(callback, null, res); }); }
javascript
{ "resource": "" }
q21444
group
train
function group(coll, keys, condition, initial, reduce, finalize, command, options, callback) { // Execute using the command if (command) { const reduceFunction = reduce && reduce._bsontype === 'Code' ? reduce : new Code(reduce); const selector = { group: { ns: coll.s.name, $reduce: reduceFunction, cond: condition, initial: initial, out: 'inline' } }; // if finalize is defined if (finalize != null) selector.group['finalize'] = finalize; // Set up group selector if ('function' === typeof keys || (keys && keys._bsontype === 'Code')) { selector.group.$keyf = keys && keys._bsontype === 'Code' ? keys : new Code(keys); } else { const hash = {}; keys.forEach(key => { hash[key] = 1; }); selector.group.key = hash; } options = Object.assign({}, options); // Ensure we have the right read preference inheritance options.readPreference = resolveReadPreference(options, { db: coll.s.db, collection: coll }); // Do we have a readConcern specified decorateWithReadConcern(selector, coll, options); // Have we specified collation try { decorateWithCollation(selector, coll, options); } catch (err) { return callback(err, null); } // Execute command executeCommand(coll.s.db, selector, options, (err, result) => { if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result.retval); }); } else { // Create execution scope const scope = reduce != null && reduce._bsontype === 'Code' ? reduce.scope : {}; scope.ns = coll.s.name; scope.keys = keys; scope.condition = condition; scope.initial = initial; // Pass in the function text to execute within mongodb. const groupfn = groupFunction.replace(/ reduce;/, reduce.toString() + ';'); evaluate(coll.s.db, new Code(groupfn, scope), null, options, (err, results) => { if (err) return handleCallback(callback, err, null); handleCallback(callback, null, results.result || results); }); } }
javascript
{ "resource": "" }
q21445
indexes
train
function indexes(coll, options, callback) { options = Object.assign({}, { full: true }, options); indexInformationDb(coll.s.db, coll.s.name, options, callback); }
javascript
{ "resource": "" }
q21446
indexExists
train
function indexExists(coll, indexes, options, callback) { indexInformation(coll, options, (err, indexInformation) => { // If we have an error return if (err != null) return handleCallback(callback, err, null); // Let's check for the index names if (!Array.isArray(indexes)) return handleCallback(callback, null, indexInformation[indexes] != null); // Check in list of indexes for (let i = 0; i < indexes.length; i++) { if (indexInformation[indexes[i]] == null) { return handleCallback(callback, null, false); } } // All keys found return true return handleCallback(callback, null, true); }); }
javascript
{ "resource": "" }
q21447
indexInformation
train
function indexInformation(coll, options, callback) { indexInformationDb(coll.s.db, coll.s.name, options, callback); }
javascript
{ "resource": "" }
q21448
insertOne
train
function insertOne(coll, doc, options, callback) { if (Array.isArray(doc)) { return callback( MongoError.create({ message: 'doc parameter must be an object', driver: true }) ); } insertDocuments(coll, [doc], options, (err, r) => { if (callback == null) return; if (err && callback) return callback(err); // Workaround for pre 2.6 servers if (r == null) return callback(null, { result: { ok: 1 } }); // Add values to top level to ensure crud spec compatibility r.insertedCount = r.result.n; r.insertedId = doc._id; if (callback) callback(null, r); }); }
javascript
{ "resource": "" }
q21449
isCapped
train
function isCapped(coll, options, callback) { optionsOp(coll, options, (err, document) => { if (err) return handleCallback(callback, err); handleCallback(callback, null, !!(document && document.capped)); }); }
javascript
{ "resource": "" }
q21450
optionsOp
train
function optionsOp(coll, opts, callback) { coll.s.db.listCollections({ name: coll.s.name }, opts).toArray((err, collections) => { if (err) return handleCallback(callback, err); if (collections.length === 0) { return handleCallback( callback, MongoError.create({ message: `collection ${coll.s.namespace} not found`, driver: true }) ); } handleCallback(callback, err, collections[0].options || null); }); }
javascript
{ "resource": "" }
q21451
parallelCollectionScan
train
function parallelCollectionScan(coll, options, callback) { // Create command object const commandObject = { parallelCollectionScan: coll.s.name, numCursors: options.numCursors }; // Do we have a readConcern specified decorateWithReadConcern(commandObject, coll, options); // Store the raw value const raw = options.raw; delete options['raw']; // Execute the command executeCommand(coll.s.db, commandObject, options, (err, result) => { if (err) return handleCallback(callback, err, null); if (result == null) return handleCallback( callback, new Error('no result returned for parallelCollectionScan'), null ); options = Object.assign({ explicitlyIgnoreSession: true }, options); const cursors = []; // Add the raw back to the option if (raw) options.raw = raw; // Create command cursors for each item for (let i = 0; i < result.cursors.length; i++) { const rawId = result.cursors[i].cursor.id; // Convert cursorId to Long if needed const cursorId = typeof rawId === 'number' ? Long.fromNumber(rawId) : rawId; // Add a command cursor cursors.push(coll.s.topology.cursor(coll.s.namespace, cursorId, options)); } handleCallback(callback, null, cursors); }); }
javascript
{ "resource": "" }
q21452
prepareDocs
train
function prepareDocs(coll, docs, options) { const forceServerObjectId = typeof options.forceServerObjectId === 'boolean' ? options.forceServerObjectId : coll.s.db.options.forceServerObjectId; // no need to modify the docs if server sets the ObjectId if (forceServerObjectId === true) { return docs; } return docs.map(doc => { if (forceServerObjectId !== true && doc._id == null) { doc._id = coll.s.pkFactory.createPk(); } return doc; }); }
javascript
{ "resource": "" }
q21453
processScope
train
function processScope(scope) { if (!isObject(scope) || scope._bsontype === 'ObjectID') { return scope; } const keys = Object.keys(scope); let key; const new_scope = {}; for (let i = keys.length - 1; i >= 0; i--) { key = keys[i]; if ('function' === typeof scope[key]) { new_scope[key] = new Code(String(scope[key])); } else { new_scope[key] = processScope(scope[key]); } } return new_scope; }
javascript
{ "resource": "" }
q21454
reIndex
train
function reIndex(coll, options, callback) { // Reindex const cmd = { reIndex: coll.s.name }; // Execute the command executeCommand(coll.s.db, cmd, options, (err, result) => { if (callback == null) return; if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result.ok ? true : false); }); }
javascript
{ "resource": "" }
q21455
replaceOne
train
function replaceOne(coll, filter, doc, options, callback) { // Set single document update options.multi = false; // Execute update updateDocuments(coll, filter, doc, options, (err, r) => { if (callback == null) return; if (err && callback) return callback(err); if (r == null) return callback(null, { result: { ok: 1 } }); r.modifiedCount = r.result.nModified != null ? r.result.nModified : r.result.n; r.upsertedId = Array.isArray(r.result.upserted) && r.result.upserted.length > 0 ? r.result.upserted[0] // FIXME(major): should be `r.result.upserted[0]._id` : null; r.upsertedCount = Array.isArray(r.result.upserted) && r.result.upserted.length ? r.result.upserted.length : 0; r.matchedCount = Array.isArray(r.result.upserted) && r.result.upserted.length > 0 ? 0 : r.result.n; r.ops = [doc]; if (callback) callback(null, r); }); }
javascript
{ "resource": "" }
q21456
save
train
function save(coll, doc, options, callback) { // Get the write concern options const finalOptions = applyWriteConcern( Object.assign({}, options), { db: coll.s.db, collection: coll }, options ); // Establish if we need to perform an insert or update if (doc._id != null) { finalOptions.upsert = true; return updateDocuments(coll, { _id: doc._id }, doc, finalOptions, callback); } // Insert the document insertDocuments(coll, [doc], finalOptions, (err, result) => { if (callback == null) return; if (doc == null) return handleCallback(callback, null, null); if (err) return handleCallback(callback, err, null); handleCallback(callback, null, result); }); }
javascript
{ "resource": "" }
q21457
stats
train
function stats(coll, options, callback) { // Build command object const commandObject = { collStats: coll.s.name }; // Check if we have the scale value if (options['scale'] != null) commandObject['scale'] = options['scale']; options = Object.assign({}, options); // Ensure we have the right read preference inheritance options.readPreference = resolveReadPreference(options, { db: coll.s.db, collection: coll }); // Execute the command executeCommand(coll.s.db, commandObject, options, callback); }
javascript
{ "resource": "" }
q21458
updateMany
train
function updateMany(coll, filter, update, options, callback) { // Set single document update options.multi = true; // Execute update updateDocuments(coll, filter, update, options, (err, r) => updateCallback(err, r, callback)); }
javascript
{ "resource": "" }
q21459
updateOne
train
function updateOne(coll, filter, update, options, callback) { // Set single document update options.multi = false; // Execute update updateDocuments(coll, filter, update, options, (err, r) => updateCallback(err, r, callback)); }
javascript
{ "resource": "" }
q21460
train
function(topology, storeOptions) { var self = this; var storedOps = []; storeOptions = storeOptions || { force: false, bufferMaxEntries: -1 }; // Internal state this.s = { storedOps: storedOps, storeOptions: storeOptions, topology: topology }; Object.defineProperty(this, 'length', { enumerable: true, get: function() { return self.s.storedOps.length; } }); }
javascript
{ "resource": "" }
q21461
GridFSBucketWriteStream
train
function GridFSBucketWriteStream(bucket, filename, options) { options = options || {}; this.bucket = bucket; this.chunks = bucket.s._chunksCollection; this.filename = filename; this.files = bucket.s._filesCollection; this.options = options; // Signals the write is all done this.done = false; this.id = options.id ? options.id : core.BSON.ObjectId(); this.chunkSizeBytes = this.options.chunkSizeBytes; this.bufToStore = Buffer.alloc(this.chunkSizeBytes); this.length = 0; this.md5 = !options.disableMD5 && crypto.createHash('md5'); this.n = 0; this.pos = 0; this.state = { streamEnd: false, outstandingRequests: 0, errored: false, aborted: false, promiseLibrary: this.bucket.s.promiseLibrary }; if (!this.bucket.s.calledOpenUploadStream) { this.bucket.s.calledOpenUploadStream = true; var _this = this; checkIndexes(this, function() { _this.bucket.s.checkedIndexes = true; _this.bucket.emit('index'); }); } }
javascript
{ "resource": "" }
q21462
train
function(options) { var r = null; if (options.readPreference) { r = options.readPreference; } else { return options; } if (typeof r === 'string') { options.readPreference = new ReadPreference(r); } else if (r && !(r instanceof ReadPreference) && typeof r === 'object') { const mode = r.mode || r.preference; if (mode && typeof mode === 'string') { options.readPreference = new ReadPreference(mode, r.tags, { maxStalenessSeconds: r.maxStalenessSeconds }); } } else if (!(r instanceof ReadPreference)) { throw new TypeError('Invalid read preference: ' + r); } return options; }
javascript
{ "resource": "" }
q21463
train
function(obj, name, value) { Object.defineProperty(obj, name, { enumerable: true, get: function() { return value; } }); }
javascript
{ "resource": "" }
q21464
train
function(error) { if (error instanceof Error) return error; var msg = error.err || error.errmsg || error.errMessage || error; var e = MongoError.create({ message: msg, driver: true }); // Get all object keys var keys = typeof error === 'object' ? Object.keys(error) : []; for (var i = 0; i < keys.length; i++) { try { e[keys[i]] = error[keys[i]]; } catch (err) { // continue } } return e; }
javascript
{ "resource": "" }
q21465
train
function(fieldOrSpec) { var fieldHash = {}; var indexes = []; var keys; // Get all the fields accordingly if ('string' === typeof fieldOrSpec) { // 'type' indexes.push(fieldOrSpec + '_' + 1); fieldHash[fieldOrSpec] = 1; } else if (Array.isArray(fieldOrSpec)) { fieldOrSpec.forEach(function(f) { if ('string' === typeof f) { // [{location:'2d'}, 'type'] indexes.push(f + '_' + 1); fieldHash[f] = 1; } else if (Array.isArray(f)) { // [['location', '2d'],['type', 1]] indexes.push(f[0] + '_' + (f[1] || 1)); fieldHash[f[0]] = f[1] || 1; } else if (isObject(f)) { // [{location:'2d'}, {type:1}] keys = Object.keys(f); keys.forEach(function(k) { indexes.push(k + '_' + f[k]); fieldHash[k] = f[k]; }); } else { // undefined (ignore) } }); } else if (isObject(fieldOrSpec)) { // {location:'2d', type:1} keys = Object.keys(fieldOrSpec); keys.forEach(function(key) { indexes.push(key + '_' + fieldOrSpec[key]); fieldHash[key] = fieldOrSpec[key]; }); } return { name: indexes.join('_'), keys: keys, fieldHash: fieldHash }; }
javascript
{ "resource": "" }
q21466
train
function(target, source) { var translations = { // SSL translation options sslCA: 'ca', sslCRL: 'crl', sslValidate: 'rejectUnauthorized', sslKey: 'key', sslCert: 'cert', sslPass: 'passphrase', // SocketTimeout translation options socketTimeoutMS: 'socketTimeout', connectTimeoutMS: 'connectionTimeout', // Replicaset options replicaSet: 'setName', rs_name: 'setName', secondaryAcceptableLatencyMS: 'acceptableLatency', connectWithNoPrimary: 'secondaryOnlyConnectionAllowed', // Mongos options acceptableLatencyMS: 'localThresholdMS' }; for (var name in source) { if (translations[name]) { target[translations[name]] = source[name]; } else { target[name] = source[name]; } } return target; }
javascript
{ "resource": "" }
q21467
train
function(targetOptions, sourceOptions, keys, mergeWriteConcern) { // Mix in any allowed options for (var i = 0; i < keys.length; i++) { if (!targetOptions[keys[i]] && sourceOptions[keys[i]] !== undefined) { targetOptions[keys[i]] = sourceOptions[keys[i]]; } } // No merging of write concern if (!mergeWriteConcern) return targetOptions; // Found no write Concern options var found = false; for (i = 0; i < writeConcernKeys.length; i++) { if (targetOptions[writeConcernKeys[i]]) { found = true; break; } } if (!found) { for (i = 0; i < writeConcernKeys.length; i++) { if (sourceOptions[writeConcernKeys[i]]) { targetOptions[writeConcernKeys[i]] = sourceOptions[writeConcernKeys[i]]; } } } return targetOptions; }
javascript
{ "resource": "" }
q21468
applyWriteConcern
train
function applyWriteConcern(target, sources, options) { options = options || {}; const db = sources.db; const coll = sources.collection; if (options.session && options.session.inTransaction()) { // writeConcern is not allowed within a multi-statement transaction if (target.writeConcern) { delete target.writeConcern; } return target; } if (options.w != null || options.j != null || options.fsync != null) { const writeConcern = {}; if (options.w != null) writeConcern.w = options.w; if (options.wtimeout != null) writeConcern.wtimeout = options.wtimeout; if (options.j != null) writeConcern.j = options.j; if (options.fsync != null) writeConcern.fsync = options.fsync; return Object.assign(target, { writeConcern }); } if ( coll && (coll.writeConcern.w != null || coll.writeConcern.j != null || coll.writeConcern.fsync != null) ) { return Object.assign(target, { writeConcern: Object.assign({}, coll.writeConcern) }); } if ( db && (db.writeConcern.w != null || db.writeConcern.j != null || db.writeConcern.fsync != null) ) { return Object.assign(target, { writeConcern: Object.assign({}, db.writeConcern) }); } return target; }
javascript
{ "resource": "" }
q21469
deprecateOptions
train
function deprecateOptions(config, fn) { if (process.noDeprecation === true) { return fn; } const msgHandler = config.msgHandler ? config.msgHandler : defaultMsgHandler; const optionsWarned = new Set(); function deprecated() { const options = arguments[config.optionsIndex]; // ensure options is a valid, non-empty object, otherwise short-circuit if (!isObject(options) || Object.keys(options).length === 0) { return fn.apply(this, arguments); } config.deprecatedOptions.forEach(deprecatedOption => { if (options.hasOwnProperty(deprecatedOption) && !optionsWarned.has(deprecatedOption)) { optionsWarned.add(deprecatedOption); const msg = msgHandler(config.name, deprecatedOption); emitDeprecationWarning(msg); if (this && this.getLogger) { const logger = this.getLogger(); if (logger) { logger.warn(msg); } } } }); return fn.apply(this, arguments); } // These lines copied from https://github.com/nodejs/node/blob/25e5ae41688676a5fd29b2e2e7602168eee4ceb5/lib/internal/util.js#L73-L80 // The wrapper will keep the same prototype as fn to maintain prototype chain Object.setPrototypeOf(deprecated, fn); if (fn.prototype) { // Setting this (rather than using Object.setPrototype, as above) ensures // that calling the unwrapped constructor gives an instanceof the wrapped // constructor. deprecated.prototype = fn.prototype; } return deprecated; }
javascript
{ "resource": "" }
q21470
count
train
function count(cursor, applySkipLimit, opts, callback) { if (applySkipLimit) { if (typeof cursor.cursorSkip() === 'number') opts.skip = cursor.cursorSkip(); if (typeof cursor.cursorLimit() === 'number') opts.limit = cursor.cursorLimit(); } // Ensure we have the right read preference inheritance if (opts.readPreference) { cursor.setReadPreference(opts.readPreference); } if ( typeof opts.maxTimeMS !== 'number' && cursor.s.cmd && typeof cursor.s.cmd.maxTimeMS === 'number' ) { opts.maxTimeMS = cursor.s.cmd.maxTimeMS; } let options = {}; options.skip = opts.skip; options.limit = opts.limit; options.hint = opts.hint; options.maxTimeMS = opts.maxTimeMS; // Command const delimiter = cursor.s.ns.indexOf('.'); options.collectionName = cursor.s.ns.substr(delimiter + 1); let command; try { command = buildCountCommand(cursor, cursor.s.cmd.query, options); } catch (err) { return callback(err); } // Set cursor server to the same as the topology cursor.server = cursor.topology.s.coreTopology; // Execute the command cursor.s.topology.command( `${cursor.s.ns.substr(0, delimiter)}.$cmd`, command, cursor.s.options, (err, result) => { callback(err, result ? result.result.n : null); } ); }
javascript
{ "resource": "" }
q21471
each
train
function each(cursor, callback) { let Cursor = loadCursor(); if (!callback) throw MongoError.create({ message: 'callback is mandatory', driver: true }); if (cursor.isNotified()) return; if (cursor.s.state === Cursor.CLOSED || cursor.isDead()) { return handleCallback( callback, MongoError.create({ message: 'Cursor is closed', driver: true }) ); } if (cursor.s.state === Cursor.INIT) cursor.s.state = Cursor.OPEN; // Define function to avoid global scope escape let fn = null; // Trampoline all the entries if (cursor.bufferedCount() > 0) { while ((fn = loop(cursor, callback))) fn(cursor, callback); each(cursor, callback); } else { cursor.next((err, item) => { if (err) return handleCallback(callback, err); if (item == null) { return cursor.close({ skipKillCursors: true }, () => handleCallback(callback, null, null)); } if (handleCallback(callback, null, item) === false) return; each(cursor, callback); }); } }
javascript
{ "resource": "" }
q21472
next
train
function next(cursor, callback) { // Return the currentDoc if someone called hasNext first if (cursor.s.currentDoc) { const doc = cursor.s.currentDoc; cursor.s.currentDoc = null; return callback(null, doc); } // Return the next object nextObject(cursor, callback); }
javascript
{ "resource": "" }
q21473
train
function(file, mongoObject, writeConcern) { if (!(this instanceof Chunk)) return new Chunk(file, mongoObject); this.file = file; var mongoObjectFinal = mongoObject == null ? {} : mongoObject; this.writeConcern = writeConcern || { w: 1 }; this.objectId = mongoObjectFinal._id == null ? new ObjectID() : mongoObjectFinal._id; this.chunkNumber = mongoObjectFinal.n == null ? 0 : mongoObjectFinal.n; this.data = new Binary(); if (typeof mongoObjectFinal.data === 'string') { var buffer = Buffer.alloc(mongoObjectFinal.data.length); buffer.write(mongoObjectFinal.data, 0, mongoObjectFinal.data.length, 'binary'); this.data = new Binary(buffer); } else if (Array.isArray(mongoObjectFinal.data)) { buffer = Buffer.alloc(mongoObjectFinal.data.length); var data = mongoObjectFinal.data.join(''); buffer.write(data, 0, data.length, 'binary'); this.data = new Binary(buffer); } else if (mongoObjectFinal.data && mongoObjectFinal.data._bsontype === 'Binary') { this.data = mongoObjectFinal.data; } else if (!Buffer.isBuffer(mongoObjectFinal.data) && !(mongoObjectFinal.data == null)) { throw Error('Illegal chunk format'); } // Update position this.internalPosition = 0; }
javascript
{ "resource": "" }
q21474
train
function(bson, ns, cmd, options, topology, topologyOptions) { CoreCursor.apply(this, Array.prototype.slice.call(arguments, 0)); var state = CommandCursor.INIT; var streamOptions = {}; // MaxTimeMS var maxTimeMS = null; // Get the promiseLibrary var promiseLibrary = options.promiseLibrary || Promise; // Set up Readable.call(this, { objectMode: true }); // Internal state this.s = { // MaxTimeMS maxTimeMS: maxTimeMS, // State state: state, // Stream options streamOptions: streamOptions, // BSON bson: bson, // Namespace ns: ns, // Command cmd: cmd, // Options options: options, // Topology topology: topology, // Topology Options topologyOptions: topologyOptions, // Promise library promiseLibrary: promiseLibrary, // Optional ClientSession session: options.session }; }
javascript
{ "resource": "" }
q21475
train
function(self, callback) { // Calcuate the length var mongoObject = { _id: self.fileId, filename: self.filename, contentType: self.contentType, length: self.position ? self.position : 0, chunkSize: self.chunkSize, uploadDate: self.uploadDate, aliases: self.aliases, metadata: self.metadata }; var md5Command = { filemd5: self.fileId, root: self.root }; self.db.command(md5Command, function(err, results) { if (err) return callback(err); mongoObject.md5 = results.md5; callback(null, mongoObject); }); }
javascript
{ "resource": "" }
q21476
train
function(self, chunkNumber, options, callback) { if (typeof options === 'function') { callback = options; options = {}; } options = options || self.writeConcern; options.readPreference = self.readPreference; // Get the nth chunk self .chunkCollection() .findOne({ files_id: self.fileId, n: chunkNumber }, options, function(err, chunk) { if (err) return callback(err); var finalChunk = chunk == null ? {} : chunk; callback(null, new Chunk(self, finalChunk, self.writeConcern)); }); }
javascript
{ "resource": "" }
q21477
train
function(self, options, callback) { if (typeof options === 'function') { callback = options; options = {}; } options = options || self.writeConcern; if (self.fileId != null) { self.chunkCollection().remove({ files_id: self.fileId }, options, function(err) { if (err) return callback(err, false); callback(null, true); }); } else { callback(null, true); } }
javascript
{ "resource": "" }
q21478
GridFSBucketReadStream
train
function GridFSBucketReadStream(chunks, files, readPreference, filter, options) { this.s = { bytesRead: 0, chunks: chunks, cursor: null, expected: 0, files: files, filter: filter, init: false, expectedEnd: 0, file: null, options: options, readPreference: readPreference }; stream.Readable.call(this); }
javascript
{ "resource": "" }
q21479
replSetGetStatus
train
function replSetGetStatus(admin, options, callback) { executeDbAdminCommand(admin.s.db, { replSetGetStatus: 1 }, options, callback); }
javascript
{ "resource": "" }
q21480
serverStatus
train
function serverStatus(admin, options, callback) { executeDbAdminCommand(admin.s.db, { serverStatus: 1 }, options, callback); }
javascript
{ "resource": "" }
q21481
validateCollection
train
function validateCollection(admin, collectionName, options, callback) { const command = { validate: collectionName }; const keys = Object.keys(options); // Decorate command with extra options for (let i = 0; i < keys.length; i++) { if (options.hasOwnProperty(keys[i]) && keys[i] !== 'session') { command[keys[i]] = options[keys[i]]; } } executeCommand(admin.s.db, command, options, (err, doc) => { if (err != null) return callback(err, null); if (doc.ok === 0) return callback(new Error('Error with validate command'), null); if (doc.result != null && doc.result.constructor !== String) return callback(new Error('Error with validation data'), null); if (doc.result != null && doc.result.match(/exception|corrupt/) != null) return callback(new Error('Error: invalid collection ' + collectionName), null); if (doc.valid != null && !doc.valid) return callback(new Error('Error: invalid collection ' + collectionName), null); return callback(null, doc); }); }
javascript
{ "resource": "" }
q21482
collectEvents
train
function collectEvents(mongoClient, topology) { let MongoClient = loadClient(); const collectedEvents = []; if (mongoClient instanceof MongoClient) { monitoringEvents.forEach(event => { topology.on(event, (object1, object2) => { if (event === 'open') { collectedEvents.push({ event: event, object1: mongoClient }); } else { collectedEvents.push({ event: event, object1: object1, object2: object2 }); } }); }); } return collectedEvents; }
javascript
{ "resource": "" }
q21483
replayEvents
train
function replayEvents(mongoClient, events) { for (let i = 0; i < events.length; i++) { mongoClient.emit(events[i].event, events[i].object1, events[i].object2); } }
javascript
{ "resource": "" }
q21484
MongoClient
train
function MongoClient(url, options) { if (!(this instanceof MongoClient)) return new MongoClient(url, options); // Set up event emitter EventEmitter.call(this); // The internal state this.s = { url: url, options: options || {}, promiseLibrary: null, dbCache: {}, sessions: [] }; // Get the promiseLibrary const promiseLibrary = this.s.options.promiseLibrary || Promise; // Add the promise to the internal state this.s.promiseLibrary = promiseLibrary; }
javascript
{ "resource": "" }
q21485
Db
train
function Db(databaseName, topology, options) { options = options || {}; if (!(this instanceof Db)) return new Db(databaseName, topology, options); EventEmitter.call(this); // Get the promiseLibrary const promiseLibrary = options.promiseLibrary || Promise; // Filter the options options = filterOptions(options, legalOptionNames); // Ensure we put the promiseLib in the options options.promiseLibrary = promiseLibrary; // Internal state of the db object this.s = { // Database name databaseName: databaseName, // DbCache dbCache: {}, // Children db's children: [], // Topology topology: topology, // Options options: options, // Logger instance logger: Logger('Db', options), // Get the bson parser bson: topology ? topology.bson : null, // Unpack read preference readPreference: options.readPreference, // Set buffermaxEntries bufferMaxEntries: typeof options.bufferMaxEntries === 'number' ? options.bufferMaxEntries : -1, // Parent db (if chained) parentDb: options.parentDb || null, // Set up the primary key factory or fallback to ObjectID pkFactory: options.pkFactory || ObjectID, // Get native parser nativeParser: options.nativeParser || options.native_parser, // Promise library promiseLibrary: promiseLibrary, // No listener noListener: typeof options.noListener === 'boolean' ? options.noListener : false, // ReadConcern readConcern: options.readConcern }; // Ensure we have a valid db name validateDatabaseName(this.s.databaseName); // Add a read Only property getSingleProperty(this, 'serverConfig', this.s.topology); getSingleProperty(this, 'bufferMaxEntries', this.s.bufferMaxEntries); getSingleProperty(this, 'databaseName', this.s.databaseName); // This is a child db, do not register any listeners if (options.parentDb) return; if (this.s.noListener) return; // Add listeners topology.on('error', createListener(this, 'error', this)); topology.on('timeout', createListener(this, 'timeout', this)); topology.on('close', createListener(this, 'close', this)); topology.on('parseError', createListener(this, 'parseError', this)); topology.once('open', createListener(this, 'open', this)); topology.once('fullsetup', createListener(this, 'fullsetup', this)); topology.once('all', createListener(this, 'all', this)); topology.on('reconnect', createListener(this, 'reconnect', this)); }
javascript
{ "resource": "" }
q21486
createItemString
train
function createItemString(data, limit) { let count = 0; let hasMore = false; if (Number.isSafeInteger(data.size)) { count = data.size; } else { // eslint-disable-next-line no-unused-vars for (const entry of data) { if (limit && count + 1 > limit) { hasMore = true; break; } count += 1; } } return `${hasMore ? '>' : ''}${count} ${count !== 1 ? 'entries' : 'entry'}`; }
javascript
{ "resource": "" }
q21487
computeWithTryCatch
train
function computeWithTryCatch(reducer, action, state) { let nextState = state; let nextError; try { nextState = reducer(state, action); } catch (err) { nextError = err.toString(); if (isChrome) { // In Chrome, rethrowing provides better source map support setTimeout(() => { throw err; }); } else { console.error(err); // eslint-disable-line no-console } } return { state: nextState, error: nextError }; }
javascript
{ "resource": "" }
q21488
computeNextEntry
train
function computeNextEntry(reducer, action, state, shouldCatchErrors) { if (!shouldCatchErrors) { return { state: reducer(state, action) }; } return computeWithTryCatch(reducer, action, state); }
javascript
{ "resource": "" }
q21489
recomputeStates
train
function recomputeStates( computedStates, minInvalidatedStateIndex, reducer, committedState, actionsById, stagedActionIds, skippedActionIds, shouldCatchErrors ) { // Optimization: exit early and return the same reference // if we know nothing could have changed. if ( !computedStates || minInvalidatedStateIndex === -1 || (minInvalidatedStateIndex >= computedStates.length && computedStates.length === stagedActionIds.length) ) { return computedStates; } const nextComputedStates = computedStates.slice(0, minInvalidatedStateIndex); for (let i = minInvalidatedStateIndex; i < stagedActionIds.length; i++) { const actionId = stagedActionIds[i]; const action = actionsById[actionId].action; const previousEntry = nextComputedStates[i - 1]; const previousState = previousEntry ? previousEntry.state : committedState; const shouldSkip = skippedActionIds.indexOf(actionId) > -1; let entry; if (shouldSkip) { entry = previousEntry; } else { if (shouldCatchErrors && previousEntry && previousEntry.error) { entry = { state: previousState, error: 'Interrupted by an error up the chain' }; } else { entry = computeNextEntry( reducer, action, previousState, shouldCatchErrors ); } } nextComputedStates.push(entry); } return nextComputedStates; }
javascript
{ "resource": "" }
q21490
AbstractMediaLoader
train
function AbstractMediaLoader(loadItem, preferXHR, type) { this.AbstractLoader_constructor(loadItem, preferXHR, type); // public properties this.resultFormatter = this._formatResult; // protected properties this._tagSrcAttribute = "src"; this.on("initialize", this._updateXHR, this); }
javascript
{ "resource": "" }
q21491
XHRRequest
train
function XHRRequest (item) { this.AbstractRequest_constructor(item); // protected properties /** * A reference to the XHR request used to load the content. * @property _request * @type {XMLHttpRequest | XDomainRequest | ActiveX.XMLHTTP} * @private */ this._request = null; /** * A manual load timeout that is used for browsers that do not support the onTimeout event on XHR (XHR level 1, * typically IE9). * @property _loadTimeout * @type {Number} * @private */ this._loadTimeout = null; /** * The browser's XHR (XMLHTTPRequest) version. Supported versions are 1 and 2. There is no official way to detect * the version, so we use capabilities to make a best guess. * @property _xhrLevel * @type {Number} * @default 1 * @private */ this._xhrLevel = 1; /** * The response of a loaded file. This is set because it is expensive to look up constantly. This property will be * null until the file is loaded. * @property _response * @type {mixed} * @private */ this._response = null; /** * The response of the loaded file before it is modified. In most cases, content is converted from raw text to * an HTML tag or a formatted object which is set to the <code>result</code> property, but the developer may still * want to access the raw content as it was loaded. * @property _rawResponse * @type {String|Object} * @private */ this._rawResponse = null; this._canceled = false; // Setup our event handlers now. this._handleLoadStartProxy = createjs.proxy(this._handleLoadStart, this); this._handleProgressProxy = createjs.proxy(this._handleProgress, this); this._handleAbortProxy = createjs.proxy(this._handleAbort, this); this._handleErrorProxy = createjs.proxy(this._handleError, this); this._handleTimeoutProxy = createjs.proxy(this._handleTimeout, this); this._handleLoadProxy = createjs.proxy(this._handleLoad, this); this._handleReadyStateChangeProxy = createjs.proxy(this._handleReadyStateChange, this); if (!this._createXHR(item)) { //TODO: Throw error? } }
javascript
{ "resource": "" }
q21492
LoadQueue
train
function LoadQueue (preferXHR, basePath, crossOrigin) { this.AbstractLoader_constructor(); /** * An array of the plugins registered using {{#crossLink "LoadQueue/installPlugin"}}{{/crossLink}}. * @property _plugins * @type {Array} * @private * @since 0.6.1 */ this._plugins = []; /** * An object hash of callbacks that are fired for each file type before the file is loaded, giving plugins the * ability to override properties of the load. Please see the {{#crossLink "LoadQueue/installPlugin"}}{{/crossLink}} * method for more information. * @property _typeCallbacks * @type {Object} * @private */ this._typeCallbacks = {}; /** * An object hash of callbacks that are fired for each file extension before the file is loaded, giving plugins the * ability to override properties of the load. Please see the {{#crossLink "LoadQueue/installPlugin"}}{{/crossLink}} * method for more information. * @property _extensionCallbacks * @type {null} * @private */ this._extensionCallbacks = {}; /** * The next preload queue to process when this one is complete. If an error is thrown in the current queue, and * {{#crossLink "LoadQueue/stopOnError:property"}}{{/crossLink}} is `true`, the next queue will not be processed. * @property next * @type {LoadQueue} * @default null */ this.next = null; /** * Ensure loaded scripts "complete" in the order they are specified. Loaded scripts are added to the document head * once they are loaded. Scripts loaded via tags will load one-at-a-time when this property is `true`, whereas * scripts loaded using XHR can load in any order, but will "finish" and be added to the document in the order * specified. * * Any items can be set to load in order by setting the {{#crossLink "maintainOrder:property"}}{{/crossLink}} * property on the load item, or by ensuring that only one connection can be open at a time using * {{#crossLink "LoadQueue/setMaxConnections"}}{{/crossLink}}. Note that when the `maintainScriptOrder` property * is set to `true`, scripts items are automatically set to `maintainOrder=true`, and changing the * `maintainScriptOrder` to `false` during a load will not change items already in a queue. * * <h4>Example</h4> * * var queue = new createjs.LoadQueue(); * queue.setMaxConnections(3); // Set a higher number to load multiple items at once * queue.maintainScriptOrder = true; // Ensure scripts are loaded in order * queue.loadManifest([ * "script1.js", * "script2.js", * "image.png", // Load any time * {src: "image2.png", maintainOrder: true} // Will wait for script2.js * "image3.png", * "script3.js" // Will wait for image2.png before loading (or completing when loading with XHR) * ]); * * @property maintainScriptOrder * @type {Boolean} * @default true */ this.maintainScriptOrder = true; /** * Determines if the LoadQueue will stop processing the current queue when an error is encountered. * @property stopOnError * @type {Boolean} * @default false */ this.stopOnError = false; /** * The number of maximum open connections that a loadQueue tries to maintain. Please see * {{#crossLink "LoadQueue/setMaxConnections"}}{{/crossLink}} for more information. * @property _maxConnections * @type {Number} * @default 1 * @private */ this._maxConnections = 1; /** * An internal list of all the default Loaders that are included with PreloadJS. Before an item is loaded, the * available loader list is iterated, in the order they are included, and as soon as a loader indicates it can * handle the content, it will be selected. 
The default loader, ({{#crossLink "TextLoader"}}{{/crossLink}} is * last in the list, so it will be used if no other match is found. Typically, loaders will match based on the * {{#crossLink "LoadItem/type"}}{{/crossLink}}, which is automatically determined using the file extension of * the {{#crossLink "LoadItem/src:property"}}{{/crossLink}}. * * Loaders can be removed from PreloadJS by simply not including them. * * Custom loaders installed using {{#crossLink "registerLoader"}}{{/crossLink}} will be prepended to this list * so that they are checked first. * @property _availableLoaders * @type {Array} * @private * @since 0.6.0 */ this._availableLoaders = [ createjs.FontLoader, createjs.ImageLoader, createjs.JavaScriptLoader, createjs.CSSLoader, createjs.JSONLoader, createjs.JSONPLoader, createjs.SoundLoader, createjs.ManifestLoader, createjs.SpriteSheetLoader, createjs.XMLLoader, createjs.SVGLoader, createjs.BinaryLoader, createjs.VideoLoader, createjs.TextLoader ]; /** * The number of built in loaders, so they can't be removed by {{#crossLink "unregisterLoader"}}{{/crossLink}. * @property _defaultLoaderLength * @type {Number} * @private * @since 0.6.0 */ this._defaultLoaderLength = this._availableLoaders.length; this.init(preferXHR, basePath, crossOrigin); }
javascript
{ "resource": "" }
q21493
BinaryLoader
train
function BinaryLoader(loadItem) { this.AbstractLoader_constructor(loadItem, true, createjs.Types.BINARY); this.on("initialize", this._updateXHR, this); }
javascript
{ "resource": "" }
q21494
train
function(escapable) { var i; var unrolled = {}; var c = []; for (i = 0; i < 65536; i++) { c.push( String.fromCharCode(i) ); } escapable.lastIndex = 0; c.join('').replace(escapable, function(a) { unrolled[ a ] = '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); return ''; }); escapable.lastIndex = 0; return unrolled; }
javascript
{ "resource": "" }
q21495
mount
train
function mount(glide, extensions, events) { var components = {}; for (var name in extensions) { if (isFunction(extensions[name])) { components[name] = extensions[name](glide, components, events); } else { warn('Extension must be a function'); } } for (var _name in components) { if (isFunction(components[_name].mount)) { components[_name].mount(); } } return components; }
javascript
{ "resource": "" }
q21496
sortKeys
train
function sortKeys(obj) { return Object.keys(obj).sort().reduce(function (r, k) { r[k] = obj[k]; return r; }, {}); }
javascript
{ "resource": "" }
q21497
mergeOptions
train
function mergeOptions(defaults, settings) { var options = _extends({}, defaults, settings); // `Object.assign` does not deeply merge objects, so we // have to do it manually for every nested object // in options. Although it does not look smart, // it's smaller and faster than a fancy // deep-merge algorithm. if (settings.hasOwnProperty('classes')) { options.classes = _extends({}, defaults.classes, settings.classes); if (settings.classes.hasOwnProperty('direction')) { options.classes.direction = _extends({}, defaults.classes.direction, settings.classes.direction); } } if (settings.hasOwnProperty('breakpoints')) { options.breakpoints = _extends({}, defaults.breakpoints, settings.breakpoints); } return options; }
javascript
{ "resource": "" }
q21498
EventsBus
train
function EventsBus() { var events = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; classCallCheck(this, EventsBus); this.events = events; this.hop = events.hasOwnProperty; }
javascript
{ "resource": "" }
q21499
Glide
train
function Glide(selector) { var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; classCallCheck(this, Glide); this._c = {}; this._t = []; this._e = new EventsBus(); this.disabled = false; this.selector = selector; this.settings = mergeOptions(defaults, options); this.index = this.settings.startAt; }
javascript
{ "resource": "" }