Dataset columns:
  _id               string (length 2–6)
  title             string (length 0–58)
  partition         string (3 classes)
  text              string (length 52–373k)
  language          string (1 class)
  meta_information  dict
q60300
validation
function (next) { var options = { type: ctx.type, count: ctx.count, now: ctx.now, nowISOString: ctx.nowISOString, params: { pk: Model.getIdName(), id: ctx.params.id, relation: ctx.params.relation, custom: ctx.params.custom } }; if (ctx.type === 'model') { options.params.where = ctx.params.where; options.params.range = ctx.params.range; } new _queryBuilder2.default(options).onComplete(next).build(); }
javascript
{ "resource": "" }
q60301
validation
function (query, next) { switch (ctx.type) { case 'model': Model.find(query, next); break; case 'relation': Model.findOne(query, function (err, instance) { if (err) return next(err); var builder = new _queryBuilder2.default({ type: ctx.type, count: ctx.count, now: ctx.now, nowISOString: ctx.nowISOString, params: { range: ctx.params.range, where: ctx.params.where, custom: ctx.params.custom } }); builder.onComplete(function (_err, _query) { if (_err) return next(_err); instance[ctx.relation || ctx.params.relation](_query, next); }); builder.build(); }); break; case 'nested': Model.findOne(query, function (err, instance) { return next(err, instance[ctx.nested]); }); break; default: next(null, []); } }
javascript
{ "resource": "" }
q60302
validation
function(sourceFileName, code) {
  // If we're processing a block of code,
  // let's try to find a class name in the block to specify the sourceFileName
  var re = /class\s?([^\s]+)(\s?{|\s.*)|\s([^\s]+)\s?=\s?class/;
  var m = re.exec(code);
  return (sourceFileName || "") + (m && m[1] || nonce());
}
javascript
{ "resource": "" }
q60303
validation
function(options, code) {
  var rst = JSON.parse(JSON.stringify(options)); // Let's clone options
  // In the case where source maps are activated, we need to make some verifications
  if (rst.sourceMaps) {
    // 1. If we're processing a block of code, we need to ensure that the block will have a specific source name
    if (code) {
      rst.sourceFileName = processSourceFileName(rst.sourceFileName, code);
    }
    // 2. If the sourceMaps option equals "inline" or "both", we need to set it to true before processing the babel transform;
    // otherwise, the sourceMapUrl will be added by babel to the generated code of each file / block of code.
    // We will take care to handle the "inline" or "both" specification when the concatenated code has been fully generated.
    if (rst.sourceMaps === "inline" || rst.sourceMaps === "both") {
      rst.sourceMaps = true;
    }
  }
  return rst;
}
javascript
{ "resource": "" }
q60304
get
validation
function get(self, steamObj, key) { steamObj = steamObj || {}; if (steamObj[key] !== undefined) { return steamObj[key]; } else if (self[key] !== undefined) { return self[key]; } else if (Steam[key] !== undefined) { return Steam[key]; } else { throw new Error("Missing required field: "+key); } }
javascript
{ "resource": "" }
q60305
request
validation
function request(self, options, callback) {
  var _http = get(self, options, 'secure') ? https : http;
  if (Steam.devMode) console.log(options);
  var req = _http.request(options, function(res) {
    var data, dataStr = '';
    res.on('data', function (chunk) {
      dataStr += chunk;
    });
    res.on('end', function() {
      var statusCode = res.statusCode;
      if (statusCode !== 200) {
        if (statusCode === 401) {
          return callback(new Error('Invalid API Key'));
        } else {
          return callback(new Error("HTTP "+statusCode+" "+http.STATUS_CODES[statusCode]));
        }
      }
      // Ensure it is complete and valid JSON
      try {
        data = JSON.parse(dataStr);
      } catch (e) {
        return callback(new Error('Unable to parse JSON data'));
      }
      // Trim or simplify data object if it's entirely wrapped in data.response or data.result
      if ((data.response instanceof Object) && (Object.keys(data).length === 1)) {
        data = data.response;
      }
      if ((data.result instanceof Object) && Object.keys(data).length === 1) {
        data = data.result;
      }
      callback(null, data);
    });
  });
  req.end(options.data);
  req.on('error', function(err) {
    callback(err);
  });
}
javascript
{ "resource": "" }
q60306
getParams
validation
function getParams(self, steamObj, requiredParams, optionalParams) {
  // Required params will throw exception if they don't exist
  var paramObj = {};
  for (var i = 0, len = requiredParams.length; i < len; i++) {
    var paramName = requiredParams[i];
    // Support array arguments
    paramName = paramName.replace("[0]","");
    paramObj[paramName] = get(self, steamObj, paramName);
  }
  // Ignore the thrown exception on optionalParams if field isn't given
  for (var i = 0, len = optionalParams.length; i < len; i++) {
    var paramName = optionalParams[i];
    // Support array arguments
    paramName = paramName.replace("[0]","");
    try {
      paramObj[paramName] = get(self, steamObj, paramName);
    } catch(e) { }
  }
  return paramObj;
}
javascript
{ "resource": "" }
q60307
addInterfaceMethod
validation
function addInterfaceMethod(interfaceName, funcName, fN) {
  // Store a reference to every interface/method
  if (Steam.INTERFACES[interfaceName] === undefined) {
    Steam.INTERFACES[interfaceName] = {};
  }
  Steam.INTERFACES[interfaceName][funcName] = fN;

  // Camel case the method name
  var name = funcName.substr(0,1).toLowerCase() + funcName.substr(1);

  // Add method to Steam's prototype
  if (!isMultiGameInterface(interfaceName)) {
    steam[name] = fN;
  }
  // If multiple interfaces use the same method name
  // Create a new method that requires a gameid property to find the correct method
  // and call the steam method automatically
  else {
    // We only need to do this once
    if (steam[name] !== undefined) return;

    // e.g. Turns 'IEconItems_440' into 'IEconItems'
    var multi_interface_name = interfaceName.split('_')[0];

    // Add method to Steam's prototype
    steam[name] = function(steamObj, callback) {
      var gameid = get(this, steamObj, 'gameid');
      var interface_name = multi_interface_name + '_' + gameid;
      Steam.INTERFACES[interface_name][funcName].call(this, steamObj, callback);
    };
  }
}
javascript
{ "resource": "" }
q60308
buildSteamWrapperMethod
validation
function buildSteamWrapperMethod(interfaceName, funcName, defaultVersion, httpMethod, requiredParams, optionalParams) { // Always include the key and language fields, if available // GetSupportedAPIList doesn't always list them. optionalParams.push('key'); optionalParams.push('language'); // Require gameid for methods with the same name in the different interfaces if (isMultiGameInterface(interfaceName)) { requiredParams.push('gameid'); } var wrapperMethod = function(steamObj, callback) { var params = getParams(this, steamObj, requiredParams, optionalParams); var version = steamObj.version || defaultVersion; this.request(interfaceName, funcName, version, httpMethod, params, callback); }; addInterfaceMethod(interfaceName, funcName, wrapperMethod); }
javascript
{ "resource": "" }
q60309
retrieveSteamAPIMethods
validation
function retrieveSteamAPIMethods(key, callback) { var _steam = new Steam(); _steam.getSupportedAPIList({key:key}, function(err, data) { if (err) return callback(err); var apiList = data.apilist; if (apiList === undefined) return callback(new Error('No data returned')); apiList = apiList.interfaces; // List of interfaces for (var i= 0; i<apiList.length; i++) { var _interface = apiList[i]; var methods = _interface.methods; // List of methods for (var j= 0; j<methods.length; j++) { var method = methods[j]; var optionalParams = [], requiredParams = []; var params = method.parameters; //List of parameters for (var k=0; k<params.length; k++) { var param = params[k]; if (param.optional) { optionalParams.push(param.name); } else { requiredParams.push(param.name); } } buildSteamWrapperMethod(_interface.name, method.name, method.version, method.httpmethod, requiredParams, optionalParams); } } callback(); }); }
javascript
{ "resource": "" }
q60310
filterElement
validation
function filterElement(flJson){ var returnJson = {"photo":{}}; returnJson.photo.title = flJson.photo.title; returnJson.photo.description = flJson.photo.description; returnJson.photo.farm = flJson.photo.farm; returnJson.photo.id = flJson.photo.id; returnJson.photo.media = flJson.photo.media; returnJson.photo.secret = flJson.photo.secret; returnJson.photo.server = flJson.photo.server; returnJson.photo.urls = flJson.photo.urls; return returnJson; }
javascript
{ "resource": "" }
q60311
getFlickrCacheJson
validation
function getFlickrCacheJson(photoId){ if(!enabledCache)return null; var d = new Date(); for(var i = 0; i < cacheJson.length; i++){ if(cacheJson[i].fl.photo.id == photoId){ if( cacheJson[i].expires > d.getTime() ){ return cacheJson[i].fl; } break; } } return null; }
javascript
{ "resource": "" }
q60312
getImageSize
validation
function getImageSize( flickrJson, photo_id, photo_size ){ var sizeInfo = {"width":0, "height":0}; var sizeTable = { "s": "Square", "q": "Large Square", "t": "Thumbnail", "m": "Small", "n": "Small 320", "-" : "Medium", "z": "Medium 640", "c": "Medium 800", "b": "Large", "o": "Original" } if(flickrJson && flickrJson.sizes.size){ for(var i=0; i<flickrJson.sizes.size.length; i++){ if(flickrJson.sizes.size[i].label == sizeTable[photo_size]){ sizeInfo.width = flickrJson.sizes.size[i].width; sizeInfo.height = flickrJson.sizes.size[i].height; } } } return sizeInfo; }
javascript
{ "resource": "" }
q60313
hasClass
validation
function hasClass(el, className) { var test; if (el.classList) { test = el.classList.contains(className); } else { var className = new RegExp('(^| )' + className + '( |$)', 'gi'); test = new RegExp(className).test(el.className); } return test; }
javascript
{ "resource": "" }
q60314
LeipzigEvent
validation
function LeipzigEvent(name, data) {
  var leipzigEvent = undefined;
  if (window.CustomEvent) {
    leipzigEvent = new CustomEvent(name, { detail: data, bubbles: true, cancelable: true });
  } else {
    // For Internet Explorer & PhantomJS
    leipzigEvent = document.createEvent('CustomEvent');
    leipzigEvent.initCustomEvent(name, true, true, data);
  }
  return leipzigEvent;
}
javascript
{ "resource": "" }
q60315
triggerEvent
validation
function triggerEvent(el, name, data) { var e = new LeipzigEvent(name, data); el.dispatchEvent(e); }
javascript
{ "resource": "" }
q60316
processGloss
validation
function processGloss(gloss, callback) { if (!(gloss instanceof Element)) { var err = new Error('Invalid gloss element'); if (typeof callback === 'function') { callback(err); } else { throw err; } } var lines = Array.prototype.slice.call(gloss.children); var linesToAlign = []; var firstRawLine = null; var firstRawLineNum = 0; triggerEvent(gloss, events.beforeGloss); if (firstLineOrig) { var firstLine = lines[0]; addClass(firstLine, classes.original); } if (lastLineFree) { var lastLine = lines[lines.length - 1]; addClass(lastLine, classes.freeTranslation); } // process each line in the gloss lines.forEach(function (line, lineNum) { // don't align lines that are free translations or original, // unformatted lines var isOrig = hasClass(line, classes.original); var isFree = hasClass(line, classes.freeTranslation); var shouldSkip = hasClass(line, classes.noAlign); var shouldAlign = !isOrig && !isFree && !shouldSkip; if (shouldAlign) { triggerEvent(line, events.beforeLex, { lineNum: lineNum }); var tokens = _this2.lex(line.innerHTML); triggerEvent(line, events.afterLex, { tokens: tokens, lineNum: lineNum }); linesToAlign.push(tokens); addClass(line, classes.hidden); // if this is the first aligned line, mark the location // so that the final aligned glosses can be inserted here if (!firstRawLine) { firstRawLine = line; firstRawLineNum = lineNum; } } else { addClass(line, classes.line); addClass(line, classes.lineNum + lineNum); } }); var lastRawLineNum = firstRawLineNum + (linesToAlign.length - 1); triggerEvent(gloss, events.beforeAlign, { lines: linesToAlign, firstLineNum: firstRawLineNum, lastLineNum: lastRawLineNum }); var alignedLines = _this2.align(linesToAlign); triggerEvent(gloss, events.afterAlign, { lines: alignedLines, firstLineNum: firstRawLineNum, lastLineNum: lastRawLineNum }); // determine which type of element the aligned glosses should be wrapped in var alignedWrapper = undefined; if (gloss.tagName === 'UL' || gloss.tagName === 'OL') { alignedWrapper = 'li'; } else { alignedWrapper = 'div'; } triggerEvent(gloss, events.beforeFormat, { lines: alignedLines, firstLineNum: firstRawLineNum, lastLineNum: lastRawLineNum }); var formattedLines = _this2.format(alignedLines, alignedWrapper, firstRawLineNum); gloss.insertBefore(formattedLines, firstRawLine); triggerEvent(formattedLines, events.afterFormat, { firstLineNum: firstRawLineNum, lastLineNum: lastRawLineNum }); // finish up by adding relevant classes to the main container if (!spacing) { addClass(gloss, classes.noSpace); } addClass(gloss, classes.glossed); triggerEvent(gloss, events.afterGloss); }
javascript
{ "resource": "" }
q60317
UrlHelper
validation
function UrlHelper(params){
  this.url = "http://store.steampowered.com/search/?";
  if(!!params.tags){
    for(var i = 0; i < params.tags.length; i++)
      params.tags[i] = parameters.tags.get(params.tags[i]).value;
    this.url += "tags=" + params.tags.join(",") + "&";
  }
  if(!!params.os){
    for(var i = 0; i < params.os.length; i++)
      params.os[i] = parameters.os.get(params.os[i]).value;
    this.url += "os=" + params.os.join(",") + "&";
  }
  if(!!params.playerAmount){
    for(var i = 0; i < params.playerAmount.length; i++)
      params.playerAmount[i] = parameters.playerAmount.get(params.playerAmount[i]).value;
    this.url += "category3=" + params.playerAmount.join(",") + "&";
  }
  if(params.gamesOnly)
    this.url += "category1=998&";
}
javascript
{ "resource": "" }
q60318
serializeState
validation
function serializeState(){
  var data = model.data,
      scale = model.scale,
      translate = model.translate;
  model.state = {
    nodes: data.nodes.map(function(node){
      return {
        type: node.type,
        property: node.property,
        fixed: node.fixed,
        // Keep size of JSON small, so it fits in a URL.
        x: Math.round(node.x),
        y: Math.round(node.y)
      };
    }),
    links: data.links.map(function(link){
      // Replaced link object references with indices for serialization.
      return {
        source: link.source.index,
        target: link.target.index
      };
    }),
    scale: scale,
    translate: translate
  };
}
javascript
{ "resource": "" }
q60319
track
validation
function track(property){ if(!(property in trackedProperties)){ trackedProperties[property] = true; values[property] = model[property]; Object.defineProperty(model, property, { get: function () { return values[property]; }, set: function(value) { values[property] = value; getListeners(property).forEach(function(callback){ callback(value); }); } }); } }
javascript
{ "resource": "" }
q60320
randomDate
validation
function randomDate(start, end) { return new Date(start.getTime() + Math.random() * (end.getTime() - start.getTime())); }
javascript
{ "resource": "" }
q60321
randomPrice
validation
function randomPrice(oldPrice, volatility) { const rnd = randomNumber(); let changePercent = 2 * volatility * rnd; if (changePercent > volatility) { changePercent -= (2 * volatility); } const changeAmount = oldPrice * changePercent; const newPrice = oldPrice + changeAmount; return newPrice; }
javascript
{ "resource": "" }
q60322
randomIncident
validation
function randomIncident(types, start, end) { const x1 = randomDate(start, end); const x2 = new Date(x1); x2.setDate(x1.getDate() + (Math.floor(Math.random() * 45))); Incidents.insert({ x1: x1.getTime(), x2: x2.getTime(), y1: (Math.floor(Math.random() * 99)), type: types[(Math.floor(Math.random() * 2))], }); }
javascript
{ "resource": "" }
q60323
getDatetimeUnit
validation
function getDatetimeUnit(min, max) { const diff = max.diff(min, 'days'); let unit = 'month'; if (diff <= 14) { unit = 'day'; } else if (diff > 14 && diff <= 183) { unit = 'week'; } return unit; }
javascript
{ "resource": "" }
q60324
getRenderedTemplate
validation
function getRenderedTemplate(template, data, variableRegex) {
  return template.replace(variableRegex, (match, captured) => {
    const replacement = valueForProperty(data, captured.trim());
    // If a template variable is found but nothing is supplied to fill it, remove it
    if (replacement == null) {
      return '';
    }
    // If the replacement is a function, replace the variable with the result of the function
    if (typeof replacement === 'function') {
      return replacement();
    }
    // otherwise replace the template variable with the associated data
    return replacement;
  });
}
javascript
{ "resource": "" }
q60325
getProcessedPath
validation
function getProcessedPath(file, data) { return file.replace(/__([A-Za-z0-9-]+_?[A-Za-z0-9-]+)+__/g, (match) => { const dataKey = match.substring(2, match.length - 2); const dataVal = data[dataKey]; if (dataVal) { return dataVal; } return match; }); }
javascript
{ "resource": "" }
q60326
scaffold
validation
function scaffold( { source = '', destination = 'destination', onlyFiles = false, exclude = [], variableRegex = /\{\{\s?([A-Za-z0-9-]+_?[A-Za-z0-9-]+)+\s?\}\}/g, } = {}, data = {}, ) { const cwd = process.cwd(); const thisSource = pathIsAbsolute(source) ? source : path.join(cwd, source); const thisDestination = pathIsAbsolute(destination) ? destination : path.join(cwd, destination); const thisOnlyFiles = onlyFiles; const thisExclude = exclude; const thisVariableRegex = variableRegex; try { const listOfFiles = fs.readdirSync(thisSource); const destinationExists = fs.existsSync(thisDestination); if (!destinationExists) { fs.mkdirSync(thisDestination); } listOfFiles.forEach((file) => { const thisFile = path.join(thisSource, file); const stat = fs.statSync(thisFile); const destinationFilename = getProcessedPath(file, data); const destinationPath = path.join(thisDestination, destinationFilename); if (stat && stat.isDirectory() && !thisOnlyFiles && !thisExclude.includes(file)) { scaffold({ source: thisFile, destination: destinationPath, onlyFiles: thisOnlyFiles, exclude: thisExclude, variableRegex: thisVariableRegex, }, data); } else if (!stat.isDirectory()) { const template = fs.readFileSync(thisFile, 'utf-8'); const filledTemplate = getRenderedTemplate(template, data, thisVariableRegex); fs.writeFileSync(destinationPath, filledTemplate); } }); } catch (e) { throw new Error(e); } }
javascript
{ "resource": "" }
q60327
validation
function (lookup) { var matches = tmplRegExp.exec(lookup); if (matches) { return { src: matches && matches[0], prop: matches && matches[1] || matches[2], }; } else { return false; } }
javascript
{ "resource": "" }
q60328
validation
function (data, lookup, options) {
  var property;
  // as long as this contains a template string, keep traversing
  while(property = findProperty(lookup)) {
    // if this doesn't solely contain a template lookup (e.g. '<%= key %>'), then
    // recursively process it as a template to handle interpolated strings (e.g. 'hi <%= key %>').
    if (property.src !== lookup) {
      lookup = _.template(lookup, data, options);
    } else {
      // expand to the literal value of this key
      lookup = expander.process(data, getobject.get(data, property.prop), options);
    }
  }
  // do one final check for templates.
  if (hasTemplate(lookup)) {
    lookup = _.template(lookup, data, options);
  }
  return lookup;
}
javascript
{ "resource": "" }
q60329
validation
function (data, arr, options) { return arr.map(function(lookup) { return expander.process(data, lookup, options); }); }
javascript
{ "resource": "" }
q60330
validation
function (data, obj, options) { var result = {}; Object.keys(obj).forEach(function(key) { result[key] = expander.process(data, obj[key], options); }); return result; }
javascript
{ "resource": "" }
q60331
_set
validation
function _set(obj, keypath, value) {
  var parts = _keyPathNormalize(keypath).split('.')
  var last = parts.pop()
  var dest = obj
  var hasError
  var errorInfo
  util.some(parts, function(key) {
    var t = util.type(dest)
    if (t != 'object' && t != 'array') {
      hasError = true
      errorInfo = [key, dest]
      return true
    }
    dest = dest[key]
  })
  // set value
  if (!hasError) {
    if (util.type(dest) != 'object' && util.type(dest) != 'array') {
      hasError = true
      errorInfo = [last, dest]
    } else {
      dest[last] = value
      return obj
    }
  }
  throw new Error('Can\'t access "' + errorInfo[0] + '" of "'+ errorInfo[1] + '" when set value of "' + keypath + '"')
}
javascript
{ "resource": "" }
q60332
validation
function(text) {
  if (!text || !text.replace) return text
  return text.replace(/(&[#a-zA-Z0-9]+;)/g, function (m, s) {
    return _convertEntity(s)
  })
}
javascript
{ "resource": "" }
q60333
compute
validation
function compute () {
  // set value
  util.forEach(expressions, function(exp, index) {
    var v = that.$exec(exp)
    if (!v[0]) caches[index] = v[1]
  })
  // get content
  var str = ''
  util.forEach(parts, function(item, index) {
    str += emptyStr(item)
    if (index < expressions.length) {
      str += emptyStr(caches[index])
    }
  })
  return Expression.unveil(str)
}
javascript
{ "resource": "" }
q60334
patchFileObjects
validation
function patchFileObjects( formData ) {
  // There are several landmines to avoid when making file uploads work on all browsers:
  // - the `new File()` constructor trick breaks file uploads on Safari 10 in a way that's
  //   impossible to detect: it will send empty files in the multipart/form-data body.
  //   Therefore we need to detect Chrome.
  // - IE11 and Edge don't support the `new File()` constructor at all. It will throw exception,
  //   so it's detectable by the `supportsFileConstructor` code.
  // - `window.chrome` exists also on Edge (!), `window.chrome.webstore` is only in Chrome and
  //   not in other Chromium based browsers (which have the site isolation bug, too).
  if ( ! window.chrome || ! supportsFileConstructor ) {
    return;
  }
  for ( let i = 0; i < formData.length; i++ ) {
    const val = getFileValue( formData[ i ][ 1 ] );
    if ( val ) {
      formData[ i ][ 1 ] = new File( [ val ], val.name, { type: val.type } );
    }
  }
}
javascript
{ "resource": "" }
q60335
onmessage
validation
function onmessage( e ) { debug( 'onmessage' ); // safeguard... if ( e.origin !== proxyOrigin ) { debug( 'ignoring message... %o !== %o', e.origin, proxyOrigin ); return; } let { data } = e; if ( ! data ) { return debug( 'no `data`, bailing' ); } // Once the iframe is loaded, we can start using it. if ( data === 'ready' ) { onload(); return; } if ( postStrings && 'string' === typeof data ) { data = JSON.parse( data ); } // check if we're receiving a "progress" event if ( data.upload || data.download ) { return onprogress( data ); } if ( ! data.length ) { return debug( '`e.data` doesn\'t appear to be an Array, bailing...' ); } // first get the `xhr` instance that we're interested in const id = data[ data.length - 1 ]; if ( ! ( id in requests ) ) { return debug( 'bailing, no matching request with callback: %o', id ); } const xhr = requests[ id ]; // Build `error` and `body` object from the `data` object const { params } = xhr; let body, statusCode, headers; // apiNamespace (WP-API) const { apiNamespace } = params; body = data[ 0 ]; statusCode = data[ 1 ]; headers = data[ 2 ]; if ( statusCode === 207 ) { // 207 is a signal from rest-proxy. It means, "this isn't the final // response to the query." The proxy supports WebSocket connections // by invoking the original success callback for each message received. } else { // this is the final response to this query delete requests[ id ]; } if ( ! params.metaAPI ) { debug( 'got %o status code for URL: %o', statusCode, params.path ); } else { statusCode = body === 'metaAPIupdated' ? 200 : 500; } // add statusCode into headers object if ( typeof headers === 'object' ) { headers.status = statusCode; } if ( statusCode && 2 === Math.floor( statusCode / 100 ) ) { // 2xx status code, success resolve( xhr, body, headers ); } else { // any other status code is a failure const wpe = WPError( params, statusCode, body ); reject( xhr, wpe, headers ); } }
javascript
{ "resource": "" }
q60336
onprogress
validation
function onprogress( data ) { debug( 'got "progress" event: %o', data ); const xhr = requests[ data.callbackId ]; if ( xhr ) { const prog = new ProgressEvent( 'progress', data ); const target = data.upload ? xhr.upload : xhr; target.dispatchEvent( prog ); } }
javascript
{ "resource": "" }
q60337
resolve
validation
function resolve( xhr, body, headers ) { const e = new ProgressEvent( 'load' ); e.data = e.body = e.response = body; e.headers = headers; xhr.dispatchEvent( e ); }
javascript
{ "resource": "" }
q60338
reject
validation
function reject( xhr, err, headers ) { const e = new ProgressEvent( 'error' ); e.error = e.err = err; e.headers = headers; xhr.dispatchEvent( e ); }
javascript
{ "resource": "" }
q60339
instanceScopedDirective
validation
function instanceScopedDirective(tar, dec, dname) {
  // don't compile child scope
  if (scopedChilds && scopedChilds.length && util.some(scopedChilds, function (item) {
    return tar == item
  })) return

  var drefs = tar._diretives || []
  // prevent repetitive binding
  if (drefs && ~util.indexOf(drefs, dname)) return

  var def = _diretives[dname]
  var expr = _getAttribute(tar, dec) || ''
  drefs.push(dec)
  tar._diretives = drefs
  _removeAttribute(tar, dec)
  var d = new Directive(vm, tar, def, dec, expr, scope)
  $directives.push(d)
}
javascript
{ "resource": "" }
q60340
_safelyCall
validation
function _safelyCall(isCatch, fn, ctx) {
  if (!fn) return
  if (isCatch) {
    try {
      fn.call(ctx)
    } catch(e) {
      consoler.errorTrace(e)
    }
  } else {
    fn.call(ctx)
  }
}
javascript
{ "resource": "" }
q60341
loadMiddlewareFilters
validation
function loadMiddlewareFilters(filters) { var result = [], index, num = filters.length, filter, modulePath, middleware; function startsWith(string, searchString, position) { position = position || 0; return string.indexOf(searchString, position) === position; } for (index = 0; index < num; index++ ) { filter = filters[index]; // Check filter has required properties if (!filter.id) { throw new InvalidConfiguration(InvalidConfiguration.INVALID_FILTER_MESSAGE + " Filter at index " + index + " has no 'id' property."); } if (!filter.path) { throw new InvalidConfiguration(InvalidConfiguration.INVALID_FILTER_MESSAGE + " Filter at index " + index + " has no 'path' property."); } // Determine module path depending on if it must be loaded from 'filters' folder // or node modules. modulePath = filter.path; if (startsWith(filter.path, ".")) { modulePath = path.join(basePath, filter.path); } // Load filter module and initialize middleware filter middleware = require(modulePath).init(filter.id, filter.config); result.push({ id: filter.id, path: modulePath, config: filter.config, middleware: middleware }); } return result; }
javascript
{ "resource": "" }
q60342
loadResources
validation
function loadResources(resources, providerIndex) { var result = [], index, num = resources.length, resource, prefilters, postfilters; for (index = 0; index < num; index++ ) { resource = resources[index]; prefilters = []; postfilters = []; // Check resource has required properties if (!resource.id) { throw new InvalidConfiguration( InvalidConfiguration.INVALID_RESOURCE_MESSAGE + " Resource at index " + index + " on" + " provider at index " + providerIndex + " has no 'id' property." ); } if (!resource.context) { throw new InvalidConfiguration( InvalidConfiguration.INVALID_RESOURCE_MESSAGE + " Resource at index " + index + " on" + " provider at index " + providerIndex + " has no 'context' property." ); } // Load resource's filters if (resource.prefilters) { prefilters = loadMiddlewareFilters(resource.prefilters); } if (resource.postfilters) { postfilters = loadMiddlewareFilters(resource.postfilters); } // Store resource information result.push({ id: resource.id, context: resource.context, prefilters: prefilters, postfilters: postfilters }); } return result; }
javascript
{ "resource": "" }
q60343
loadProviders
validation
function loadProviders(providers) { var result = [], index, num = providers.length, provider, prefilters, postfilters, resources; for (index = 0; index < num; index++ ) { provider = providers[index]; prefilters = []; postfilters = []; resources = []; // Check provider has required properties if (!provider.id) { throw new InvalidConfiguration(InvalidConfiguration.INVALID_PROVIDER_MESSAGE + " Provider at index " + index + " has no 'id' property."); } if (!provider.target) { throw new InvalidConfiguration(InvalidConfiguration.INVALID_PROVIDER_MESSAGE + " Provider at index " + index + " has no 'target' property."); } if (!provider.context) { throw new InvalidConfiguration(InvalidConfiguration.INVALID_PROVIDER_MESSAGE + " Provider at index " + index + " has no context' property."); } // Load provider's filters if (provider.prefilters) { prefilters = loadMiddlewareFilters(provider.prefilters); } if (provider.postfilters) { postfilters = loadMiddlewareFilters(provider.postfilters); } // Load provider's resources if (provider.resources) { resources = loadResources(provider.resources, index); } // Store provider information result.push({ id: provider.id, context: provider.context, target: provider.target, prefilters: prefilters, postfilters: postfilters, resources: resources }); } return result; }
javascript
{ "resource": "" }
q60344
load
validation
function load(config) { var configuration = { prefilters: [], providers: [], postfilters: [] }; // Check a valid object if (typeof config !== "object") { throw new InvalidConfiguration(InvalidConfiguration.EMPTY_MESSAGE); } // Check at least one provider if (!config.providers || !config.providers.length) { throw new InvalidConfiguration(InvalidConfiguration.NO_PROVIDER_MESSAGE); } // Load prefilters if (config.prefilters) { configuration.prefilters = loadMiddlewareFilters(config.prefilters); } // Load postfilters if (config.postfilters) { configuration.postfilters = loadMiddlewareFilters(config.postfilters); } // Load providers configuration.providers = loadProviders(config.providers); return configuration; }
javascript
{ "resource": "" }
q60345
Currencies
validation
function Currencies (currencies) { if (!currencies) { throw new Error('Missing currencies object'); } this.keys = parseInt(currencies.keys || 0); this.metal = parseFloat(currencies.metal || 0); if (isNaN(this.keys) || isNaN(this.metal)) { throw new Error('Not a valid currencies object'); } this.metal = toRefined(toScrap(this.metal)); }
javascript
{ "resource": "" }
q60346
getOptions
validation
function getOptions() { // // Defines and parses the command line interface to start Clyde server. // var argv = yargs .usage("Usage: $0 [options] config_file") .example("$0 config.json", "Start clyde reading configuration from 'config.json' file.") .example("$0 --log debug config.json", "Start clyde with log messages on 'debug' level and reading configuration from 'config.json' file.") .describe("logfile", "Path to the log file. Default 'clyde.log'.") .nargs("logfile", 1) .describe("loglevel", "Level used for clyde log messages. Default 'info'.") .nargs("loglevel", 1) .describe("port", "Port where clyde will listen. Default 8080.") .nargs("port", 1) .help("help") .demand(1, "A configuration file must be specified") .showHelpOnFail(false, "Specify --help for available options") .argv; // // Load configuration file // var options = require(path.join(process.cwd(), argv._[0])); // // Override options with command line specified. Command line takes precedence. // options.logfile = argv.logfile || options.logfile || "clyde.log"; options.loglevel = argv.loglevel || options.loglevel || "info"; options.port = argv.port || options.port || 8000; return options; }
javascript
{ "resource": "" }
q60347
getWindowForElement
validation
function getWindowForElement(element) { var doc = element.ownerDocument || element; return doc.defaultView || doc.parentWindow || window; }
javascript
{ "resource": "" }
q60348
createInputInstance
validation
function createInputInstance(manager) { var Type; var inputClass = manager.options.inputClass; if (inputClass) { Type = inputClass; } else if (SUPPORT_POINTER_EVENTS) { Type = PointerEventInput; } else if (SUPPORT_ONLY_TOUCH) { Type = TouchInput; } else if (!SUPPORT_TOUCH) { Type = MouseInput; } else { Type = TouchMouseInput; } return new Type(manager, inputHandler); }
javascript
{ "resource": "" }
q60349
inputHandler
validation
function inputHandler(manager, eventType, input) {
  var pointersLen = input.pointers.length;
  var changedPointersLen = input.changedPointers.length;
  var isFirst = eventType & INPUT_START && pointersLen - changedPointersLen === 0;
  var isFinal = eventType & (INPUT_END | INPUT_CANCEL) && pointersLen - changedPointersLen === 0;

  input.isFirst = !!isFirst;
  input.isFinal = !!isFinal;

  if (isFirst) {
    manager.session = {};
  }

  // source event is the normalized value of the domEvents
  // like 'touchstart, mouseup, pointerdown'
  input.eventType = eventType;

  // compute scale, rotation etc
  computeInputData(manager, input);

  // emit secret event
  manager.emit('hammer.input', input);

  manager.recognize(input);
  manager.session.prevInput = input;
}
javascript
{ "resource": "" }
q60350
computeInputData
validation
function computeInputData(manager, input) { var session = manager.session; var pointers = input.pointers; var pointersLength = pointers.length; // store the first input to calculate the distance and direction if (!session.firstInput) { session.firstInput = simpleCloneInputData(input); } // to compute scale and rotation we need to store the multiple touches if (pointersLength > 1 && !session.firstMultiple) { session.firstMultiple = simpleCloneInputData(input); } else if (pointersLength === 1) { session.firstMultiple = false; } var firstInput = session.firstInput; var firstMultiple = session.firstMultiple; var offsetCenter = firstMultiple ? firstMultiple.center : firstInput.center; var center = input.center = getCenter(pointers); input.timeStamp = now(); input.deltaTime = input.timeStamp - firstInput.timeStamp; input.angle = getAngle(offsetCenter, center); input.distance = getDistance(offsetCenter, center); computeDeltaXY(session, input); input.offsetDirection = getDirection(input.deltaX, input.deltaY); var overallVelocity = getVelocity(input.deltaTime, input.deltaX, input.deltaY); input.overallVelocityX = overallVelocity.x; input.overallVelocityY = overallVelocity.y; input.overallVelocity = abs(overallVelocity.x) > abs(overallVelocity.y) ? overallVelocity.x : overallVelocity.y; input.scale = firstMultiple ? getScale(firstMultiple.pointers, pointers) : 1; input.rotation = firstMultiple ? getRotation(firstMultiple.pointers, pointers) : 0; input.maxPointers = !session.prevInput ? input.pointers.length : input.pointers.length > session.prevInput.maxPointers ? input.pointers.length : session.prevInput.maxPointers; computeIntervalInputData(session, input); // find the correct target var target = manager.element; if (hasParent(input.srcEvent.target, target)) { target = input.srcEvent.target; } input.target = target; }
javascript
{ "resource": "" }
q60351
TMEhandler
validation
function TMEhandler(manager, inputEvent, inputData) {
  var isTouch = inputData.pointerType == INPUT_TYPE_TOUCH,
      isMouse = inputData.pointerType == INPUT_TYPE_MOUSE;

  if (isMouse && inputData.sourceCapabilities && inputData.sourceCapabilities.firesTouchEvents) {
    return;
  }

  // when we're in a touch event, record touches to de-dupe synthetic mouse event
  if (isTouch) {
    recordTouches.call(this, inputEvent, inputData);
  } else if (isMouse && isSyntheticEvent.call(this, inputData)) {
    return;
  }

  this.callback(manager, inputEvent, inputData);
}
javascript
{ "resource": "" }
q60352
validation
function (input) { var srcEvent = input.srcEvent; var direction = input.offsetDirection; // if the touch action did prevented once this session if (this.manager.session.prevented) { srcEvent.preventDefault(); return; } var actions = this.actions; var hasNone = inStr(actions, TOUCH_ACTION_NONE) && !TOUCH_ACTION_MAP[TOUCH_ACTION_NONE]; var hasPanY = inStr(actions, TOUCH_ACTION_PAN_Y) && !TOUCH_ACTION_MAP[TOUCH_ACTION_PAN_Y]; var hasPanX = inStr(actions, TOUCH_ACTION_PAN_X) && !TOUCH_ACTION_MAP[TOUCH_ACTION_PAN_X]; if (hasNone) { //do not prevent defaults if this is a tap gesture var isTapPointer = input.pointers.length === 1; var isTapMovement = input.distance < 2; var isTapTouchTime = input.deltaTime < 250; if (isTapPointer && isTapMovement && isTapTouchTime) { return; } } if (hasPanX && hasPanY) { // `pan-x pan-y` means browser handles all scrolling/panning, do not prevent return; } if (hasNone || hasPanY && direction & DIRECTION_HORIZONTAL || hasPanX && direction & DIRECTION_VERTICAL) { return this.preventSrc(srcEvent); } }
javascript
{ "resource": "" }
q60353
validation
function (tId, partialTree) { if (tId === void 0) { tId = ''; } if (partialTree === void 0) { partialTree = tree; } if (!tId) { return undefined; } var found; partialTree.some(function (treeItem) { if (treeItem[id] === tId) { found = treeItem; return true; } found = treeItem[children] ? find(tId, treeItem[children]) : undefined; return found ? true : false; }); return found; }
javascript
{ "resource": "" }
q60354
validation
function (el) { return el.id ? el.id : el.parentElement ? findId(el.parentElement) : null; }
javascript
{ "resource": "" }
q60355
listLocalDatasetClientData
validation
function listLocalDatasetClientData(datasetClient, cb) {
  syncStorage.readDatasetClientWithRecords(datasetClient.getId(), function(err, datasetClientsWithRecords) {
    if (err) {
      return cb(err);
    }
    // no sync loop has completed yet, return null
    if (!datasetClientsWithRecords || !datasetClientsWithRecords.syncCompleted) {
      return cb(null, null);
    } else {
      return cb(null, datasetClientsWithRecords);
    }
  });
}
javascript
{ "resource": "" }
q60356
listAppliedChangeSinceLastSync
validation
function listAppliedChangeSinceLastSync(datasetId, lastSyncEndTime, clientInfo, cb) { syncStorage.listUpdates(datasetId, { type: SYNC_UPDATE_TYPES.APPLIED, cuid: clientInfo.cuid, timestamp: {$gt: lastSyncEndTime} }, null, cb); }
javascript
{ "resource": "" }
q60357
listPendingChangesForClient
validation
function listPendingChangesForClient(datasetId, clientInfo, cb) { pendingQueue.search({datasetId: datasetId, cuid: clientInfo.cuid}, cb); }
javascript
{ "resource": "" }
q60358
removePendingChanges
validation
function removePendingChanges(clientRecords, localDatasetClient, pendingChanges) { _.each(pendingChanges, function(pendingChange, uid) { if (clientRecords[uid]) { delete clientRecords[uid]; } if (localDatasetClient[uid]) { delete localDatasetClient[uid]; } }); }
javascript
{ "resource": "" }
q60359
computeDelta
validation
function computeDelta(datasetId, clientRecords, serverRecords) { var creates = {}; var updates = {}; var deletes = {}; _.each(serverRecords, function(serverRecord, serverRecordUid) { var serverRecHash = serverRecord.hash; //record is in both client and server if (clientRecords[serverRecordUid]) { //hash value doesn't match, needs update if (clientRecords[serverRecordUid] !== serverRecHash) { debug('[%s] Updating client record %s client hash=%s', datasetId, serverRecordUid, clientRecords[serverRecordUid]); updates[serverRecordUid] = serverRecord; } } else { //record is not in the client, needs create debug('[%s] Creating client record %s', datasetId, serverRecordUid); creates[serverRecordUid] = serverRecord; } }); _.each(clientRecords, function(clientRecordHash, clientRecordUid) { if (!serverRecords[clientRecordUid]) { //the record is in the client but not in the server, need delete debug('[%s] Deleting client record %s', datasetId, clientRecordUid); deletes[clientRecordUid] = {}; } }); return { create: creates, update: updates, delete: deletes }; }
javascript
{ "resource": "" }
q60360
syncRecords
validation
function syncRecords(datasetId, params, cb) { debug('[%s] process syncRecords request', datasetId); var queryParams = params.query_params || {}; var metaData = params.meta_data || {}; //NOTE: the client doesn't send in this value for syncRecords ATM var cuid = syncConfig.cuidProducer(params); var datasetClient = new DatasetClient(datasetId, {queryParams: queryParams, metaData: metaData}); var clientRecords = params.clientRecs || {}; async.waterfall([ function checkDatasetclientStopped(callback) { syncStorage.readDatasetClient(datasetClient.getId(), function(err, datasetClientJson) { if (err) { return callback(err); } if (!datasetClientJson) { var errMsg = "unknown dataset client datasetId = " + datasetId + " :: queryParams = " + util.inspect(queryParams); debugError("[%s] %s", datasetId, errMsg); return callback(errMsg); } if (datasetClientJson.stopped === true) { return callback(new Error('sync stopped for dataset ' + datasetId)); } else { return callback(); } }); }, async.apply(listLocalDatasetClientData, datasetClient), function listOtherChanges(localDatasetData, callback) { if (localDatasetData) { listChangesNotInLocalDataset(datasetId, localDatasetData.syncLoopEnd, {cuid: cuid}, function(err, appliedUpdates, clientPendingChanges) { return callback(err, localDatasetData, appliedUpdates, clientPendingChanges); }); } else { return callback(); } } ], function(err, localDatasetData, appliedUpdatesSinceLastSync, pendingUpdates) { if (err) { return cb(err); } if (!localDatasetData) { return cb(null, {}); } var localDatasetDataObj = convertToObject(localDatasetData.records); var appliedUpdatesSinceLastSyncObj = convertToObject(appliedUpdatesSinceLastSync); var pendingUpdatesObj = convertToObject(pendingUpdates); removeAppliedUpdates(clientRecords, localDatasetDataObj, appliedUpdatesSinceLastSyncObj); removePendingChanges(clientRecords, localDatasetDataObj, pendingUpdatesObj); var delta = computeDelta(datasetId, clientRecords, localDatasetDataObj); var res = delta; res.hash = localDatasetData.globalHash; debug('[%s] syncRecords API response %j', datasetId, res); return cb(null, res); }); }
javascript
{ "resource": "" }
q60361
validation
function(metric, records) { var returnValue = {message: 'no stats available', name: metric.displayName}; if (records && records.length > 0) { returnValue = _.chain(records).map(function(recordStr) { return JSON.parse(recordStr); }).groupBy(function(record) { return record.tags[metric.groupByTag]; }).reduce(function(reduced, groupRecords, groupKey) { groupRecords = _.sortBy(groupRecords, 'ts'); var processedData = _.reduce(groupRecords, function(memo, groupRecord) { var value = groupRecord.fields[metric.valueField]; memo.current = value; memo.numberOfRecords++; memo.total += value; memo.max = Math.max(value, memo.max); memo.min = Math.min(value, memo.min); memo.from = Math.min(groupRecord.ts, memo.from); memo.end = Math.max(groupRecord.ts, memo.end); return memo; }, {max: 0, min: MAX_NUMBER, current: 0, numberOfRecords: 0, total: 0, from: MAX_NUMBER, end: 0}); reduced[groupKey] = { current: metric.dataFormatter(processedData.current), max: metric.dataFormatter(processedData.max), min: metric.dataFormatter(processedData.min), average: metric.dataFormatter(processedData.total / processedData.numberOfRecords), numberOfRecords: processedData.numberOfRecords, from: new Date(processedData.from).toISOString(), end: new Date(processedData.end).toISOString() }; return reduced; }, {}).value(); } return returnValue; }
javascript
{ "resource": "" }
q60362
init
validation
function init(dataset_id, options, cb) { debug('[%s] init sync with options %j', dataset_id, options); datasets.init(dataset_id, options); //make sure we use the exported version here as the start function should be called only ONCE module.exports.api.start(function(err) { if (err) { return cb(err); } syncStorage.updateManyDatasetClients({datasetId: dataset_id}, {stopped: false}, cb); }); }
javascript
{ "resource": "" }
q60363
stop
validation
function stop(dataset_id, cb) { if (!syncStarted) { return cb(); } debug('[%s] stop sync for dataset', dataset_id); syncStorage.updateManyDatasetClients({datasetId: dataset_id}, {stopped: true}, cb); }
javascript
{ "resource": "" }
q60364
stopAll
validation
function stopAll(cb) { //sync is not started yet, but connect could be called already. In this case, just reset a few things if (!syncStarted) { interceptors.restore(); dataHandlers.restore(); hashProvider.restore(); mongoDbClient = null; redisClient = null; metricsClient = null; return cb(); } debug('stopAll syncs'); datasetClientCleaner.stop(); async.parallel([ async.apply(syncStorage.updateManyDatasetClients, {}, {stopped: true}), async.apply(stopAllWorkers, syncWorkers), async.apply(stopAllWorkers, ackWorkers), async.apply(stopAllWorkers, pendingWorkers), async.apply(syncScheduler.stop.bind(syncScheduler)) ], function(err) { if (err) { debugError('Failed to stop sync due to error : %s', err); return cb(err); } setConfig(); interceptors.restore(); dataHandlers.restore(); hashProvider.restore(); mongoDbClient = null; redisClient = null; metricsClient = null; ackQueue = null; pendingQueue = null; syncQueue = null; ackWorkers = []; pendingWorkers = []; syncWorkers = []; syncStarted = false; syncLock = null; datasetClientCleaner = null; // Reset the memoized start fn so it can be called again module.exports.api.start = async.memoize(start); return cb(); }); }
javascript
{ "resource": "" }
q60365
removeCollision
validation
function removeCollision(datasetId, params, cb) { debug('[%s] removeCollision'); dataHandlers.removeCollision(datasetId, params.hash, params.meta_data, cb); }
javascript
{ "resource": "" }
q60366
doListDatasetClients
validation
function doListDatasetClients(filter, cb) { debug('doListDatasetClients'); var col = mongoClient.collection(DATASETCLIENTS_COLLECTION); col.find(filter).toArray(function(err, datasetClients) { if (err) { debugError('Failed to list datasetClients due to error %s', err); } return cb(err, datasetClients); }); }
javascript
{ "resource": "" }
q60367
doRemoveDatasetClients
validation
function doRemoveDatasetClients(datasetClientsToRemove, cb) { var removeIds = _.pluck(datasetClientsToRemove, 'id'); var datasetIds = _.uniq(_.pluck(datasetClientsToRemove, 'datasetId')); debug('doRemoveDatasetClients: removed datasetClients = %d, datasets = %d', removeIds.length, datasetIds.length); async.series([ function deleteDatasetClientAndRefs(callback) { async.map(removeIds, doRemoveDatasetClientWithRecords, function(err, deleteResult) { if (err) { debug('Failed to delete datasetClients due to error %s', err); } return callback(err, deleteResult); }); }, function removeUnusedRecords(callback) { async.map(datasetIds, removeRecordsForDataset, function(err, deletedCount) { if (err) { debug('Error occured when delete records for dataset due to error %s', err); } return callback(err, deletedCount); }); } ], function(err, results){ if (err) { return cb(err); } else { return cb(null, results[0]); } }); }
javascript
{ "resource": "" }
q60368
doRemoveDatasetClientWithRecords
validation
function doRemoveDatasetClientWithRecords(datasetClientId, cb) { debug('doRemoveDatasetClientWithRecords datasetClientId = %s', datasetClientId); async.waterfall([ async.apply(doReadDatasetClient, datasetClientId), function removeRefs(datasetClientJson, next) { var datasetId = datasetClientJson.datasetId; var recordsCollection = mongoClient.collection(getDatasetRecordsCollectionName(datasetId)); recordsCollection.updateMany({'refs': datasetClientId}, {'$pull': {'refs': datasetClientId}}, function(err) { return next(err, datasetClientJson); }); }, function deleteDatasetClient(datasetClientJson, next) { var col = mongoClient.collection(DATASETCLIENTS_COLLECTION); col.findOneAndDelete({'id': datasetClientId}, next); } ], function(err, result) { if (err) { debug('Failed to doRemoveDatasetClientWithRecords due to error %s', err); return cb(err); } else { return cb(null, result && result.value); } }); }
javascript
{ "resource": "" }
q60369
doReadDatasetClient
validation
function doReadDatasetClient(datasetClientId, cb) { debug('doReadDatasetClient datasetClientId = %s', datasetClientId); var col = mongoClient.collection(DATASETCLIENTS_COLLECTION); col.findOne({id: datasetClientId}, function(err, datasetClient) { if (err) { debugError('Failed to read datasetClient due to error %s :: datasetClientId = %s', err, datasetClientId); } return cb(err, datasetClient); }); }
javascript
{ "resource": "" }
q60370
doUpdateDatasetClient
validation
function doUpdateDatasetClient(datasetClientId, fields, upsert, cb) {
  debug('doUpdateDatasetClient datasetClientId = %s :: fields = %j', datasetClientId, fields);
  var col = mongoClient.collection(DATASETCLIENTS_COLLECTION);
  delete fields._id;
  col.findOneAndUpdate({id: datasetClientId}, {'$set': fields}, {upsert: upsert, returnOriginal: false}, function(err, result) {
    if (err) {
      debugError('Failed to update datasetClients due to error %s :: datasetClientId = %s :: fields = %j', err, datasetClientId, fields);
      return cb(err);
    }
    if (result.value === null) {
      return cb(new Error('DatasetClient not found for id ' + datasetClientId));
    }
    // ensure the indexes are created for a given dataset
    ensureIndexesForDataset(result.value.datasetId);
    return cb(null, result.value);
  });
}
javascript
{ "resource": "" }
q60371
createIndexForCollection
validation
function createIndexForCollection(collectionName, indexField, indexOpts) { var collection = mongoClient.collection(collectionName); collection.createIndex(indexField, indexOpts, function(err) { if (err) { debugError('Failed to create index for collection. collection = %s :: index = %j :: error = %j',collectionName,indexField,err); } else { debug('Index created for collection. Collection = %s :: index = %j',collectionName,indexField); } }); }
javascript
{ "resource": "" }
q60372
ensureIndexesForDataset
validation
function ensureIndexesForDataset(datasetId) { createIndexForCollection(getDatasetRecordsCollectionName(datasetId), {'uid': 1}, {}); createIndexForCollection(getDatasetRecordsCollectionName(datasetId), {'refs': 1}, {}); createIndexForCollection(require('./sync-updates').getDatasetUpdatesCollectionName(datasetId), {'cuid': 1, 'hash': 1}, {}); }
javascript
{ "resource": "" }
q60373
diffRecords
validation
function diffRecords(localRecords, newRecords) { var recordsDiff = {}; _.each(newRecords, function(record, uid) { if (localRecords[uid]) { if (localRecords[uid].hash !== record.hash) { record.op = "update"; recordsDiff[uid] = record; } } else { record.op = "update"; recordsDiff[uid] = record; } }); _.each(localRecords, function(record, uid) { if (!newRecords[uid]) { record.op = "delete"; recordsDiff[uid] = record; } }); return recordsDiff; }
javascript
{ "resource": "" }
q60374
diff
validation
function diff(datasetClientJson, localRecords, next) { var recordsDiff = diffRecords(syncUtil.convertToObject(localRecords), syncUtil.convertToObject(records)); return next(null, datasetClientJson, recordsDiff); }
javascript
{ "resource": "" }
q60375
validation
function(filter, callback) { metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doListDatasetClients)(filter, callback); }
javascript
{ "resource": "" }
q60376
validation
function(datasetClientsToRemove, callback) { if (!datasetClientsToRemove.length) { return callback(); } return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doRemoveDatasetClients)(datasetClientsToRemove, callback); }
javascript
{ "resource": "" }
q60377
validation
function(datasetClientId, fieldsToUpdate, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doUpdateDatasetClient)(datasetClientId, fieldsToUpdate, false, callback); }
javascript
{ "resource": "" }
q60378
validation
function(datasetClientId, fields, callback) { fields.id = datasetClientId; return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doUpdateDatasetClient)(datasetClientId, fields, true, callback); }
javascript
{ "resource": "" }
q60379
validation
function(datasetClientId, fields, records, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doUpdateDatasetClientWithRecords)(datasetClientId, fields, records, callback); }
javascript
{ "resource": "" }
q60380
validation
function(datasetClientId, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doReadDatasetClientWithRecordsUseCache)(datasetClientId, callback); }
javascript
{ "resource": "" }
q60381
doFindAndDeleteUpdate
validation
function doFindAndDeleteUpdate(datasetId, acknowledgement, callback) { debug('[%s] doFindAndDeleteUpdate acknowledgement = %j',datasetId,acknowledgement); var updatesCollection = mongoClient.collection(getDatasetUpdatesCollectionName(datasetId)); updatesCollection.findOneAndDelete({cuid: acknowledgement.cuid, hash: acknowledgement.hash}, function(err, result) { if (err) { debugError('[%s] Failed to doFindAndDeleteUpdate due to error %s :: acknowledgement = %j',datasetId,err,acknowledgement); return callback(err); } return callback(null, result.value); }); }
javascript
{ "resource": "" }
q60382
doListUpdates
validation
function doListUpdates(datasetId, criteria, options, callback) { debug('[%s] doListUpdates criteria = %j',datasetId,criteria); var updatesCollection = mongoClient.collection(getDatasetUpdatesCollectionName(datasetId)); var docLimit = options && options.limit; var cursor = updatesCollection.find(criteria); if (docLimit && docLimit > 0) { cursor = cursor.limit(docLimit); } cursor.toArray(function(err, updates) { if (err) { debugError('[%s] Failed to doListUpdates due to error %s :: criteria = %j' + criteria,datasetId,err,criteria); return callback(err); } return callback(null, updates); }); }
javascript
{ "resource": "" }
q60383
validation
function(datasetId, acknowledgement, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doFindAndDeleteUpdate)(datasetId, acknowledgement, callback); }
javascript
{ "resource": "" }
q60384
validation
function(datasetId, acknowledgementFields, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doSaveUpdate)(datasetId, acknowledgementFields, callback); }
javascript
{ "resource": "" }
q60385
validation
function(datasetId, criteria, options, callback) { return metrics.timeAsyncFunc(metrics.KEYS.MONGODB_OPERATION_TIME, doListUpdates)(datasetId, criteria, options, callback); }
javascript
{ "resource": "" }
q60386
DatasetClient
validation
function DatasetClient(datasetId, opts){ opts = opts || {}; this.datasetId = datasetId; this.queryParams = opts.queryParams || {}; this.metaData = opts.metaData || {}; this.id = generateDatasetClientId(this); this.config = opts.config || datasets.getDatasetConfig(datasetId); this.collisionCount = opts.collisionCount || 0; this.stopped = opts.stopped; this.syncScheduled = opts.syncScheduled; this.syncCompleted = opts.syncCompleted; this.syncLoopStart = opts.syncLoopStart; this.syncLoopEnd = opts.syncLoopEnd; this.lastAccessed = opts.lastAccessed; }
javascript
{ "resource": "" }
q60387
invoke
validation
function invoke(dataset_id, params, callback) { debug('invoke'); if (arguments.length < 3) throw new Error('invoke requires 3 arguments'); // Verify that fn param has been passed if (!params || !params.fn) { var err = new Error('no fn parameter provided in params "' + util.inspect(params) + '"'); debugError('[%s] warn %s %j', dataset_id, err, params); return callback(err, null); } var fn = params.fn; // Verify that fn param is valid if (invokeFunctions.indexOf(fn) < 0) { return callback(new Error('invalid fn parameter provided in params "' + fn + '"'), null); } // We can only continue if sync has connected to its dependencies i.e. mongo & redis if (!syncConnected) { return callback(new Error('Sync not connected')); } var fnHandler = module.exports[fn] || server[fn] || server.api[fn]; server.api.start(function() { return fnHandler(dataset_id, params, callback); }); }
javascript
{ "resource": "" }
q60388
convertToObject
validation
function convertToObject(itemArr) { var obj = {}; _.each(itemArr, function(item) { obj[item.uid] = item; }); return obj; }
javascript
{ "resource": "" }
q60389
SyncScheduler
validation
function SyncScheduler(syncQueueImpl, options) { if (!syncQueueImpl) { throw new Error('syncQueueImpl is required'); } this.syncQueue = syncQueueImpl; options = options || {}; this.syncSchedulerLockName = options.syncSchedulerLockName || 'locks:sync:SyncScheduler'; this.timeBetweenChecks = options.timeBetweenChecks || 500; this.timeBeforeCrashAssumed = options.timeBeforeCrashAssumed || 20000; this.stopped = false; }
javascript
{ "resource": "" }
q60390
updateDatasetClients
validation
function updateDatasetClients(wcb) { var datasetClientIds = _.pluck(datasetClientsToSync, 'id'); syncStorage.updateManyDatasetClients({id: {$in: datasetClientIds}}, {syncScheduled: Date.now()}, wcb); }
javascript
{ "resource": "" }
q60391
insertDocsToDb
validation
function insertDocsToDb(dburl, collectionName, docs, cb) { MongoClient.connect(dburl, function(err, db){ if (err) { return cb(err); } var col = db.collection(collectionName); col.insertMany(docs, function(err, result){ if (err) { return cb(err); } return cb(null, result); }); }); }
javascript
{ "resource": "" }
q60392
set
validation
function set(key, value, cb) { if (!syncConfig.useCache || !redisClient) { return cb && cb(); } return redisClient.set(key, value, cb); }
javascript
{ "resource": "" }
q60393
get
validation
function get(key, cb) { if (!syncConfig.useCache || !redisClient) { return cb && cb(); } return redisClient.get(key, cb); }
javascript
{ "resource": "" }
q60394
del
validation
function del(key, cb) { if (!syncConfig.useCache || !redisClient) { return cb && cb(); } return redisClient.del(key, cb); }
javascript
{ "resource": "" }
q60395
syncWithBackend
validation
function syncWithBackend(payload, tries, callback) { var datasetClientId = payload.id; var datasetId = payload.datasetId; var startTime = payload.startTime; if (!datasetClientId || !datasetId) { recordProcessTime(startTime, false); debugError("no datasetId value found in sync request payload %j" ,payload); return callback(); } if (tries > 1) { //the request is already run once, but for some reason is not acked, we just make sure it's completed and ack it markDatasetClientAsCompleted(datasetClientId, startTime, callback); return; } var queryParams = payload.queryParams || {}; var metaData = payload.metaData || {}; //we need to add this so that if this sync processor crashed before reaching the end, the scheduler will still be able to push a sync request var expectedTimeout = datasets.getDatasetConfig(datasetId).backendListTimeout * 1000 || 5*60*1000; async.waterfall([ function setSyncStart(cb) { var syncLoopStartTime = Date.now(); syncStorage.updateDatasetClient(datasetClientId, {syncLoopStart: syncLoopStartTime, syncLoopEnd: syncLoopStartTime + expectedTimeout}, function(err, datasetClient){ return cb(err, datasetClient); }); }, function listData(datasetClient, cb) { listDataWithTimeout({datasetClientId: datasetClientId, datasetId: datasetId, queryParams: queryParams, metaData: metaData}, expectedTimeout, function(err, res) { return cb(err, datasetClient, res); }); }, function saveRecords(datasetClient, recordsWithHash, cb) { var toHash = _.pluck(recordsWithHash, 'hash'); var globalHash = hashProvider.globalHash(datasetId, toHash); var globalHashWithCollisionCount = [globalHash, datasetClient.collisionCount].join('_'); syncStorage.updateDatasetClientWithRecords(datasetClientId, {globalHash: globalHashWithCollisionCount}, recordsWithHash, cb); } ], function(err){ if (err) { debugError("[%s] Error when sync data with backend. error = %s",datasetId,err); } markDatasetClientAsCompleted(datasetClientId, startTime, callback); }); }
javascript
{ "resource": "" }
q60396
doCreate
validation
function doCreate(datasetId, pendingChange, callback) { var record = pendingChange.post; var metaData = pendingChange.meta_data; debug('[%s] CREATE Start data = %j', datasetId, record); dataHandlers.doCreate(datasetId, record, metaData, function(err, data) { if (err) { debugError('[%s] CREATE Failed - : err = %s', datasetId, err); } else { debug('[%s] CREATE Success - uid = %s', datasetId, data.uid); pendingChange.oldUid = pendingChange.uid; pendingChange.uid = data.uid; } return saveUpdate(datasetId, pendingChange, err ? SYNC_UPDATE_TYPES.FAILED : SYNC_UPDATE_TYPES.APPLIED, err ? util.inspect(err) : null, callback); }); }
javascript
{ "resource": "" }
q60397
doDelete
validation
function doDelete(datasetId, pendingChange, callback) { debug('[%s] DELETE Start', datasetId); var metaData = pendingChange.meta_data; var uid = pendingChange.uid; dataHandlers.doRead(datasetId, uid, metaData, function(err, data) { if (err) { debugError('READ for DELETE Failed - uid = %s : err = %s', datasetId, uid, err); return saveUpdate(datasetId, pendingChange, SYNC_UPDATE_TYPES.FAILED, util.inspect(err), callback); } debug('[%s] READ for DELETE Success', datasetId); debug('[%s] READ for DELETE Data : \n%j', datasetId, data); var preHash = hashProvider.recordHash(datasetId, pendingChange.pre); var dataHash = hashProvider.recordHash(datasetId, data); debug('[%s] DELETE Hash Check %s (client :: dataStore) = %s :: %s', datasetId, uid, preHash.dataHash); if (!dataHash) { //record has already been deleted debug('[%s] DELETE Already performed - uid=%s', datasetId, uid); return saveUpdate(datasetId, pendingChange, SYNC_UPDATE_TYPES.APPLIED, null, callback); } else { if (preHash === dataHash) { dataHandlers.doDelete(datasetId, uid, metaData, function(err) { if (err) { debugError('[%s] DELETE Failed - uid=%s : err = %s', datasetId, err); } else { debug('[%s] DELETE Success - uid=%s : hash = %s', datasetId, dataHash); } return saveUpdate(datasetId, pendingChange, err ? SYNC_UPDATE_TYPES.FAILED : SYNC_UPDATE_TYPES.APPLIED, err ? util.inspect(err) : null, callback); }); } else { debug('[%s] DELETE COLLISION \n Pre record from client:\n%j\nCurrent record from data store:\n%j', datasetId, syncUtil.sortObject(pendingChange.pre), syncUtil.sortObject(data)); handleCollision(datasetId, metaData, pendingChange, dataHash, function(err) { if (err) { debugError('[%s] Failed to save collision uid = %s : err = %s', datasetId, err); } }); return saveUpdate(datasetId, pendingChange, SYNC_UPDATE_TYPES.COLLISION, null, callback); } } }); }
javascript
{ "resource": "" }
q60398
applyPendingChange
validation
function applyPendingChange(pendingChange, tries, callback) { var datasetId = pendingChange.datasetId; if (!datasetId || !pendingChange.action || !pendingChange.uid || !pendingChange.cuid || !pendingChange.hash) { debugError("[%s] invalid pendingChange request dropped :: item = %j", datasetId, pendingChange); return callback(); } debug('[%s] processPending :: item = %j', datasetId, pendingChange); if (tries > retryLimit) { //the pendingChange has been processed before. Mark it as failed debugError('[%s] processPending failed :: tries = %d :: item = %j', datasetId, tries, pendingChange); return saveUpdate(datasetId, pendingChange, SYNC_UPDATE_TYPES.FAILED, "crashed", callback); } pendingChange.tries = tries; var action = pendingChange.action.toLowerCase(); var timer = metrics.startTimer(); function onComplete(err) { metricsClient.gauge(metrics.KEYS.PENDING_CHANGE_PROCESS_TIME, {success: !err, action: action}, timer.stop()); return callback(err); } switch (action) { case "create": doCreate(datasetId, pendingChange, onComplete); break; case "update": doUpdate(datasetId, pendingChange, onComplete); break; case "delete": doDelete(datasetId, pendingChange, onComplete); break; default: debugError("[%s] invalid pendingChange request dropped :: item = %j", datasetId, pendingChange); return onComplete(); } }
javascript
{ "resource": "" }
q60399
MongodbQueue
validation
function MongodbQueue(name, metrics, lock, opts) { if (!name) { throw new Error('name is required to create a mongodb queue'); } if (!opts || !opts.mongodb) { throw new Error('mongodb is not specified to create mongodb queue'); } this.queueName = name; this.metrics = metrics; this.lock = lock; this.lockName = opts.lockName || ('lock:sync:' + this.queueName); this.lockTimeout = opts.lockTimeout || 10000; this.mongodb = opts.mongodb; this.queueOptions = { visibility: opts.visibility || 30, ttl: opts.queueMessagesTTL || 24*60*60 }; this.queue; }
javascript
{ "resource": "" }